├── scripts ├── run_tests.sh ├── build_dev.sh ├── start_dev.sh ├── db │ └── create_database.sql └── export.js ├── res ├── pizza-1.png ├── pizza-2.png ├── pizza-3.png ├── docs │ └── FlowBuild-Modelagem-Blueprints.pdf ├── examples │ ├── nodes │ │ ├── finishNode.json │ │ ├── startNode.json │ │ ├── setToBagNode.json │ │ ├── timerNode.json │ │ ├── flowNode.json │ │ ├── userTaskNode.json │ │ └── httpNode.json │ └── blueprints │ │ └── pizza1_blueprint.json ├── flowbuild-workflow.postman_environment.json └── pizza.md ├── .github ├── label-settings │ ├── name-pattern-labeler.yml │ └── labeler.yml ├── ISSUE_TEMPLATE │ ├── config.yaml │ ├── 3-failing_test.md │ ├── 4-documentation.md │ ├── 2-feature_request.md │ └── 1-bug_report.md ├── workflows │ ├── auto-labeler-pr.yml │ ├── postman-cli-tests.yml │ ├── first-time-contrib.yml │ ├── auto-merge-release-bump.yml │ ├── stale-pr-issue.yml │ └── pr-validation.yml ├── SECURITY.md └── pull_request_template.md ├── src ├── middlewares │ ├── persist.js │ ├── userAgent.js │ ├── trace.js │ └── actordata.js ├── server.js ├── utils │ ├── db.js │ ├── identifyTarget.js │ ├── jwtSecret.js │ ├── logger.js │ ├── publishWorkflow.js │ ├── engineLogger.js │ └── tracing.js ├── tests │ ├── utils │ │ ├── db.js │ │ ├── samples.js │ │ ├── requestConfig.js │ │ ├── fixtures.js │ │ ├── world.js │ │ ├── process_request.js │ │ ├── task_requests.js │ │ ├── cockpit_requests.js │ │ └── auxiliar.js │ ├── healthCheck.test.js │ ├── token.test.js │ ├── cockpitBlueprintValidation.test.js │ └── activityManagerValidation.test.js ├── controllers │ ├── swagger.js │ ├── cockpit │ │ ├── workflow.js │ │ ├── processTree.js │ │ ├── activityManager.js │ │ └── nodes.js │ ├── token.js │ ├── cockpit.js │ ├── package.js │ ├── diagram.js │ ├── connection.js │ └── healthcheck.js ├── samples │ ├── packages.js │ ├── token.js │ ├── blueprints │ │ ├── basic.js │ │ ├── environmentVariables.js │ │ ├── singleUserTask.js │ │ ├── longTimer.js │ │ ├── timersConflicting.js │ │ ├── 
timersConflicting2.js │ │ ├── timersTimeoutProcess.js │ │ ├── timersDurationProcess.js │ │ ├── activitySchemaValidation.js │ │ ├── findProcess.js │ │ ├── notifyUserTask.js │ │ ├── filterData.js │ │ ├── indexProcess.js │ │ ├── customNode.js │ │ ├── timersTimeout.js │ │ ├── remapData.js │ │ ├── testTreeLeaf.js │ │ ├── kafka.js │ │ ├── scriptNode.js │ │ ├── testTreeRoot.js │ │ ├── timersDuration.js │ │ ├── basicAuth.js │ │ ├── timersDueDate.js │ │ ├── grpcNode.js │ │ └── createUuid.js │ └── grpcdescriptor.js ├── validators │ ├── process.js │ ├── schemas │ │ ├── processExecution.js │ │ ├── cockpitListProcesses.js │ │ ├── processesStats.js │ │ ├── processStateFromNode.js │ │ ├── cockpitProcessesStates.js │ │ ├── processState.js │ │ ├── listProcessFilters.js │ │ ├── workflow.schema.js │ │ └── workflow.js │ ├── cockpit.js │ └── base.js ├── engine.js ├── services │ ├── tokenGenerator.js │ ├── broker │ │ ├── index.js │ │ ├── mqtt.js │ │ ├── kafka.js │ │ └── rabbitMQ.js │ └── compareBlueprints.js ├── routers │ └── freeRouter.js ├── nodes │ ├── validateSchemaNode.js │ ├── index.js │ ├── retrieveProcessNode.js │ ├── tokenizeNode.js │ ├── createUuidNode.js │ ├── basicAuthNode.js │ ├── kafkaPublishNode.js │ └── graphqlNode.js └── app.js ├── Dockerfile ├── postman ├── local_environment.json ├── newman │ └── local_environment.json └── collections │ └── Auth & Healthcheck_d80d7616-3f76-4733-8ad6-4738ad200128.json ├── .vscode └── settings.json ├── db ├── migrations │ ├── 20200601193803_add_actor_data_process_state.js │ ├── 20200601205617_add_engine_id_to_process_state.js │ ├── 20210510165700_add_blueprint_hash_to_workflow.js │ ├── 20200604114249_add_status_to_process_table.js │ ├── 20210623160700_set_timer_expiration_index_to_timer.js │ ├── 20200611150201_add_duration_to_proccess_state.js │ ├── 20210623163300_set_am_action_index_to_activity_manager.js │ ├── 20210623163200_set_am_started_index_to_activity_manager.js │ ├── 20210623163900_set_process_id_index_to_process_state.js │ ├── 
20230516151924_create_environment_variable_table.js │ ├── 20191017164358_create-packages-table.js │ ├── 20191006162403_create-process-table.js │ ├── 20230302215201_process_tree.js │ ├── 20191006162328_create-workflow-table.js │ ├── 20191211105603_create-activity-table.js │ ├── 20191211105257_create-activity_manager-table.js │ ├── 20200531164354_create_timer_table.js │ ├── 20210707192414_create_index_table.js │ ├── 20191006162428_create-process-state-table.js │ ├── 20230216123710_create_nodes_view.js │ └── 20200601145635_add_current_state_to_process.js └── seeds │ ├── packages │ └── test_package.js │ ├── blueprints │ ├── test_subprocess_child.js │ ├── test_subprocess_parent.js │ ├── pizza1_blueprint.js │ └── test_workflow_blueprint.js │ └── seed-test-workflow.js ├── index.js ├── codecov.yml ├── public └── swagger-ui │ ├── initializer.js │ └── index.html ├── .eslintrc.js ├── .gitignore ├── .env.docker ├── .postman └── api ├── LICENSE ├── .releaserc.json ├── docker-compose.yml ├── knexfile.js └── package.json /scripts/run_tests.sh: -------------------------------------------------------------------------------- 1 | ./scripts/build_dev.sh -------------------------------------------------------------------------------- /scripts/build_dev.sh: -------------------------------------------------------------------------------- 1 | npm install 2 | npm audit fix 3 | npm run migrations 4 | -------------------------------------------------------------------------------- /scripts/start_dev.sh: -------------------------------------------------------------------------------- 1 | ./scripts/build_dev.sh 2 | npm run seeds 3 | npm run start 4 | -------------------------------------------------------------------------------- /res/pizza-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flow-build/workflow-api/HEAD/res/pizza-1.png -------------------------------------------------------------------------------- 
/res/pizza-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flow-build/workflow-api/HEAD/res/pizza-2.png -------------------------------------------------------------------------------- /res/pizza-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flow-build/workflow-api/HEAD/res/pizza-3.png -------------------------------------------------------------------------------- /scripts/db/create_database.sql: -------------------------------------------------------------------------------- 1 | -- local database 2 | CREATE DATABASE koa_workflow WITH OWNER postgres ENCODING 'UTF-8'; 3 | -------------------------------------------------------------------------------- /res/docs/FlowBuild-Modelagem-Blueprints.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flow-build/workflow-api/HEAD/res/docs/FlowBuild-Modelagem-Blueprints.pdf -------------------------------------------------------------------------------- /.github/label-settings/name-pattern-labeler.yml: -------------------------------------------------------------------------------- 1 | feature: ["feature/*", "feat/*", "features/*"] 2 | bug: ["fix/*", "bug/*","bugfix/*",] 3 | chore: chore/* 4 | fixed-branch: fixed-branch-name 5 | -------------------------------------------------------------------------------- /res/examples/nodes/finishNode.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "any_string (system friendly recommended)", 3 | "name": "any string", 4 | "type": "Finish", 5 | "lane_id": "any_lane_id", 6 | "next": null 7 | } 8 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yaml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: 
false 2 | contact_links: 3 | - name: Questions & Help 4 | url: https://gitter.im/flow-build/flow-build 5 | about: Please ask and answer questions here. 6 | -------------------------------------------------------------------------------- /src/middlewares/persist.js: -------------------------------------------------------------------------------- 1 | const setPersist = (persist) => { 2 | return async (ctx, next) => { 3 | ctx.state.persist = persist; 4 | return next(); 5 | }; 6 | }; 7 | 8 | module.exports = { 9 | setPersist, 10 | }; 11 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:18-alpine as base 2 | 3 | RUN apk update && apk add bash && apk add curl 4 | 5 | RUN mkdir /usr/app 6 | WORKDIR /usr/app 7 | COPY . /usr/app 8 | 9 | RUN npm install 10 | 11 | EXPOSE 3000 12 | 13 | CMD ["node", "src/server.js"] 14 | -------------------------------------------------------------------------------- /src/server.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | require('newrelic'); 3 | require("./utils/tracing"); 4 | 5 | const { startServer } = require("./app"); 6 | 7 | const port = 3000; 8 | 9 | const server = startServer(port); 10 | 11 | module.exports = server; 12 | -------------------------------------------------------------------------------- /postman/local_environment.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "local", 3 | "values": [ 4 | { 5 | "key": "host", 6 | "value": "localhost:3000", 7 | "enabled": true 8 | }, 9 | { 10 | "key": "token", 11 | "value": "", 12 | "enabled": true 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /res/examples/nodes/startNode.json: -------------------------------------------------------------------------------- 1 | { 
2 | "id": "string", 3 | "name": "string", 4 | "next": "string", 5 | "type": "start", 6 | "lane_id": "string", 7 | "parameters": { 8 | "input_schema": {}, 9 | "timeout": 100 10 | } 11 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "conventionalCommits.scopes": [ 3 | "middlewares", 4 | "validators", 5 | "samples", 6 | "utils", 7 | "services", 8 | "routers", 9 | "controllers", 10 | "tests" 11 | ] 12 | } -------------------------------------------------------------------------------- /postman/newman/local_environment.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "local", 3 | "values": [ 4 | { 5 | "key": "host", 6 | "value": "localhost:3000", 7 | "enabled": true 8 | }, 9 | { 10 | "key": "token", 11 | "value": "", 12 | "enabled": true 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /src/utils/db.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const knex = require("knex"); 3 | const knexConfig = require("../../knexfile"); 4 | 5 | const _config = knexConfig[process.env.KNEX_ENV || "test"]; 6 | 7 | module.exports = { 8 | db_config: _config, 9 | db: knex(_config) 10 | }; 11 | -------------------------------------------------------------------------------- /src/tests/utils/db.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const knex = require("knex"); 3 | const knexConfig = require("../../../knexfile"); 4 | 5 | const _config = knexConfig[process.env.KNEX_ENV || "test"]; 6 | 7 | module.exports = { 8 | db_config: _config, 9 | db: knex(_config) 10 | }; 11 | -------------------------------------------------------------------------------- /src/controllers/swagger.js: 
-------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | 3 | const getSwagger = async (ctx, next) => { 4 | ctx.type = 'text/html; charset=utf-8', 5 | ctx.body = fs.createReadStream('public/swagger-ui/index.html') 6 | 7 | return next() 8 | } 9 | 10 | module.exports = { 11 | getSwagger 12 | } -------------------------------------------------------------------------------- /res/examples/nodes/setToBagNode.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "any_string (system friendly recommended)", 3 | "name": "any string", 4 | "next": "other_node_id", 5 | "lane_id": "one_lane_id", 6 | "type": "SystemTask", 7 | "category": "setToBag", 8 | "parameters": { 9 | "input": {} 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /res/examples/nodes/timerNode.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "any_string (system friendly recommended)", 3 | "name": "any string", 4 | "next": "other_node_id", 5 | "lane_id": "one_lane_id", 6 | "type": "SystemTask", 7 | "category": "timer", 8 | "parameters": { 9 | "input": {}, 10 | "timeout": 60 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/3-failing_test.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Failing Test 3 | about: Report failing tests 4 | labels: test 5 | --- 6 | 7 | **Which jobs are failing**: 8 | 9 | **Which test(s) are failing**: 10 | 11 | **Since when has it been failing**: 12 | 13 | **Reason for failure**: 14 | 15 | **Anything else we need to know**: 16 | -------------------------------------------------------------------------------- /src/samples/packages.js: -------------------------------------------------------------------------------- 1 | const dummy = { 2 | name: 
"package_test_1", 3 | description: "test package 1", 4 | code: [ 5 | "do", 6 | [ 7 | "def", 8 | "package_test_1", 9 | ["fn", [], ["prn", ["`", "Dummy test is running!"]]], 10 | ], 11 | ], 12 | }; 13 | 14 | module.exports = { 15 | dummy, 16 | }; 17 | -------------------------------------------------------------------------------- /res/examples/nodes/flowNode.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "any_string (system friendly recommended)", 3 | "name": "any string", 4 | "next": { 5 | "string": "other_node_id", 6 | "default": "other_node_id" 7 | }, 8 | "type": "flow", 9 | "lane_id": "any_lane_id", 10 | "parameters": { 11 | "input": { 12 | "key": "string" 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/tests/utils/samples.js: -------------------------------------------------------------------------------- 1 | const samples = { 2 | valid_token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxIiwibmFtZSI6ImNoZXNoaXJlIGdyaW4iLCJpYXQiOjE1MTYyMzkwMjIsImFjdG9yX2lkIjoxLCJjbGFpbXMiOltdfQ.ujLJkTD3V0sViQbz5JkCe86gI5WBJEmVKjeeTKICKS8", 3 | actor_data: { 4 | actor_id: 1, 5 | claims: [] 6 | } 7 | }; 8 | 9 | module.exports = samples; 10 | -------------------------------------------------------------------------------- /db/migrations/20200601193803_add_actor_data_process_state.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.table("process_state", (table) => { 3 | table.jsonb("actor_data"); 4 | }); 5 | }; 6 | 7 | exports.down = function (knex) { 8 | return knex.schema.table("process_state", (table) => { 9 | table.dropColumn("actor_data"); 10 | }); 11 | }; 12 | -------------------------------------------------------------------------------- /db/migrations/20200601205617_add_engine_id_to_process_state.js: 
-------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.table("process_state", (table) => { 3 | table.uuid("engine_id"); 4 | }); 5 | }; 6 | 7 | exports.down = function (knex) { 8 | return knex.schema.table("process_state", (table) => { 9 | table.dropColumn("engine_id"); 10 | }); 11 | }; 12 | -------------------------------------------------------------------------------- /db/migrations/20210510165700_add_blueprint_hash_to_workflow.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.table("workflow", (table) => { 3 | table.text("blueprint_hash"); 4 | }); 5 | }; 6 | 7 | exports.down = function (knex) { 8 | return knex.schema.table("workflow", (table) => { 9 | table.dropColumn("blueprint_hash"); 10 | }); 11 | }; 12 | -------------------------------------------------------------------------------- /db/migrations/20200604114249_add_status_to_process_table.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.alterTable("process", (table) => { 3 | table.string("current_status"); 4 | }); 5 | }; 6 | 7 | exports.down = function (knex) { 8 | return knex.schema.alterTable("process", (table) => { 9 | table.dropColumn("current_status"); 10 | }); 11 | }; 12 | -------------------------------------------------------------------------------- /src/samples/token.js: -------------------------------------------------------------------------------- 1 | const validToken = 2 | "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxIiwibmFtZSI6ImNoZXNoaXJlIGdyaW4iLCJpYXQiOjE1MTYyMzkwMjIsImFjdG9yX2lkIjoxLCJjbGFpbXMiOltdfQ.ujLJkTD3V0sViQbz5JkCe86gI5WBJEmVKjeeTKICKS8"; 3 | 4 | const actorData = { 5 | actor_id: 1, 6 | claims: [], 7 | }; 8 | 9 | module.exports = { 10 | validToken, 11 | actorData, 12 | }; 13 | 
-------------------------------------------------------------------------------- /db/migrations/20210623160700_set_timer_expiration_index_to_timer.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.raw(`CREATE INDEX idx_timer_expiration ON timer (expires_at DESC) WHERE active = true`); 3 | }; 4 | 5 | exports.down = function (knex) { 6 | return knex.schema.table("timer", (table) => { 7 | table.dropIndex("idx_timer_expiration"); 8 | }); 9 | }; 10 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const freeRouter = require("./src/routers/freeRouter"); 2 | const mainRouter = require("./src/routers/mainRouter"); 3 | const cockpitRouter = require("./src/routers/cockpitRouter"); 4 | const { setEngine, setCockpit } = require("./src/engine"); 5 | 6 | module.exports = { 7 | freeRouter, 8 | mainRouter, 9 | cockpitRouter, 10 | setEngine, 11 | setCockpit, 12 | }; 13 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | codecov: 2 | require_ci_to_pass: yes 3 | 4 | coverage: 5 | precision: 2 6 | round: down 7 | range: "70...100" 8 | 9 | parsers: 10 | gcov: 11 | branch_detection: 12 | conditional: yes 13 | loop: yes 14 | method: no 15 | macro: no 16 | 17 | comment: 18 | layout: "reach,diff,flags,tree" 19 | behavior: default 20 | require_changes: no -------------------------------------------------------------------------------- /db/migrations/20200611150201_add_duration_to_proccess_state.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.alterTable("process_state", (table) => { 3 | table.bigInteger("time_elapsed"); 4 | }); 5 | }; 6 | 7 | 
exports.down = function (knex) { 8 | return knex.schema.alterTable("process_state", (table) => { 9 | table.dropColumn("time_elapsed"); 10 | }); 11 | }; 12 | -------------------------------------------------------------------------------- /db/migrations/20210623163300_set_am_action_index_to_activity_manager.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.raw(`CREATE INDEX idx_am_action ON activity_manager ((props ->> 'action'), created_at)`); 3 | }; 4 | 5 | exports.down = function (knex) { 6 | return knex.schema.table("activity_manager", (table) => { 7 | table.dropIndex("idx_am_action"); 8 | }); 9 | }; 10 | -------------------------------------------------------------------------------- /db/migrations/20210623163200_set_am_started_index_to_activity_manager.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.raw(`CREATE INDEX idx_am_started ON activity_manager (created_at ASC) WHERE status='started'`); 3 | }; 4 | 5 | exports.down = function (knex) { 6 | return knex.schema.table("activity_manager", (table) => { 7 | table.dropIndex("idx_am_started"); 8 | }); 9 | }; 10 | -------------------------------------------------------------------------------- /public/swagger-ui/initializer.js: -------------------------------------------------------------------------------- 1 | window.onload = function () { 2 | function HideItemsPlugin() { 3 | return { 4 | wrapComponents: { 5 | InfoUrl: () => () => null 6 | } 7 | } 8 | } 9 | 10 | window.ui = SwaggerUIBundle({ 11 | url: '/swagger.yml', 12 | dom_id: '#swagger-ui', 13 | deepLinking: true, 14 | plugins: [HideItemsPlugin] 15 | }) 16 | } 17 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | env: { 3 | 
commonjs: true, 4 | es2021: true, 5 | node: true, 6 | jest: true, 7 | }, 8 | extends: 'eslint:recommended', 9 | parserOptions: { 10 | ecmaVersion: 2020, 11 | sourceType: 'module' 12 | }, 13 | rules: { 14 | 'max-len': 'off', 15 | camelcase: 'off', 16 | indent: ['error', 2], 17 | 'no-undef': 'off' 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /db/migrations/20210623163900_set_process_id_index_to_process_state.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.table("process_state", (table) => { 3 | table.index("process_id", "idx_process_state_process_id"); 4 | }); 5 | }; 6 | 7 | exports.down = function (knex) { 8 | return knex.schema.table("process_state", (table) => { 9 | table.dropIndex("idx_process_state_process_id"); 10 | }); 11 | }; 12 | -------------------------------------------------------------------------------- /db/seeds/packages/test_package.js: -------------------------------------------------------------------------------- 1 | const test_workflow_package = [ 2 | "do", 3 | [ 4 | "js", 5 | [ 6 | "`", 7 | "function lisp_test_task(args) { const n_interp = args[0].n_interp; let dates = []; for(let i = 0; i { 3 | table.string("key").primary(); 4 | table.string("value").notNullable(); 5 | table.string("type").notNullable(); 6 | table.timestamps(true, true); 7 | }); 8 | }; 9 | 10 | exports.down = function (knex) { 11 | return knex.schema.dropTable("environment_variable"); 12 | }; 13 | -------------------------------------------------------------------------------- /src/validators/schemas/processExecution.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | type: "array", 3 | items: { 4 | type: "object", 5 | properties: { 6 | state_id: { type: "string", format: "uuid" }, 7 | step_number: { type: "integer" }, 8 | node_type: { type: "string" }, 9 | node: { type: 
"string" }, 10 | next_node_id: { anyOf: [{ type: "string" }, { type: "null" }] }, 11 | status: { type: "string" }, 12 | }, 13 | }, 14 | }; 15 | -------------------------------------------------------------------------------- /src/services/tokenGenerator.js: -------------------------------------------------------------------------------- 1 | const rs = require("jsrsasign"); 2 | 3 | const createJWTToken = (payload, secret, duration) => { 4 | const jwtHeader = { alg: "HS256", typ: "JWT" }; 5 | 6 | const tNow = rs.KJUR.jws.IntDate.get("now"); 7 | const tEnd = tNow + duration; 8 | payload.iat = tNow; 9 | payload.exp = tEnd; 10 | 11 | return rs.KJUR.jws.JWS.sign("HS256", jwtHeader, payload, { utf8: secret }) 12 | } 13 | 14 | module.exports = { 15 | createJWTToken 16 | }; -------------------------------------------------------------------------------- /src/validators/schemas/cockpitListProcesses.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | type: "array", 3 | items: { 4 | type: "object", 5 | properties: { 6 | process_id: { type: "string", format: "uuid" }, 7 | created_at: { type: "string", format: "date-time" }, 8 | state_id: { type: "string", format: "uuid" }, 9 | status: { type: "string" }, 10 | workflow_name: { type: "string" }, 11 | workflow_version: { type: "integer" }, 12 | }, 13 | }, 14 | }; 15 | -------------------------------------------------------------------------------- /db/migrations/20191017164358_create-packages-table.js: -------------------------------------------------------------------------------- 1 | exports.up = function(knex) { 2 | return knex.schema.createTable("packages", table => { 3 | table.uuid("id").primary(); 4 | table.timestamp("created_at").notNullable(); 5 | table.string("name").notNullable().unique(); 6 | table.string("description").notNullable(); 7 | table.text("code").notNullable(); 8 | }); 9 | }; 10 | 11 | exports.down = function(knex) { 12 | return 
knex.schema.dropTable("packages"); 13 | }; 14 | -------------------------------------------------------------------------------- /db/migrations/20191006162403_create-process-table.js: -------------------------------------------------------------------------------- 1 | exports.up = function(knex) { 2 | return knex.schema.createTable("process", table => { 3 | table.uuid("id").primary(); 4 | table.uuid("workflow_id").notNullable(); 5 | table.foreign("workflow_id").references("workflow.id"); 6 | table.jsonb("blueprint_spec").notNullable(); 7 | table.timestamp("created_at").notNullable(); 8 | }); 9 | }; 10 | 11 | exports.down = function(knex) { 12 | return knex.schema.dropTable("process"); 13 | }; 14 | -------------------------------------------------------------------------------- /src/tests/utils/requestConfig.js: -------------------------------------------------------------------------------- 1 | const tokenSamples = require("../../samples/token"); 2 | 3 | const config = { 4 | baseURL: "http://127.0.0.1:3001", 5 | headers: { 6 | common: { 7 | Authorization: `Bearer ${tokenSamples.validToken}`, 8 | }, 9 | post: { 10 | "Content-Type": "application/json", 11 | }, 12 | }, 13 | timeout: 2000, 14 | validateStatus: function (status) { 15 | return status <= 500; 16 | }, 17 | }; 18 | 19 | module.exports = { 20 | config, 21 | }; 22 | -------------------------------------------------------------------------------- /.github/label-settings/labeler.yml: -------------------------------------------------------------------------------- 1 | js: 2 | - 'src/**/*.js' 3 | test: 4 | - '**/tests' 5 | - '**/_tests_' 6 | - '**/*.spec.js' 7 | - '**/*.test.js' 8 | 9 | doc: 10 | - '**/*.md' 11 | 12 | configuration: 13 | - '**/*.yml' 14 | - '**/*.json' 15 | - '**/.env' 16 | - '**/.env.**' 17 | - '**/knexfile.js' 18 | - '**/jest.config.js' 19 | 20 | dependencies: 21 | - '**/package.json' 22 | - '**/build.gradle' 23 | - '**/.*ignore' 24 | 25 | github-actions: 26 | - '**/.github/workflows/**' 27 | 
-------------------------------------------------------------------------------- /src/utils/identifyTarget.js: -------------------------------------------------------------------------------- 1 | const identifyTarget = (blueprint_spec) => { 2 | try { 3 | const { nodes } = blueprint_spec 4 | const startNode = nodes.find((node) => node.type.toLowerCase() === 'start') 5 | const { parameters } = startNode 6 | if (parameters && parameters.target) { 7 | return [true, parameters.target] 8 | } 9 | return [false,] 10 | } catch (e) { 11 | console.log('Error: ', e) 12 | return [false,] 13 | } 14 | } 15 | 16 | module.exports = { 17 | identifyTarget 18 | } -------------------------------------------------------------------------------- /db/migrations/20230302215201_process_tree.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.createTable("process_tree", (table) => { 3 | table.uuid("process_id").notNullable(); 4 | table.uuid("root_id").notNullable(); 5 | table.uuid("parent_id"); 6 | table.integer("depth"); 7 | table.index(["process_id"], "idx_process_id"); 8 | table.index(["root_id"], "idx_root_id"); 9 | }); 10 | }; 11 | 12 | exports.down = function (knex) { 13 | return knex.schema.dropTable("process_tree"); 14 | }; 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### Node ### 2 | # Logs 3 | logs 4 | *.log 5 | npm-debug.log* 6 | yarn-debug.log* 7 | yarn-error.log* 8 | lerna-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Optional npm cache directory 20 | .npm 21 | 22 | #IDE Directory 23 | .idea 24 | # Jest 25 | coverage 26 | 27 | node_modules 28 | .npmrc 29 | 30 | .env 31 | .env.studio 32 | 33 | 
/export 34 | .vscode 35 | .vscode/settings.json 36 | 37 | /build 38 | /nginx -------------------------------------------------------------------------------- /db/migrations/20191006162328_create-workflow-table.js: -------------------------------------------------------------------------------- 1 | exports.up = function(knex) { 2 | return knex.schema.createTable("workflow", table => { 3 | table.uuid("id").primary(); 4 | table.string("name", 255).notNullable(); 5 | table.text("description").notNullable(); 6 | table.jsonb("blueprint_spec").notNullable(); 7 | table.integer("version").notNullable(); 8 | table.timestamp("created_at").notNullable(); 9 | table.unique(["name", "version"]); 10 | }); 11 | }; 12 | 13 | exports.down = function(knex) { 14 | return knex.schema.dropTable("workflow"); 15 | }; 16 | -------------------------------------------------------------------------------- /src/validators/schemas/processesStats.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | type: "object", 3 | properties: { 4 | workflows: { 5 | type: "object", 6 | propertyNames: { 7 | type: "string", 8 | format: "uuid", 9 | }, 10 | patternProperties: { 11 | ".": { 12 | type: "object", 13 | properties: { 14 | workflow_name: { type: "string" }, 15 | workflow_description: { type: "string" }, 16 | workflow_version: { type: "integer" }, 17 | }, 18 | }, 19 | }, 20 | }, 21 | }, 22 | }; 23 | -------------------------------------------------------------------------------- /src/validators/schemas/processStateFromNode.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | type: "array", 3 | items: { 4 | type: "object", 5 | properties: { 6 | workflow_name: { type: "string" }, 7 | version: { type: "integer" }, 8 | workflow_id: { type: "string", format: "uuid" }, 9 | state_id: { type: "string", format: "uuid" }, 10 | step_number: { type: "integer" }, 11 | next_node_id: { type: "string" }, 
12 | result: { type: "object" }, 13 | status: { type: "string" }, 14 | created_at: { type: "string", format: "date-time" }, 15 | }, 16 | }, 17 | }; 18 | -------------------------------------------------------------------------------- /db/migrations/20191211105603_create-activity-table.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.createTable("activity", (table) => { 3 | table.uuid("id").primary(); 4 | table.timestamp("created_at").notNullable(); 5 | table.uuid("activity_manager_id").notNullable(); 6 | table.foreign("activity_manager_id").references("activity_manager.id"); 7 | table.jsonb("actor_data").notNullable(); 8 | table.jsonb("data").notNullable(); 9 | table.string("status").notNullable(); 10 | }); 11 | }; 12 | 13 | exports.down = function (knex) { 14 | return knex.schema.dropTable("activity"); 15 | }; 16 | -------------------------------------------------------------------------------- /public/swagger-ui/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Flowbuild API 7 | 8 | 9 | 10 | 11 | 12 | 13 |
**What part of the docs is missing**:

**We need to cover the doc with**:

- [ ] specification
- [ ] code-block
- [ ] glossary
- [ ] organization

**Anything else we need to know**:
if yes spe 16 | 17 | **Anything else we need to know**: 18 | -------------------------------------------------------------------------------- /src/utils/jwtSecret.js: -------------------------------------------------------------------------------- 1 | const { logger } = require("./logger"); 2 | let jwtSecret = process.env.JWT_KEY || "1234"; 3 | 4 | if(process.env.JWT_ALG === "RS256") { 5 | logger.info('using RS256, switching secretKey to certificate') 6 | const pemHeader = "-----BEGIN PUBLIC KEY-----"; 7 | const pemFooter = "-----END PUBLIC KEY-----"; 8 | const t0 = pemHeader + '\n' + process.env.JWT_KEY + '\n' + pemFooter 9 | jwtSecret = Buffer.from(t0); 10 | } 11 | 12 | const jwtAlgorithms = process.env.JWT_ALG || "HS256" 13 | const jwtPassthrough = process.env.JWT_PASSTHROUGH === 'true' 14 | 15 | module.exports = { 16 | jwtSecret, 17 | jwtAlgorithms, 18 | jwtPassthrough 19 | }; -------------------------------------------------------------------------------- /.env.docker: -------------------------------------------------------------------------------- 1 | POSTGRES_USER=postgres 2 | POSTGRES_PASSWORD=postgres 3 | POSTGRES_DB=workflow 4 | JWT_KEY=1234 5 | NODE_ENV=docker 6 | KNEX_ENV=docker 7 | FLOWBUILD_URL='http://localhost:3000' 8 | # KAFKA=true 9 | # MQTT=true 10 | # MQTT_HOST=localhost 11 | # MQTT_PORT=1883 12 | # MQTT_PROTOCOL=http 13 | # MQTT_USERNAME=admin 14 | # MQTT_PASSWORD=hivemq 15 | # BROKER_PASSWORD=guest 16 | # BROKER_USERNAME=guest 17 | # BROKER_HOST='localhost:9092' 18 | # BROKER_QUEUE=flowbuild 19 | # AMQP=false 20 | # ACTIVITY_MANAGER_BROKER=MQTT 21 | # PROCESS_STATE_BROKER=MQTT 22 | # ACTIVITY_MANAGER_SEND_ONLY_ON_CREATION=true 23 | # WORKFLOW_EVENTS_BROKER=KAFKA 24 | # WORKFLOW_EVENTS_NAMESPACE='local' -------------------------------------------------------------------------------- /db/migrations/20191211105257_create-activity_manager-table.js: -------------------------------------------------------------------------------- 1 | exports.up = 
function(knex) { 2 | return knex.schema.createTable("activity_manager", table => { 3 | table.uuid("id").primary(); 4 | table.timestamp("created_at").notNullable(); 5 | table.string("type").notNullable(); 6 | table.uuid("process_state_id").notNullable(); 7 | table.foreign("process_state_id").references("process_state.id"); 8 | table.jsonb("props").notNullable(); 9 | table.jsonb("parameters").notNullable(); 10 | table.string("status").notNullable(); 11 | }); 12 | }; 13 | 14 | exports.down = function(knex) { 15 | return knex.schema.dropTable("activity_manager"); 16 | }; 17 | -------------------------------------------------------------------------------- /db/migrations/20200531164354_create_timer_table.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.createTable("timer", (table) => { 3 | table.uuid("id").primary(); 4 | table.timestamp("created_at").notNullable(); 5 | table.timestamp("expires_at", (options = { useTz: false })).notNullable(); 6 | table.boolean("active").notNullable().defaultTo(false); 7 | table.string("resource_type").notNullable(); 8 | table.uuid("resource_id").notNullable(); 9 | table.jsonb("params"); 10 | table.timestamp("fired_at", (options = { useTz: false })); 11 | }); 12 | }; 13 | 14 | exports.down = function (knex) { 15 | return knex.schema.dropTable("timer"); 16 | }; 17 | -------------------------------------------------------------------------------- /db/migrations/20210707192414_create_index_table.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.createTable("index", (table) => { 3 | table.uuid("id").primary(); 4 | table.string("entity_type"); 5 | table.string("entity_id").notNullable(); 6 | table.string("process_id").notNullable(); 7 | table.string("activity_manager_id"); 8 | table.timestamp("created_at").notNullable(); 9 | table.index(["process_id"], 
"idx_process"); 10 | table.index(["entity_id"], "idx_entity"); 11 | table.unique(["process_id", "entity_id", "entity_type"]); 12 | }); 13 | }; 14 | 15 | exports.down = function (knex) { 16 | return knex.schema.dropTable("index"); 17 | }; 18 | -------------------------------------------------------------------------------- /.github/workflows/auto-labeler-pr.yml: -------------------------------------------------------------------------------- 1 | # Workflow to associate labels automatically 2 | name: Labeler 3 | 4 | on: 5 | - pull_request 6 | 7 | jobs: 8 | pattern-labeler: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/labeler@v4 12 | with: 13 | configuration-path: .github/label-settings/labeler.yml 14 | repo-token: ${{ secrets.GITHUB_TOKEN }} 15 | 16 | branch-name-labeler: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: TimonVS/pr-labeler-action@v4 20 | with: 21 | configuration-path: .github/label-settings/name-pattern-labeler.yml 22 | env: 23 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 24 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/1-bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "Bug report" 3 | about: Create a report to help us improve 4 | labels: bug 5 | --- 6 | 7 | 8 | 9 | **What happened**: 10 | 11 | **What you expected to happen**: 12 | 13 | **How to reproduce it (as minimally and precisely as possible)**: 14 | 15 | **Anything else we need to know?**: 16 | 17 | **Environment**: 18 | 19 | - version: 20 | - OS installed on: 21 | - User OS & Browser or mobile: 22 | - Plugins: 23 | - Others: 24 | -------------------------------------------------------------------------------- /.github/SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | Use this section to tell people about which versions of your project are 6 | 
currently being supported with security updates. 7 | 8 | | Version | Supported | 9 | | ------- | ------------------ | 10 | | 5.1.x | :white_check_mark: | 11 | | 5.0.x | :x: | 12 | | 4.0.x | :white_check_mark: | 13 | | < 4.0 | :x: | 14 | 15 | ## Reporting a Vulnerability 16 | 17 | Use this section to tell people how to report a vulnerability. 18 | 19 | Tell them where to go, how often they can expect to get an update on a 20 | reported vulnerability, what to expect if the vulnerability is accepted or 21 | declined, etc. 22 | -------------------------------------------------------------------------------- /src/utils/logger.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | const { createLogger, format, transports } = require("winston"); 3 | 4 | const logger = createLogger({ 5 | level: process.env.KOA_LOG_LEVEL || "info", 6 | format: format.combine( 7 | format.colorize(), 8 | format.timestamp(), 9 | format.label({ label: "KW", message: true }), 10 | //format.align(), 11 | format.printf((info) => `${info.timestamp} ${info.level}: ${info.message}`) 12 | ), 13 | transports: [new transports.Console()], 14 | exceptionHandlers: [ 15 | new transports.Console({ 16 | format: format.errors(), 17 | }), 18 | ], 19 | rejectionHandlers: [new transports.Console()], 20 | }); 21 | 22 | module.exports = { 23 | logger 24 | }; 25 | -------------------------------------------------------------------------------- /src/tests/utils/fixtures.js: -------------------------------------------------------------------------------- 1 | const { Engine, Cockpit } = require("@flowbuild/engine"); 2 | const { cleanDb } = require("../utils/auxiliar"); 3 | const { setEngine, setCockpit } = require("../../engine"); 4 | 5 | 6 | module.exports.tearDownEnvironment = async (server, db) => { 7 | Engine.kill(); 8 | await cleanDb(); 9 | await db.destroy(); 10 | await server.close(); 11 | } 12 | 13 | module.exports.createTestEngine = (db) => { 14 
| const engine = new Engine("knex", db, process.env.ENGINE_LOG_LEVEL); 15 | setEngine(engine); 16 | return engine 17 | } 18 | 19 | module.exports.createTestCockpit = (db) => { 20 | const cockpit = new Cockpit("knex", db, process.env.ENGINE_LOG_LEVEL); 21 | setCockpit(cockpit); 22 | return cockpit 23 | } 24 | 25 | -------------------------------------------------------------------------------- /.github/workflows/postman-cli-tests.yml: -------------------------------------------------------------------------------- 1 | name: Automated API tests using Postman CLI 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - test 7 | 8 | jobs: 9 | automated-api-tests: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v3 13 | - name: Install Postman CLI 14 | run: | 15 | curl -o- "https://dl-cli.pstmn.io/install/linux64.sh" | sh 16 | - name: Login to Postman CLI 17 | run: postman login --with-api-key ${{ secrets.POSTMAN_API_KEY }} 18 | - name: Run API tests 19 | run: | 20 | postman collection run "${{ github.workspace }}/Postman Collections/Tests.json" -e "2387160-96db1f0a-871c-4095-a671-d4f8ea251bdc" --integration-id "124557-${{ github.run_id }}" 21 | -------------------------------------------------------------------------------- /src/routers/freeRouter.js: -------------------------------------------------------------------------------- 1 | const Router = require("@koa/router"); 2 | const bodyParser = require("koa-bodyparser"); 3 | const cors = require("koa2-cors"); 4 | const healthCtrl = require("../controllers/healthcheck"); 5 | const tokenCtrl = require("../controllers/token"); 6 | const swaggerCtrl = require('../controllers/swagger') 7 | 8 | module.exports = (opts = {}) => { 9 | const router = new Router(); 10 | 11 | router.use(bodyParser()); 12 | router.use(cors(opts.corsOptions)); 13 | 14 | //main routes, no validation, no middleware 15 | router.get("/", healthCtrl.healthCheck); 16 | router.get("/healthcheck", healthCtrl.healthCheck); 17 | 
router.get("/swagger", swaggerCtrl.getSwagger); 18 | router.post("/token", tokenCtrl.getToken); 19 | 20 | return router; 21 | }; 22 | -------------------------------------------------------------------------------- /src/validators/schemas/processState.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | type: "object", 3 | properties: { 4 | id: { type: "string", format: "uuid" }, 5 | process_id: { type: "string", format: "uuid" }, 6 | step_number: { type: "integer" }, 7 | node_id: { type: "string" }, 8 | //next_node_id: { type: "string" }, 9 | bag: { type: "object" }, 10 | external_input: { type: "null" }, 11 | result: { type: "object" }, 12 | error: { oneOf: [{ type: "null" }, { type: "string" }] }, 13 | status: { type: "string" }, 14 | created_at: { type: "string", format: "date-time" }, 15 | actor_data: { type: "object" }, 16 | engine_id: { type: "string", format: "uuid" }, 17 | time_elapsed: { oneOf: [{ type: "null" }, { type: "string" }] }, 18 | }, 19 | }; 20 | -------------------------------------------------------------------------------- /db/migrations/20191006162428_create-process-state-table.js: -------------------------------------------------------------------------------- 1 | exports.up = function(knex) { 2 | return knex.schema.createTable("process_state", table => { 3 | table.uuid("id").primary(); 4 | table.uuid("process_id").notNullable(); 5 | table.foreign("process_id").references("process.id"); 6 | table.integer("step_number").notNullable(); 7 | table.string("node_id", 255).notNullable(); 8 | table.string("next_node_id"); 9 | table.jsonb("bag").notNullable(); 10 | table.jsonb("external_input"); 11 | table.jsonb("result"); 12 | table.text("error"); 13 | table.string("status").notNullable(); 14 | table.timestamp("created_at").defaultTo(knex.fn.now()); 15 | }); 16 | }; 17 | 18 | exports.down = function(knex) { 19 | return knex.schema.dropTable("process_state"); 20 | }; 21 | 
// Sample blueprint: the minimal two-node workflow (Start -> Finish) in a
// single lane. Serves as the simplest system-workflow fixture.
module.exports = {
  name: "basic",
  description: "system workflow",
  blueprint_spec: {
    requirements: ["core"],
    prepare: [],
    nodes: [
      {
        id: "1",
        type: "Start",
        name: "Start node",
        parameters: {
          input_schema: {}, // empty schema: any start payload is accepted
        },
        next: "2",
        lane_id: "1",
      },
      {
        id: "2",
        type: "Finish",
        name: "Finish node",
        next: null, // terminal node: no successor
        lane_id: "1",
      },
    ],
    lanes: [
      {
        id: "1",
        name: "the_only_lane",
        // Lisp-style lane rule; presumably grants every actor access
        // (constant true) — TODO confirm against the engine's rule evaluator.
        rule: ["fn", ["&", "args"], true],
      },
    ],
    environment: {},
  },
};
const { getCockpit } = require("../../engine");
const { logger } = require("../../utils/logger");
const { validate } = require("uuid");

// Handler: lists workflows with a per-status count of their processes.
// The optional `workflow_id` query filter must be a valid uuid; if it is
// not, responds 400 without querying the cockpit.
module.exports.fetchWorkflowsWithProcessStatusCount = async (ctx, next) => {
  logger.verbose("Called fetchWorkflowsWithProcessStatusCount");
  const cockpit = getCockpit();
  const filters = ctx.query;

  let hasValidId = true;
  if (filters.workflow_id) {
    logger.debug("validating workflow_id");
    hasValidId = validate(filters.workflow_id);
  }

  if (hasValidId) {
    const workflows = await cockpit.fetchWorkflowsWithProcessStatusCount(filters);
    ctx.status = 200;
    ctx.body = {
      workflows,
    };
  } else {
    ctx.status = 400;
    ctx.body = {
      message: "Invalid uuid",
    };
  }

  return next();
};
6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "1", 10 | type: "Start", 11 | name: "Start node", 12 | parameters: { 13 | input_schema: {}, 14 | }, 15 | next: "2", 16 | lane_id: "1", 17 | }, 18 | { 19 | id: "2", 20 | type: "Finish", 21 | name: "Finish node", 22 | next: null, 23 | lane_id: "1", 24 | }, 25 | ], 26 | lanes: [ 27 | { 28 | id: "1", 29 | name: "the_only_lane", 30 | rule: ["fn", ["&", "args"], true], 31 | }, 32 | ], 33 | environment: { 34 | TEST_VARIABLE: "KOA_LOG_LEVEL", 35 | }, 36 | }, 37 | }; 38 | -------------------------------------------------------------------------------- /src/middlewares/userAgent.js: -------------------------------------------------------------------------------- 1 | const { logger } = require("../utils/logger"); 2 | const RequestIp = require("@supercharge/request-ip"); 3 | 4 | const captureUserAgentAndIp = async (ctx, next) => { 5 | logger.debug("[MIDDLEWARES] captureUserAgentAndIp"); 6 | const clientIp = RequestIp.getClientIp(ctx.request); 7 | const userAgent = { 8 | isMobile: ctx.userAgent._agent.isMobile, 9 | os: ctx.userAgent._agent.os, 10 | version: ctx.userAgent._agent.version, 11 | browser: ctx.userAgent._agent.browser, 12 | platform: ctx.userAgent._agent.platform, 13 | } 14 | 15 | if (ctx.state.actor_data) { 16 | ctx.state.actor_data.userAgent = userAgent; 17 | ctx.state.actor_data.requestIp = clientIp; 18 | } else { 19 | ctx.state.actor_data = { 20 | userAgent: userAgent, 21 | requestIp: clientIp, 22 | }; 23 | } 24 | 25 | return next(); 26 | }; 27 | 28 | module.exports = { 29 | captureUserAgentAndIp 30 | } -------------------------------------------------------------------------------- /.postman/api: -------------------------------------------------------------------------------- 1 | id = 8b847e91-141c-4ceb-bfa4-7fcf6d30632e 2 | 3 | [relations] 4 | 5 | [relations.collections] 6 | rootDirectory = postman/collections 7 | files[] = {"id":"2387160-d36f89a2-0ad9-4cf3-b56b-1a874b9ac814","path":"Auth & 
Healthcheck_d80d7616-3f76-4733-8ad6-4738ad200128.json","metaData":{}} 8 | files[] = {"id":"2387160-2021feff-c3ae-4bbb-97b0-edc69afa6bc2","path":"Cockpit_6d7a0568-0eb3-442c-a4c5-0081ea4ef929.json","metaData":{}} 9 | files[] = {"id":"2387160-6e9cb38a-db1c-4913-91ad-2546e9ac4218","path":"FlowBuild_e2d1e060-3122-40af-901d-8a2dcd4e852b.json","metaData":{}} 10 | files[] = {"id":"2387160-81e44739-2999-4f0d-9855-5f26f493f763","path":"Tests_7aa1e169-177d-44a0-a7d2-7116348d54e6.json","metaData":{}} 11 | 12 | [relations.collections.metaData] 13 | 14 | [relations.apiDefinition] 15 | rootDirectory = postman/schemas 16 | files[] = {"path":"flowbuild.yaml","metaData":{}} 17 | 18 | [relations.apiDefinition.metaData] 19 | type = openapi:3 20 | -------------------------------------------------------------------------------- /res/flowbuild-workflow.postman_environment.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "41ab133f-e325-4915-b18d-88d4990f0ef3", 3 | "name": "flowbuild-workflow", 4 | "values": [ 5 | { 6 | "key": "token", 7 | "value": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyLCJleHAiOjE1MTYyMzkwMjJ9.ZZhdzFeBoMHcszpMrXUAV0kMqp43RDtHqyRqWAtek90", 8 | "enabled": true 9 | }, 10 | { 11 | "key": "host", 12 | "value": "localhost:3000", 13 | "enabled": true 14 | }, 15 | { 16 | "key": "process_id", 17 | "value": "", 18 | "enabled": true 19 | }, 20 | { 21 | "key": "workflow_id", 22 | "value": "", 23 | "enabled": true 24 | }, 25 | { 26 | "key": "activity_manager_id", 27 | "value": "", 28 | "enabled": true 29 | } 30 | ], 31 | "_postman_variable_scope": "environment", 32 | "_postman_exported_at": "2020-11-10T20:04:53.505Z", 33 | "_postman_exported_using": "Postman/7.35.0" 34 | } -------------------------------------------------------------------------------- /src/services/broker/index.js: -------------------------------------------------------------------------------- 
require("dotenv").config();
const mqtt = require("./mqtt");
const rabbitMQ = require("./rabbitMQ");
const kafka = require('./kafka');

// Connects every broker whose feature flag (MQTT / AMQP / KAFKA) is enabled
// via environment variables. Flags are independent; several brokers may be
// connected at once.
async function connect() {
  if (process.env.MQTT === "true") {
    await mqtt.connect();
  }
  if (process.env.AMQP === "true") {
    await rabbitMQ.connect();
  }
  if (process.env.KAFKA === "true") {
    await kafka.connect();
  }
}

// Publishes `payload` to the broker named by `broker`, but only when that
// broker's env flag is also enabled; otherwise the call is a silent no-op.
// NOTE(review): the MQTT branch reads payload.topic / payload.message while
// the AMQP and Kafka branches read payload.context — callers that build a
// { context: { topic, message } } payload (e.g. publishWorkflow) would pass
// undefined topic/message to MQTT. Confirm intended payload shape per broker.
async function publishMessage(payload, broker) {
  if (process.env.MQTT === "true" && broker === "MQTT") {
    await mqtt.publishMessage(payload.topic, payload.message);
  } else if (process.env.AMQP === "true" && broker === "AMQP") {
    await rabbitMQ.publishMessage(payload.context);
  } else if (process.env.KAFKA === "true" && broker === "KAFKA") {
    await kafka.publishMessage(payload.context);
  }
}

module.exports = {
  connect,
  publishMessage,
}
validateSetProcessState, 35 | validateFetchNodeSchema, 36 | validateEnvironmentSchema, 37 | }; 38 | -------------------------------------------------------------------------------- /src/samples/blueprints/singleUserTask.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "user_task", 3 | description: "user task workflow", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "1", 10 | type: "Start", 11 | name: "Start node", 12 | next: "2", 13 | parameters: { 14 | input_schema: {}, 15 | }, 16 | lane_id: "1", 17 | }, 18 | { 19 | id: "2", 20 | type: "UserTask", 21 | name: "User Task node", 22 | next: "3", 23 | lane_id: "1", 24 | parameters: { 25 | action: "do something", 26 | input: {}, 27 | }, 28 | }, 29 | { 30 | id: "3", 31 | type: "Finish", 32 | name: "Finish node", 33 | next: null, 34 | lane_id: "1", 35 | }, 36 | ], 37 | lanes: [ 38 | { 39 | id: "1", 40 | name: "the_only_lane", 41 | rule: ["fn", ["&", "args"], true], 42 | }, 43 | ], 44 | environment: {}, 45 | }, 46 | }; 47 | -------------------------------------------------------------------------------- /src/utils/publishWorkflow.js: -------------------------------------------------------------------------------- 1 | const { publishMessage } = require("../services/broker"); 2 | const { identifyTarget } = require("../utils/identifyTarget"); 3 | const namespace = process.env.WORKFLOW_EVENTS_NAMESPACE || process.env.NODE_ENV; 4 | 5 | const publishWorkflow = async (workflow) => { 6 | const [hasTarget, event] = identifyTarget(workflow.blueprint_spec) 7 | let topic = (namespace) ? 8 | `${namespace}.wem.workflow.target.create` 9 | : `wem.workflow.target.create` 10 | if (process.env.WORKFLOW_EVENTS_BROKER === 'MQTT') { 11 | topic = (namespace) ? 
12 | `/${namespace}/workflow/${workflow.id}/create` 13 | : `/workflow/${workflow.id}/create` 14 | } 15 | publishMessage({ 16 | context: { 17 | topic, 18 | message: { 19 | name: workflow.name, 20 | workflow_id: workflow.id, 21 | hash: workflow._blueprint_hash, 22 | hasTarget: hasTarget, 23 | event: event, 24 | version: workflow._version, 25 | } 26 | } 27 | }, process.env.WORKFLOW_EVENTS_BROKER) 28 | } 29 | 30 | module.exports = { 31 | publishWorkflow 32 | } -------------------------------------------------------------------------------- /db/migrations/20230216123710_create_nodes_view.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema.createView("nodes", (view) => { 3 | view.columns(["workflow_id", "workflow_name", "workflow_version", "latest", "node_id", "node_type", "node_category"]); 4 | view.as( 5 | knex.raw( 6 | `select 7 | w1.id as workflow_id, 8 | w1.name as workflow_name, 9 | w1.version as workflow_version, 10 | case when w1.version = wmax.version then true else false end as latest, 11 | jsonb_array_elements(w1.blueprint_spec -> 'nodes') ->> 'id' as node_id, 12 | jsonb_array_elements(w1.blueprint_spec -> 'nodes') ->> 'type' as node_type, 13 | jsonb_array_elements(w1.blueprint_spec -> 'nodes') ->> 'category' as category 14 | from workflow w1 15 | join (select name, max(version) as version from workflow group by name) as wmax on w1.name = wmax.name` 16 | ) 17 | ); 18 | }); 19 | }; 20 | 21 | exports.down = function (knex) { 22 | return knex.schema.dropViewIfExists("nodes"); 23 | }; 24 | -------------------------------------------------------------------------------- /src/middlewares/trace.js: -------------------------------------------------------------------------------- 1 | const { logger } = require("../utils/logger"); 2 | const TraceParent = require("traceparent"); 3 | 4 | const captureTraceData = async (ctx, next) => { 5 | logger.debug("middleware captureTraceData"); 
6 | const tracesettings = { transactionSampleRate: 1 }; 7 | 8 | let parent; 9 | if (ctx.request.headers?.traceparent) { 10 | logger.silly('old traceparent: ', ctx.request.headers.traceparent) 11 | parent = TraceParent.fromString(ctx.request.headers.traceparent); 12 | } else { 13 | logger.silly('no traceparent: ') 14 | } 15 | 16 | const traceparent = TraceParent.startOrResume(parent, tracesettings); 17 | 18 | const trace = { 19 | tracestate: ctx.request.headers.tracestate, 20 | traceparent: traceparent.toString(), 21 | }; 22 | 23 | if (ctx.state.actor_data) { 24 | ctx.state.actor_data.trace = trace; 25 | } else { 26 | ctx.state.actor_data = { 27 | trace: trace, 28 | }; 29 | } 30 | 31 | logger.silly('new traceparent: ', traceparent.toString()) 32 | 33 | return next(); 34 | }; 35 | 36 | module.exports = { 37 | captureTraceData, 38 | }; 39 | -------------------------------------------------------------------------------- /src/samples/blueprints/longTimer.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "long_timer_process", 3 | description: "long timer task workflow", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "1", 10 | type: "Start", 11 | name: "Start node", 12 | next: "2", 13 | parameters: { 14 | input_schema: {}, 15 | }, 16 | lane_id: "1", 17 | }, 18 | { 19 | id: "2", 20 | type: "SystemTask", 21 | category: "timer", 22 | name: "Long Timer node", 23 | next: "3", 24 | lane_id: "1", 25 | parameters: { 26 | input: {}, 27 | timeout: 1000, 28 | }, 29 | }, 30 | { 31 | id: "3", 32 | type: "Finish", 33 | name: "Finish node", 34 | next: null, 35 | lane_id: "1", 36 | }, 37 | ], 38 | lanes: [ 39 | { 40 | id: "1", 41 | name: "the_only_lane", 42 | rule: ["fn", ["&", "args"], true], 43 | }, 44 | ], 45 | environment: {}, 46 | }, 47 | }; 48 | -------------------------------------------------------------------------------- 
/db/migrations/20200601145635_add_current_state_to_process.js: -------------------------------------------------------------------------------- 1 | exports.up = function (knex) { 2 | return knex.schema 3 | .alterTable("process", (table) => { 4 | table.uuid("current_state_id"); 5 | }) 6 | .then(() => { 7 | knex.schema.alterTable("process", (table) => { 8 | table.foreign("current_state_id").references("process_state.id"); 9 | }); 10 | }) 11 | .then(() => { 12 | knex.raw(`update process set current_state_id = msn_id.id from 13 | (select process_state.id, msn, process_state.process_id from process_state, ( 14 | select process_state.process_id as pid, max(process_state.step_number) 15 | as msn from process_state group by process_state.process_id 16 | ) AS sq where process_state.step_number = msn and process_state.process_id = pid) AS msn_id 17 | where process.id = msn_id.process_id`); 18 | }); 19 | }; 20 | 21 | exports.down = function (knex) { 22 | return knex.schema.alterTable("process", (table) => { 23 | table.dropColumn("current_state_id"); 24 | }); 25 | }; 26 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 FlowBuild 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
name: Automerge release bump PR

on: pull_request_target

jobs:
  autoapprove:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    # Keep the whole condition inside one ${{ }} expression: the original
    # `${{a}} || ${{b}}` concatenated the rendered result with the literal
    # text " || ", producing a non-empty (always truthy) string.
    if: ${{ github.actor == 'flowbuild-bot' || github.actor == 'bot-flowbuild' }}
    steps:
      - name: Autoapproving
        uses: hmarr/auto-approve-action@v3
        with:
          review-message: "Auto approved automated PR"

  automerge:
    needs: [autoapprove]
    runs-on: ubuntu-latest
    steps:
      - name: Automerging
        uses: pascalgn/automerge-action@v0.15.6
        if: ${{ github.actor == 'flowbuild-bot' || github.actor == 'bot-flowbuild' }}
        env:
          GITHUB_TOKEN: "${{ secrets.GH_TOKEN }}"
          GITHUB_LOGIN: flowbuild-bot
          MERGE_LABELS: ""
          MERGE_METHOD: "squash"
          MERGE_COMMIT_MESSAGE: "pull-request-title"
          MERGE_RETRIES: "5"
          MERGE_RETRY_SLEEP: "60000"
          # Fixed typo: was 'bot-flowbuilld' (triple l), which matches no
          # account and would filter out every PR author.
          MERGE_FILTER_AUTHOR: 'bot-flowbuild'
          MERGE_DELETE_BRANCH: true
getCockpit } = require("../../engine"); 4 | const { db } = require('../../utils/db') 5 | const tree = new Tree(db); 6 | 7 | const getProcessTree = async (ctx, next) => { 8 | logger.verbose("called getProcessTree"); 9 | 10 | const process_id = ctx.params.id; 11 | const myTree = await tree.getPath(process_id); 12 | 13 | const cockpit = getCockpit(); 14 | const promises = myTree.map(async p => { 15 | const process = await cockpit.fetchProcess(p.processId); 16 | return { 17 | process_id: p.processId, 18 | parent_id: p.parentId, 19 | depth: p.depth, 20 | workflow_name: process.workflow_name, 21 | current_status: process._current_status 22 | } 23 | }) 24 | 25 | const processes = await Promise.all(promises) 26 | 27 | if (myTree) { 28 | ctx.status = 200; 29 | ctx.body = processes; 30 | } else { 31 | ctx.status = 404; 32 | ctx.body = { message: "Not found" }; 33 | } 34 | 35 | return next(); 36 | } 37 | 38 | module.exports = { 39 | getProcessTree 40 | } -------------------------------------------------------------------------------- /src/controllers/token.js: -------------------------------------------------------------------------------- 1 | const { v1: uuid } = require("uuid"); 2 | const { nanoid } = require("nanoid"); 3 | const { createJWTToken } = require("../services/tokenGenerator"); 4 | const { jwtSecret } = require("../utils/jwtSecret"); 5 | const { logger } = require("../utils/logger"); 6 | 7 | const getToken = (ctx, next) => { 8 | logger.verbose("Called getToken"); 9 | const secret = ctx.get("x-secret") || jwtSecret; 10 | const duration = parseInt(ctx.get("x-duration")) || 3600; // default is 1 hour 11 | 12 | const body = ctx.request.body || {}; 13 | if (!body?.actor_id) { 14 | logger.debug("Set a random actor_id"); 15 | body.actor_id = uuid(); 16 | } 17 | if (!body?.claims) { 18 | logger.debug("Set an empty claims list"); 19 | body.claims = []; 20 | } else if (!Array.isArray(body.claims)) { 21 | let claims = []; 22 | claims.push(body.claims); 23 | body.claims = 
claims; 24 | } 25 | 26 | body.session_id = nanoid(); 27 | 28 | const jwtToken = createJWTToken(body, secret, duration); 29 | ctx.status = 200; 30 | ctx.body = { 31 | jwtToken, 32 | payload: body, 33 | }; 34 | 35 | return next(); 36 | }; 37 | 38 | module.exports = { 39 | getToken, 40 | }; -------------------------------------------------------------------------------- /.github/workflows/stale-pr-issue.yml: -------------------------------------------------------------------------------- 1 | name: Manage stale issues and PRs 2 | 3 | on: 4 | schedule: 5 | - cron: "0 0 * * *" 6 | 7 | jobs: 8 | stale: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/stale@v1.1.0 12 | with: 13 | repo-token: ${{ secrets.GITHUB_TOKEN }} 14 | stale-issue-message: | 15 | This issue has been automatically marked as stale because it has not had recent activity :sleeping: 16 | It will be closed in 30 days if no further activity occurs. To unstale this issue, add a comment with detailed explanation. 17 | Thank you for your contributions :heart: 18 | stale-pr-message: | 19 | This pull request has been automatically marked as stale because it has not had recent activity :sleeping: 20 | It will be closed in 30 days if no further activity occurs. To unstale this pull request, add a comment with detailed explanation. 
21 | Thank you for your contributions :heart: 22 | days-before-stale: 60 23 | days-before-close: 30 24 | stale-issue-label: stale 25 | stale-pr-label: stale 26 | exempt-issue-label: keep-open 27 | exempt-pr-label: keep-open -------------------------------------------------------------------------------- /src/samples/blueprints/timersConflicting.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "timerConflicting", 3 | description: "Process that expires before the userTask timer run out", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "START", 10 | type: "Start", 11 | name: "Start process", 12 | next: "USERTASK", 13 | parameters: { 14 | input_schema: {}, 15 | timeout: 5 16 | }, 17 | lane_id: "1", 18 | }, 19 | { 20 | id: "USERTASK", 21 | type: "UserTask", 22 | name: "UserTask with higher duration", 23 | next: "END", 24 | lane_id: "1", 25 | parameters: { 26 | action: "do something", 27 | input: {}, 28 | timeout: 10 29 | }, 30 | }, 31 | { 32 | id: "END", 33 | type: "Finish", 34 | name: "Finish process", 35 | next: null, 36 | lane_id: "1", 37 | }, 38 | ], 39 | lanes: [ 40 | { 41 | id: "1", 42 | name: "the_only_lane", 43 | rule: { $js: "() => true" }, 44 | }, 45 | ], 46 | environment: {}, 47 | }, 48 | }; 49 | -------------------------------------------------------------------------------- /.github/workflows/pr-validation.yml: -------------------------------------------------------------------------------- 1 | name: Node.js CI 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - master 7 | - development 8 | 9 | env: 10 | POSTGRES_USER: postgres 11 | POSTGRES_PASSWORD: postgres 12 | POSTGRES_DB: workflow 13 | POSTGRES_HOST: localhost 14 | POSTGRES_POST: 5432 15 | 16 | jobs: 17 | test: 18 | runs-on: ubuntu-latest 19 | services: 20 | postgres: 21 | image: postgres 22 | env: 23 | POSTGRES_USER: postgres 24 | POSTGRES_PASSWORD: postgres 25 | POSTGRES_DB: workflow 26 | 
ports: 27 | - 5432:5432 28 | options: >- 29 | --health-cmd pg_isready 30 | --health-interval 10s 31 | --health-timeout 5s 32 | --health-retries 5 33 | steps: 34 | - uses: actions/checkout@v3 35 | - uses: actions/setup-node@v3 36 | with: 37 | node-version: 16 38 | - run: npm install 39 | - name: run migrations 40 | run: npm run migrations:local 41 | - name: run seeds 42 | run: npm run seeds:local 43 | - name: start tests 44 | run: npm run tests 45 | - name: Upload coverage to Codecov 46 | uses: codecov/codecov-action@v3 -------------------------------------------------------------------------------- /src/samples/blueprints/timersConflicting2.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "timerConflicting2", 3 | description: "Process that expires before the intermediate timer run out", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "START", 10 | type: "Start", 11 | name: "Start process", 12 | next: "TIMER", 13 | parameters: { 14 | input_schema: {}, 15 | timeout: 5 16 | }, 17 | lane_id: "1", 18 | }, 19 | { 20 | id: "TIMER", 21 | type: "SystemTask", 22 | category: "timer", 23 | name: "Timer node with higher duration than process", 24 | next: "END", 25 | lane_id: "1", 26 | parameters: { 27 | input: {}, 28 | duration: "PT10S", 29 | }, 30 | }, 31 | { 32 | id: "END", 33 | type: "Finish", 34 | name: "Finish process", 35 | next: null, 36 | lane_id: "1", 37 | }, 38 | ], 39 | lanes: [ 40 | { 41 | id: "1", 42 | name: "the_only_lane", 43 | rule: { $js: "() => true" }, 44 | }, 45 | ], 46 | environment: {}, 47 | }, 48 | }; 49 | -------------------------------------------------------------------------------- /.releaserc.json: -------------------------------------------------------------------------------- 1 | { 2 | "branches": [ 3 | { "name": "master" }, 4 | { "name": "development", "channel": "rc", "prerelease": true }, 5 | { "name": "beta", "prerelease": true }, 
6 | { "name": "alpha", "prerelease": true } 7 | ], 8 | "plugins": [ 9 | "@semantic-release/npm", 10 | [ 11 | "@semantic-release/release-notes-generator", 12 | { 13 | "preset": "conventionalcommits" 14 | } 15 | ], 16 | [ 17 | "@semantic-release/commit-analyzer", 18 | { 19 | "preset": "conventionalcommits" 20 | } 21 | ], 22 | [ 23 | "@semantic-release/changelog", 24 | { 25 | "changelogFile": "CHANGELOG.md" 26 | } 27 | ], 28 | [ 29 | "@semantic-release/github", 30 | { 31 | "assets": [ 32 | "CHANGELOG.md" 33 | ] 34 | } 35 | ], 36 | [ 37 | "@semantic-release/git", 38 | { 39 | "assets": [ 40 | "CHANGELOG.md" 41 | ] 42 | } 43 | ] 44 | ] 45 | } -------------------------------------------------------------------------------- /src/samples/blueprints/timersTimeoutProcess.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "timersTimeoutProcess", 3 | description: "Sample implementation of a process with expiration using timeout notation", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "START", 10 | type: "Start", 11 | name: "Start process with expiration", 12 | next: "USERTASK", 13 | parameters: { 14 | input_schema: {}, 15 | timeout: 10 16 | }, 17 | lane_id: "1", 18 | }, 19 | { 20 | id: "USERTASK", 21 | type: "UserTask", 22 | name: "User Task just to keep the process waiting until expiration", 23 | next: "END", 24 | lane_id: "1", 25 | parameters: { 26 | action: "just wait", 27 | input: {}, 28 | }, 29 | }, 30 | { 31 | id: "END", 32 | type: "Finish", 33 | name: "Finish process", 34 | next: null, 35 | lane_id: "1", 36 | }, 37 | ], 38 | lanes: [ 39 | { 40 | id: "1", 41 | name: "the_only_lane", 42 | rule: ["fn", ["&", "args"], true], 43 | }, 44 | ], 45 | environment: {}, 46 | }, 47 | }; 48 | -------------------------------------------------------------------------------- /src/samples/blueprints/timersDurationProcess.js: 
-------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "timersDurationProcess", 3 | description: "Sample implementation of a process with expiration using duration notation", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "START", 10 | type: "Start", 11 | name: "Start process with expiration", 12 | next: "USERTASK", 13 | parameters: { 14 | input_schema: {}, 15 | duration: "PT10S" 16 | }, 17 | lane_id: "1", 18 | }, 19 | { 20 | id: "USERTASK", 21 | type: "UserTask", 22 | name: "User Task just to keep the process waiting until expiration", 23 | next: "END", 24 | lane_id: "1", 25 | parameters: { 26 | action: "just wait", 27 | input: {}, 28 | }, 29 | }, 30 | { 31 | id: "END", 32 | type: "Finish", 33 | name: "Finish process", 34 | next: null, 35 | lane_id: "1", 36 | }, 37 | ], 38 | lanes: [ 39 | { 40 | id: "1", 41 | name: "the_only_lane", 42 | rule: ["fn", ["&", "args"], true], 43 | }, 44 | ], 45 | environment: {}, 46 | }, 47 | }; 48 | -------------------------------------------------------------------------------- /src/samples/grpcdescriptor.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | descriptor: { 3 | "file": [ 4 | { 5 | "name": "test/rpc.proto", 6 | "messageType": [ 7 | { 8 | "name": "HelloRequest", 9 | "field": [ 10 | { 11 | "name": "greeting", 12 | "number": 1, 13 | "label": "LABEL_OPTIONAL", 14 | "type": "TYPE_STRING", 15 | "jsonName": "greeting" 16 | } 17 | ] 18 | }, 19 | { 20 | "name": "HelloResponse", 21 | "field": [ 22 | { 23 | "name": "reply", 24 | "number": 1, 25 | "label": "LABEL_OPTIONAL", 26 | "type": "TYPE_STRING", 27 | "jsonName": "reply" 28 | } 29 | ] 30 | } 31 | ], 32 | "service": [ 33 | { 34 | "name": "HelloService", 35 | "method": [ 36 | { 37 | "name": "SayHello", 38 | "inputType": ".HelloRequest", 39 | "outputType": ".HelloResponse" 40 | } 41 | ] 42 | } 43 | ], 44 | "syntax": 
const { getCockpit } = require('../engine');
const { logger } = require('../utils/logger');

/**
 * Lists workflows together with per-status process counts.
 * Filters are taken verbatim from the query string.
 */
async function fetchWorkflowsWithProcessStatusCount(ctx, next) {
  logger.verbose('[KW] Called fetchWorkflowsWithProcessStatusCount');
  const filters = ctx.query;

  const workflows = await getCockpit().fetchWorkflowsWithProcessStatusCount(filters);

  ctx.status = 200;
  ctx.body = { workflows };

  return next();
}

/**
 * Overwrites the state of the process in `ctx.params.id` with the request
 * body and returns the serialized result.
 */
async function setProcessState(ctx, next) {
  logger.verbose('[KW] Called setProcessState');

  const processId = ctx.params.id;
  const stateData = ctx.request.body;

  const result = await getCockpit().setProcessState(processId, stateData);

  ctx.status = 200;
  ctx.body = result.serialize();

  return next();
}

/**
 * Resumes a pending process; responds 202 because execution continues
 * asynchronously in the engine.
 */
async function runPendingProcess(ctx, next) {
  logger.verbose('[KW] Called runPendingProcess');

  const processId = ctx.params.id;
  const actorData = ctx.request.body;

  await getCockpit().runPendingProcess(processId, actorData);

  ctx.status = 202;

  return next();
}

module.exports.fetchWorkflowsWithProcessStatusCount = fetchWorkflowsWithProcessStatusCount;
module.exports.setProcessState = setProcessState;
module.exports.runPendingProcess = runPendingProcess;
ctx.state.actor_data; 9 | 10 | const engine = getEngine() 11 | 12 | const am = await engine.fetchActivityManager(amid, actor_data) 13 | if(!am) { 14 | ctx.status = 404; 15 | ctx.body = { 16 | message: "activity manager not found" 17 | } 18 | return next() 19 | } 20 | 21 | if(['completed', 'interrupted'].includes(am.activity_status)) { 22 | ctx.status = 409; 23 | ctx.body = { 24 | message: "activity manager cannot be expired", 25 | current_status: am.activity_status 26 | } 27 | return next() 28 | } 29 | 30 | const cockpit = getCockpit(); 31 | 32 | 33 | try { 34 | cockpit.expireActivityManager(amid, actor_data); 35 | ctx.status = 200; 36 | ctx.body = { 37 | message: "Expire command submitted, check process history for current status" 38 | }; 39 | } catch (e) { 40 | ctx.status = 400; 41 | ctx.body = { message: `Failed at ${e.message}`, error: e }; 42 | } 43 | 44 | return next(); 45 | } 46 | 47 | module.exports = { 48 | expire, 49 | } -------------------------------------------------------------------------------- /src/samples/blueprints/activitySchemaValidation.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "schemaValidation", 3 | description: "workflow to test activity schema validation", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "1", 10 | type: "Start", 11 | name: "Start node", 12 | next: "2", 13 | parameters: { 14 | input_schema: {}, 15 | }, 16 | lane_id: "1", 17 | }, 18 | { 19 | id: "2", 20 | type: "UserTask", 21 | name: "User Task node", 22 | next: "3", 23 | lane_id: "1", 24 | parameters: { 25 | action: "do something", 26 | input: {}, 27 | activity_schema: { 28 | type: 'object', 29 | properties: { 30 | date: { type: 'string', format: 'date'} 31 | }, 32 | required: ['date'] 33 | } 34 | }, 35 | }, 36 | { 37 | id: "3", 38 | type: "Finish", 39 | name: "Finish node", 40 | next: null, 41 | lane_id: "1", 42 | }, 43 | ], 44 | lanes: [ 45 | { 46 | id: 
"1", 47 | name: "the_only_lane", 48 | rule: ["fn", ["&", "args"], true], 49 | }, 50 | ], 51 | environment: {}, 52 | }, 53 | }; 54 | -------------------------------------------------------------------------------- /src/samples/blueprints/findProcess.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "find_process", 3 | description: "workflow for testing find process", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "1", 10 | type: "Start", 11 | name: "Start node", 12 | next: "2", 13 | parameters: { 14 | input_schema: { 15 | type: "object", 16 | properties: { 17 | entity_id: { type: "string" }, 18 | }, 19 | required: ["entity_id"], 20 | }, 21 | }, 22 | lane_id: "1", 23 | }, 24 | { 25 | id: "2", 26 | type: "systemTask", 27 | category: "findProcess", 28 | name: "Find Process", 29 | next: "3", 30 | lane_id: "1", 31 | parameters: { 32 | input: { 33 | entity_id: { $ref: "bag.entity_id" }, 34 | }, 35 | }, 36 | }, 37 | { 38 | id: "3", 39 | type: "Finish", 40 | name: "Finish node", 41 | next: null, 42 | lane_id: "1", 43 | }, 44 | ], 45 | lanes: [ 46 | { 47 | id: "1", 48 | name: "the_only_lane", 49 | rule: ["fn", ["&", "args"], true], 50 | }, 51 | ], 52 | environment: {}, 53 | }, 54 | }; 55 | -------------------------------------------------------------------------------- /src/tests/healthCheck.test.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | const axios = require("axios"); 3 | const { startServer } = require("../app"); 4 | const { delay } = require("./utils/auxiliar"); 5 | const { config } = require("./utils/requestConfig"); 6 | 7 | let server; 8 | 9 | const {db} = require("./utils/db") 10 | const fixtures = require("./utils/fixtures") 11 | 12 | beforeAll(() => { 13 | fixtures.createTestEngine(db); 14 | fixtures.createTestCockpit(db); 15 | 16 | server = startServer(3001); 17 | 
axios.defaults.baseURL = `${config.baseURL}/`; 18 | axios.defaults.headers = config.headers; 19 | axios.defaults.validateStatus = config.validateStatus; 20 | }); 21 | 22 | beforeEach(async () => { 23 | await delay(100); 24 | }); 25 | 26 | afterAll(async () => fixtures.tearDownEnvironment(server, db)); 27 | 28 | describe("GET /", () => { 29 | test("should return 200", async () => { 30 | const response = await axios.get("/"); 31 | 32 | expect(response.status).toBe(200); 33 | }); 34 | }); 35 | 36 | describe("GET /healthcheck", () => { 37 | test("should return 200", async () => { 38 | const response = await axios.get("/"); 39 | 40 | expect(response.status).toBe(200); 41 | expect(response.data).toBeDefined(); 42 | expect(response.data.version).toBeDefined(); 43 | expect(response.data.message).toBeDefined(); 44 | }); 45 | }); 46 | -------------------------------------------------------------------------------- /src/samples/blueprints/notifyUserTask.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "notify_task", 3 | description: "user notify task workflow", 4 | blueprint_spec: { 5 | requirements: [], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "1", 10 | type: "Start", 11 | name: "Start node", 12 | next: "2N", 13 | parameters: { 14 | input_schema: {}, 15 | }, 16 | lane_id: "1", 17 | }, 18 | { 19 | id: "2N", 20 | type: "UserTask", 21 | name: "User Task node", 22 | next: "3X", 23 | lane_id: "1", 24 | parameters: { 25 | activity_manager: "notify", 26 | action: "notify something", 27 | input: {}, 28 | }, 29 | }, 30 | { 31 | id: "3X", 32 | type: "UserTask", 33 | name: "User Task node", 34 | next: "4E", 35 | lane_id: "1", 36 | parameters: { 37 | action: "do something", 38 | input: {}, 39 | }, 40 | }, 41 | { 42 | id: "4E", 43 | type: "Finish", 44 | name: "Finish node", 45 | next: null, 46 | lane_id: "1", 47 | }, 48 | ], 49 | lanes: [ 50 | { 51 | id: "1", 52 | name: "the_only_lane", 53 | rule: ["fn", ["&", 
"args"], true], 54 | }, 55 | ], 56 | environment: {}, 57 | }, 58 | }; 59 | -------------------------------------------------------------------------------- /src/samples/blueprints/filterData.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "filter_data", 3 | description: "workflow for testing filterDataNode", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "1", 10 | type: "Start", 11 | name: "Start node", 12 | next: "2", 13 | parameters: { 14 | input_schema: { 15 | type: "object", 16 | properties: { 17 | data: { type: "array" }, 18 | primary_keys: { type: "object" }, 19 | }, 20 | required: ["data", "primary_keys"], 21 | }, 22 | }, 23 | lane_id: "1", 24 | }, 25 | { 26 | id: "2", 27 | type: "systemTask", 28 | category: "filterData", 29 | name: "Filter Data", 30 | next: "3", 31 | lane_id: "1", 32 | parameters: { 33 | input: { 34 | data: { $ref: "bag.data" }, 35 | primary_keys: { $ref: "bag.primary_keys" } 36 | }, 37 | }, 38 | }, 39 | { 40 | id: "3", 41 | type: "Finish", 42 | name: "Finish node", 43 | next: null, 44 | lane_id: "1", 45 | }, 46 | ], 47 | lanes: [ 48 | { 49 | id: "1", 50 | name: "the_only_lane", 51 | rule: ["fn", ["&", "args"], true], 52 | }, 53 | ], 54 | environment: {}, 55 | }, 56 | }; 57 | -------------------------------------------------------------------------------- /src/samples/blueprints/indexProcess.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "index_process", 3 | description: "workflow for testing indexNode", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "1", 10 | type: "Start", 11 | name: "Start node", 12 | next: "2", 13 | parameters: { 14 | input_schema: { 15 | type: "object", 16 | properties: { 17 | entity_type: { type: "string" }, 18 | entity_id: { type: "string" }, 19 | }, 20 | required: ["entity_type", 
"entity_id"], 21 | }, 22 | }, 23 | lane_id: "1", 24 | }, 25 | { 26 | id: "2", 27 | type: "systemTask", 28 | category: "createIndex", 29 | name: "Index Process", 30 | next: "3", 31 | lane_id: "1", 32 | parameters: { 33 | input: { 34 | entity_type: { $ref: "bag.entity_type" }, 35 | entity_id: { $ref: "bag.entity_id" }, 36 | }, 37 | }, 38 | }, 39 | { 40 | id: "3", 41 | type: "Finish", 42 | name: "Finish node", 43 | next: null, 44 | lane_id: "1", 45 | }, 46 | ], 47 | lanes: [ 48 | { 49 | id: "1", 50 | name: "the_only_lane", 51 | rule: ["fn", ["&", "args"], true], 52 | }, 53 | ], 54 | environment: {}, 55 | }, 56 | }; 57 | -------------------------------------------------------------------------------- /src/samples/blueprints/customNode.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "custom node workflow", 3 | description: "custom workflow", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "1", 10 | type: "Start", 11 | name: "Start node", 12 | next: "2", 13 | parameters: { 14 | input_schema: {}, 15 | }, 16 | lane_id: "1", 17 | }, 18 | { 19 | id: "2", 20 | type: "SystemTask", 21 | category: "CustomTask", 22 | name: "Custom task", 23 | next: "3", 24 | lane_id: "1", 25 | parameters: { 26 | input: {}, 27 | }, 28 | }, 29 | { 30 | id: "3", 31 | type: "UserTask", 32 | name: "User Task node", 33 | next: "4", 34 | lane_id: "1", 35 | parameters: { 36 | action: "do something", 37 | input: { 38 | internal_key: "result.custom_data", 39 | outr: 'valor', 40 | qualqer: 'coisa' 41 | }, 42 | }, 43 | }, 44 | { 45 | id: "4", 46 | type: "Finish", 47 | name: "Finish node", 48 | next: null, 49 | lane_id: "1", 50 | }, 51 | ], 52 | lanes: [ 53 | { 54 | id: "1", 55 | name: "the_only_lane", 56 | rule: ["fn", ["&", "args"], true], 57 | }, 58 | ], 59 | environment: {}, 60 | }, 61 | }; 62 | -------------------------------------------------------------------------------- 
/db/seeds/blueprints/test_subprocess_child.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | requirements: ["core"], 3 | prepare: [], 4 | environment: {}, 5 | lanes: [ 6 | { 7 | id: "free", 8 | name: "free for all", 9 | rule: ["fn", ["&", "args"], true] 10 | } 11 | ], 12 | nodes: [ 13 | { 14 | id: "1", 15 | name: "Start child process", 16 | type: "Start", 17 | lane_id: "free", 18 | next: "2", 19 | parameters: { 20 | input_schema: {} 21 | } 22 | }, 23 | { 24 | id: "2", 25 | name: "timer wait 15 seconds", 26 | type: "systemTask", 27 | category: "timer", 28 | lane_id: "free", 29 | next: "3", 30 | parameters: { 31 | input: {}, 32 | timeout: 15 33 | } 34 | }, { 35 | id: "3", 36 | name: "bag some data", 37 | type: "systemTask", 38 | category: "setToBag", 39 | lane_id: "free", 40 | next: '99', 41 | parameters: { 42 | input: { 43 | order: { 44 | status: "pending", 45 | qty: 2, 46 | flavors: ["peperoni","supreme"], 47 | comments: "whatever" 48 | } 49 | } 50 | } 51 | }, 52 | { 53 | id: "99", 54 | name: "Finish - child", 55 | type: "Finish", 56 | lane_id: "free", 57 | next: null, 58 | parameters: { 59 | input: { 60 | order: { $ref: 'bag.order' } 61 | } 62 | } 63 | } 64 | ] 65 | } -------------------------------------------------------------------------------- /src/samples/blueprints/timersTimeout.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "timersTimeout", 3 | description: "Sample implementation of timers using timeout notation", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "START", 10 | type: "Start", 11 | name: "Start process", 12 | next: "TIMER", 13 | parameters: { 14 | input_schema: {}, 15 | }, 16 | lane_id: "1", 17 | }, 18 | { 19 | id: "TIMER", 20 | type: "SystemTask", 21 | category: "timer", 22 | name: "Intermediate Event Timer with timeout notation", 23 | next: "USERTASK", 24 | lane_id: 
require("dotenv").config();
const axios = require("axios");
const actualTimeout = setTimeout;
const { logger } = require("../../utils/logger");

// Promise-based sleep built on the captured setTimeout, so it keeps working
// even when test frameworks replace the global timer functions.
const wait = (ms = 2000) => new Promise((resolve) => actualTimeout(resolve, ms));

/**
 * Test helper that polls a running flowbuild instance for process state.
 * Caches the last fetched state/status/node id on the instance.
 */
class World {
  constructor(obj) {
    this.baseURL = obj.baseURL;
    this.headers = obj.headers;
  }

  /**
   * Polls the process until it reaches a resting status ("waiting",
   * "error" or "finished"). With the default timeout the delay happens
   * before each poll; otherwise the poll happens first and the delay is
   * timeout * 1200 ms afterwards.
   * NOTE(review): the * 1200 factor looks unusual for a ms value — confirm
   * the intended unit with the callers.
   */
  async waitProcessStop(pid, timeout = 1000) {
    logger.info(`waitProcessStop ${pid}`);
    const expectedStatus = ["waiting", "error", "finished"];
    while (true) {
      if (timeout === 1000) {
        await wait(timeout);
        await this.getCurrentState(pid);
      } else {
        await this.getCurrentState(pid);
        await wait(timeout * 1200);
      }
      logger.debug(`process status: ${this.currentStatus}`);
      if (expectedStatus.includes(this.currentStatus)) {
        break;
      }
    }
    return true;
  }

  /**
   * Fetches the current state of a process and stores state, status and
   * node id on the instance. Always resolves to true on success.
   */
  async getCurrentState(pid) {
    logger.info(`getCurrentState ${pid}`);
    const response = await axios({
      method: "get",
      url: `/processes/${pid}/state`,
      baseURL: this.baseURL,
      headers: this.headers,
    });
    logger.debug("getCurrentState response");
    this.currentState = response.data;
    this.currentStatus = response.data.state.status;
    this.nodeId = response.data.state.node_id;
    return true;
  }
}

module.exports = { World };
`flowBuild_${nanoid(10)}`; 11 | 12 | logger.info(`[mqtt] HOST: ${process.env.MQTT_HOST}, client: ${clientId}, protocol: ${process.env.MQTT_PROTOCOL}`); 13 | 14 | client = mqtt.connect({ 15 | hostname: process.env.MQTT_HOST, 16 | port: process.env.MQTT_PORT, 17 | protocol: process.env.MQTT_PROTOCOL || "ws", 18 | path: process.env.MQTT_PATH || "/mqtt", 19 | clientId: clientId, 20 | username: process.env.MQTT_USERNAME, 21 | password: process.env.MQTT_PASSWORD 22 | }); 23 | 24 | logger.info("[mqtt] connected to MQTT Broker"); 25 | } catch (error) { 26 | logger.error(error); 27 | } 28 | } 29 | 30 | async function publishMessage(topic, message) { 31 | let response; 32 | 33 | logger.info(`[mqtt] publishing message to topic ${topic}`); 34 | if (client) { 35 | response = await client.publish(topic, JSON.stringify(message), { qos: 1 }); 36 | logger.verbose(`[mqtt] Broker messageId: ${response.messageId} on topic ${topic}`); 37 | } else { 38 | logger.info("[mqtt] No client"); 39 | } 40 | 41 | return response; 42 | } 43 | 44 | function getClient() { 45 | return client; 46 | } 47 | 48 | module.exports = { 49 | getClient, 50 | publishMessage: publishMessage, 51 | connect: connect, 52 | }; 53 | -------------------------------------------------------------------------------- /db/seeds/blueprints/test_subprocess_parent.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | requirements: ["core"], 3 | prepare: [], 4 | environment: {}, 5 | lanes: [ 6 | { 7 | id: "free", 8 | name: "free for all", 9 | rule: ["fn", ["&", "args"], true] 10 | } 11 | ], 12 | nodes: [ 13 | { 14 | id: "1", 15 | name: "Start parent process", 16 | type: "Start", 17 | lane_id: "free", 18 | next: "2", 19 | parameters: { 20 | input_schema: {} 21 | } 22 | }, { 23 | id: "2", 24 | name: "bag some data", 25 | type: "systemTask", 26 | category: "setToBag", 27 | lane_id: "free", 28 | next: "3", 29 | parameters: { 30 | input: { 31 | someData: "some data" 32 | } 33 
| } 34 | }, { 35 | id: "3", 36 | name: "start subProcess", 37 | type: "subProcess", 38 | lane_id: "free", 39 | next: '4', 40 | parameters: { 41 | input: {}, 42 | workflow_name: 'test_subprocess_child', 43 | valid_response: 'finished', 44 | actor_data: { $ref: 'actor_data' } 45 | } 46 | }, { 47 | id: "4", 48 | name: "bag subprocess data", 49 | type: "systemTask", 50 | category: "setToBag", 51 | lane_id: "free", 52 | next: "99", 53 | parameters: { 54 | input: { 55 | subProcessData: { $ref: "result" } 56 | } 57 | } 58 | }, { 59 | id: "99", 60 | name: "Finish - parent", 61 | type: "Finish", 62 | lane_id: "free", 63 | next: null 64 | } 65 | ] 66 | } -------------------------------------------------------------------------------- /src/samples/blueprints/testTreeLeaf.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "testTreeLeaf", 3 | description: "test process dependency", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "START", 10 | type: "Start", 11 | name: "Start node", 12 | parameters: { 13 | input_schema: {}, 14 | }, 15 | next: "START-PROCESS", 16 | lane_id: "1", 17 | }, 18 | { 19 | id: "START-PROCESS", 20 | type: "SystemTask", 21 | category: "startProcess", 22 | name: "subprocess start", 23 | next: "SUB-PROCESS", 24 | lane_id: "1", 25 | parameters: { 26 | workflow_name: "pizza1", 27 | actor_data: { $ref: "actor_data" }, 28 | input: {} 29 | } 30 | }, 31 | { 32 | id: "SUB-PROCESS", 33 | type: "SubProcess", 34 | name: "subprocess start", 35 | next: "END", 36 | lane_id: "1", 37 | parameters: { 38 | workflow_name: "test_subprocess_parent", 39 | valid_response: "finished", 40 | actor_data: { $ref: "actor_data" }, 41 | input: {} 42 | } 43 | }, 44 | { 45 | id: "END", 46 | type: "Finish", 47 | name: "Finish node", 48 | next: null, 49 | lane_id: "1", 50 | }, 51 | ], 52 | lanes: [ 53 | { 54 | id: "1", 55 | name: "the_only_lane", 56 | rule: ["fn", ["&", "args"], 
true], 57 | }, 58 | ], 59 | environment: {}, 60 | }, 61 | }; 62 | -------------------------------------------------------------------------------- /src/services/broker/kafka.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | const { Kafka } = require("kafkajs"); 3 | const { logger } = require("../../utils/logger"); 4 | 5 | const sasl = { 6 | mechanism: "plain", 7 | username: process.env.KAFKA_CLUSTER_API_KEY, 8 | password: process.env.KAFKA_API_SECRET, 9 | }; 10 | 11 | const ssl = !!sasl; 12 | 13 | let client; 14 | async function connect() { 15 | try { 16 | logger.info("trying to connect to Kafka"); 17 | const kafka = new Kafka({ 18 | clientID: "flowbuild", 19 | brokers: [process.env.KAFKA_BOOTSTRAP_SERVER], 20 | ssl, 21 | sasl, 22 | }); 23 | client = kafka.producer(); 24 | await client.connect(); 25 | } catch (error) { 26 | logger.error(error); 27 | } 28 | } 29 | 30 | async function publishMessage({ topic, message, key }) { 31 | logger.info(`[kafka] publishing message to topic ${topic}`); 32 | if (!client) { 33 | logger.info("[kafka] No client"); 34 | return undefined; 35 | } 36 | const messageBuf = Buffer.from(JSON.stringify(message)); 37 | const result = await client.send({ 38 | topic, 39 | messages: [ 40 | { 41 | key, 42 | value: messageBuf, 43 | timestamp: Date.now(), 44 | }, 45 | ], 46 | }); 47 | //const result = await client.produce(topic, -1, messageBuf, key, Date.now()) 48 | return result; 49 | } 50 | 51 | function getClient() { 52 | return client; 53 | } 54 | 55 | async function disconnect() { 56 | if (client._isConnected) { 57 | await client.die(); 58 | } 59 | } 60 | 61 | module.exports = { 62 | getClient, 63 | publishMessage: publishMessage, 64 | connect: connect, 65 | disconnect, 66 | }; 67 | -------------------------------------------------------------------------------- /src/controllers/package.js: -------------------------------------------------------------------------------- 1 | 
const { getCockpit } = require("../engine"); 2 | const { logger } = require("../utils/logger"); 3 | 4 | const savePackage = async (ctx, next) => { 5 | logger.debug("called savePackage"); 6 | const cockpit = getCockpit(); 7 | const input_package = ctx.request.body; 8 | try { 9 | const package_ = await cockpit.savePackage( 10 | input_package.name, 11 | input_package.description, 12 | input_package.code 13 | ); 14 | ctx.status = 201; 15 | ctx.body = { 16 | package_id: package_.id, 17 | package_url: `${ctx.header.host}${ctx.url}/${package_.id}`, 18 | }; 19 | } catch (err) { 20 | ctx.status = 400; 21 | ctx.body = { error: err }; 22 | } 23 | 24 | return next(); 25 | }; 26 | 27 | const fetchPackage = async (ctx, next) => { 28 | logger.debug("called fetchPackage"); 29 | const cockpit = getCockpit(); 30 | const package_id = ctx.params.id; 31 | const package_ = await cockpit.fetchPackage(package_id); 32 | if (package_) { 33 | ctx.status = 200; 34 | ctx.body = package_.serialize(); 35 | } else { 36 | ctx.status = 404; 37 | } 38 | 39 | return next(); 40 | }; 41 | 42 | const deletePackage = async (ctx, next) => { 43 | logger.debug("called deletePackage"); 44 | const cockpit = getCockpit(); 45 | const package_id = ctx.params.id; 46 | const num_deleted = await cockpit.deletePackage(package_id); 47 | if (num_deleted == 0) { 48 | ctx.status = 404; 49 | } else { 50 | ctx.status = 202; 51 | ctx.body = { 52 | deleted: num_deleted 53 | } 54 | } 55 | 56 | return next(); 57 | }; 58 | 59 | module.exports = { 60 | savePackage, 61 | fetchPackage, 62 | deletePackage, 63 | }; 64 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.9" 2 | 3 | services: 4 | postgres: 5 | image: postgres:14.5 6 | container_name: flowbuild_db 7 | restart: always 8 | env_file: 9 | - ./.env.docker 10 | ports: 11 | - 5432:5432 12 | 13 | app: 14 | image: node:18.12 15 | 
    # Shared env file keeps DB credentials in one place for app and postgres.
    env_file:
      - ./.env.docker
    container_name: flowbuild_app
    restart: on-failure:10 # necessary due to possibility of postgres not being ready when service runs
    depends_on:
      - postgres
    ports:
      - 3000:3000
    volumes:
      # Mount the repo, but keep node_modules inside the container volume so
      # host-installed modules don't shadow the container's.
      - .:/usr/app
      - /usr/app/node_modules
    working_dir: /usr/app
    #command: ./scripts/start_dev.sh
    #command: bash -c " npm update && npm install knex -g && npm install nodemon -g && npm install && npm rebuild && npm run migrations && npm run seeds && npm run start "
    command: bash -c " npm install && npm audit fix && npm run migrations && npm run seeds && npm run start "
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:3000/healthcheck"]
      interval: 60s
      timeout: 10s
      retries: 3
      start_period: 60s

  # MQTT broker (HiveMQ): 1883 MQTT, 8000 websockets, 8081 admin UI.
  socket:
    image: hivemq/hivemq4:latest
    container_name: flowbuild_mqtt
    restart: on-failure:10
    ports:
      - 8081:8080
      - 1883:1883
      - 8000:8000

  # optional broker
  # rabbitmq:
  #   image: rabbitmq:3-management-alpine
  #   container_name: rabbitmq
  #   ports:
  #     - 5672:5672
  #     - 15672:15672

  redis:
    image: redis:7-bullseye
    container_name: flowbuild_redis
    restart: always
    ports:
      - 6379:6379
/**
 * Returns usage statistics for the engine's node types and categories: for
 * each known type/category, how many nodes across the actor's workflows use it.
 * Responds 200 with {types: [...], categories: [...]}.
 */
const getNodes = async (ctx, next) => {
  const types = NodeUtils.getNodeTypes();
  const categories = NodeUtils.getNodeCategories();

  const cockpit = getCockpit();
  const { actor_data } = ctx.state;
  const workflows = await cockpit.getWorkflowsForActor(actor_data);
  const deployedNodes = workflows.flatMap((wf) => wf.blueprint_spec.nodes);

  // Count deployed nodes matching a predicate (case-insensitive match below).
  const countWhere = (predicate) => deployedNodes.filter(predicate).length;

  ctx.status = 200;
  ctx.body = {
    types: Object.keys(types).map((type) => ({
      type,
      nodes: countWhere((node) => node.type.toLowerCase() === type),
    })),
    categories: Object.keys(categories).map((category) => ({
      category,
      nodes: countWhere((node) => node.category?.toLowerCase() === category),
    })),
  };
  return next();
};
/**
 * Resolves a node spec (request body) to its engine implementation and
 * returns that node's JSON schema.
 * Responds 400 when the spec is structurally unusable, 404 when the engine
 * does not recognize the node type/category.
 */
const fetchNode = async (ctx, next) => {
  const body = ctx.request.body;

  // ROBUSTNESS FIX: a body without `type` used to crash with a TypeError on
  // `body.type.toLowerCase()`; reject it explicitly instead.
  if (!body || typeof body.type !== "string") {
    ctx.status = 400;
    ctx.body = {
      message: "Invalid Request Body",
      error: [{
        message: "node must have required property type"
      }]
    };
    return next();
  }

  // systemTask nodes are dispatched by category; without one the engine
  // cannot resolve an implementation.
  if (body.type.toLowerCase() === 'systemtask' && !body.category) {
    ctx.status = 400;
    ctx.body = {
      message: "Invalid Request Body",
      error: [{
        message: "systemTasks must have required property category"
      }]
    };
    return next();
  }

  try {
    const node = getNode(ctx.request.body);
    ctx.status = 200;
    ctx.body = {
      schema: node.constructor.schema
    };
    return next();
  } catch (e) {
    console.log(e);
    ctx.status = 404;
    ctx.body = {
      message: e.toString()
    };
  }
};
require("dotenv").config();
const { createLogger, format, transports } = require("winston");
const broker = require("../services/broker/index");
const { ENGINE_LOGS_BROKER } = process.env;

// Console logger dedicated to engine events; every line is labelled "ENGINE"
// and formatted as "<timestamp> <level>: <message>".
const engineLogger = createLogger({
  transports: [
    new transports.Console({
      level: process.env.ENGINE_LOG_LEVEL || "info",
      format: format.combine(
        format.colorize(),
        format.timestamp(),
        format.label({ label: "ENGINE", message: true }),
        format.align(),
        format.printf((info) => `${info.timestamp} ${info.level}: ${info.message}`)
      ),
    }),
  ],
});

// Wires engine events to the logger and (optionally) to a broker topic.
// FIXME(review): `emitter` is referenced throughout but is never defined or
// imported in this file — startLogger will throw a ReferenceError unless a
// global emitter is installed elsewhere (presumably the engine's event
// emitter). Confirm and import it explicitly.
const startLogger = () => {
  engineLogger.info("startLogger");

  let logLevel = "silly";
  let logMessage;

  // When enabled, mirror every engine event to the configured broker
  // (defaults to MQTT) on the "/logs" topic.
  if(process.env.PUBLISH_ENGINE_LOGS === "true") {
    emitter.onAny(function(event, message, variables) {
      const topic = `/logs`;
      const msg = {
        event,
        message,
        variables,
        timestamp: new Date(),
      };

      broker.publishMessage({ topic, message: msg }, ENGINE_LOGS_BROKER || "MQTT");
    });
  }

  emitter.on("PROCESS.START_NODE_RUN", (message) => {
    logLevel = "verbose";
    logMessage = message;
    engineLogger[logLevel](logMessage);
  });

  emitter.on("EXECUTION_LOOP.ROLLBACK", (message) => {
    logLevel = "warn";
    logMessage = message;
    engineLogger[logLevel](logMessage);
  });

  // NOTE(review): event name "ENGINE.CONTRUCTOR" looks misspelled
  // ("CONSTRUCTOR") but must match whatever the engine actually emits —
  // verify against the engine before changing it.
  emitter.on("ENGINE.CONTRUCTOR", (message) => {
    logLevel = "warn";
    logMessage = message;
    engineLogger[logLevel](logMessage);
  });

};

module.exports = {
  startLogger,
  engineLogger,
};
// Sample blueprint: defines a lisp function at prepare time, invokes it from a
// ScriptTask, pauses on a UserTask, then copies the result into the bag.
module.exports = {
  name: "user_script_task workflow",
  description: "function lisp workflow",
  blueprint_spec: {
    requirements: ["core", "test_package"],
    // Prepare-time lisp: defines `test_function`, used by node "2" below.
    prepare: [
      "do",
      ["def", "test_function", ["fn", ["&", "args"], { new_bag: "New Bag" }]],
      null,
    ],
    nodes: [
      {
        id: "1",
        type: "Start",
        name: "Start node",
        next: "2",
        parameters: {
          input_schema: {},
        },
        lane_id: "1",
      },
      {
        // Calls the prepared `test_function`; its return value becomes `result`.
        id: "2",
        type: "ScriptTask",
        name: "Service node",
        next: "3",
        lane_id: "1",
        parameters: {
          input: {},
          script: {
            function: ["fn", ["&", "args"], ["test_function"]],
          },
        },
      },
      {
        id: "3",
        type: "UserTask",
        name: "User Task node",
        next: "4",
        lane_id: "1",
        parameters: {
          action: "do something",
          input: {},
        },
      },
      {
        // Persists `result.any` into the process bag.
        id: "4",
        type: "SystemTask",
        category: "SetToBag",
        name: "Set to bag node",
        next: "5",
        lane_id: "1",
        parameters: {
          input: {
            any: { $ref: "result.any" },
          },
        },
      },
      {
        id: "5",
        type: "Finish",
        name: "Finish node",
        next: null,
        lane_id: "1",
      },
    ],
    lanes: [
      {
        id: "1",
        name: "simpleton",
        rule: ["fn", ["&", "args"], true],
      },
    ],
    environment: {},
  },
};
-------------------------------------------------------------------------------- /src/tests/utils/task_requests.js: -------------------------------------------------------------------------------- 1 | const request = require('supertest'); 2 | 3 | const taskRequests = (server, auth_header) => { 4 | return { 5 | getAvailableForActor: async (filters = {}) => { 6 | return request(server) 7 | .get("/processes/available") 8 | .query(filters) 9 | .set(...auth_header); 10 | }, 11 | getDoneForActor: async (filters = {}) => { 12 | return request(server) 13 | .get("/processes/done") 14 | .query(filters) 15 | .set(...auth_header); 16 | }, 17 | getActivityById: async (activity_manager_id) => { 18 | return request(server) 19 | .get(`/processes/activityManager/${activity_manager_id}`) 20 | .set(...auth_header); 21 | }, 22 | getActivityForActor: async (process_id) => { 23 | return request(server) 24 | .get(`/processes/${process_id}/activity`) 25 | .set(...auth_header); 26 | }, 27 | commitActivity: async (process_id, external_input) => { 28 | return request(server) 29 | .post(`/processes/${process_id}/commit`) 30 | .send(external_input) 31 | .set(...auth_header); 32 | }, 33 | pushActivity: async (process_id) => { 34 | return request(server) 35 | .post(`/processes/${process_id}/push`) 36 | .send({}) 37 | .set(...auth_header); 38 | }, 39 | submitActivity: async (activity_manager_id, external_input) => { 40 | return request(server) 41 | .post(`/activity_manager/${activity_manager_id}/submit`) 42 | .send(external_input) 43 | .set(...auth_header); 44 | }, 45 | commitActivityByActivityManager: async (activity_manager_id, external_input) => { 46 | return request(server) 47 | .post(`/activity_manager/${activity_manager_id}/commit`) 48 | .send(external_input) 49 | .set(...auth_header); 50 | } 51 | }; 52 | }; 53 | 54 | module.exports = { 55 | taskRequests 56 | }; 57 | -------------------------------------------------------------------------------- /src/services/broker/rabbitMQ.js: 
-------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | const amqp = require("amqplib"); 3 | const { logger } = require("../../utils/logger"); 4 | const { BROKER_PASSWORD, BROKER_USERNAME, BROKER_HOST, BROKER_QUEUE } = process.env; 5 | 6 | let channel; 7 | async function connect() { 8 | try { 9 | logger.info("trying to connect to RABBITMQ Broker"); 10 | logger.info(`[rabbitMQ] HOST: ${BROKER_HOST}`); 11 | const conn = await amqp.connect(`amqp://${BROKER_USERNAME}:${BROKER_PASSWORD}@${BROKER_HOST}`); 12 | 13 | await createQueue(conn); 14 | 15 | channel = await conn.createChannel(); 16 | 17 | logger.info('[rabbitMQ] connected to RABBITMQ Broker'); 18 | } catch (error) { 19 | logger.error(`[rabbitMQ] Error at RABBITMQ connect ${JSON.stringify(error)}`); 20 | } 21 | } 22 | 23 | async function publishMessage(content) { 24 | logger.info('[rabbitMQ] Called publishMessage'); 25 | const message = { 26 | input: { 27 | activityManagerId: content?._id, 28 | processId: content?._process_id, 29 | ...content?._props?.result, 30 | }, 31 | action: content?._props?.action, 32 | schema: content?._parameters, 33 | }; 34 | 35 | if (channel) { 36 | logger.info(`[rabbitMQ] publishing message to queue ${BROKER_QUEUE}`); 37 | await channel.sendToQueue(BROKER_QUEUE, Buffer.from(JSON.stringify(message))); 38 | logger.verbose(`[rabbitMQ] Broker message on queue ${BROKER_QUEUE}`); 39 | } else { 40 | logger.info("[rabbitMQ] No channel to publish message"); 41 | } 42 | } 43 | 44 | function getChannel() { 45 | return channel; 46 | } 47 | 48 | async function createQueue(conn) { 49 | const channelCreated = await conn.createChannel(); 50 | 51 | await channelCreated.assertQueue(BROKER_QUEUE, { 52 | durable: true, 53 | arguments: { 54 | 'x-queue-type': 'classic', 55 | }, 56 | }); 57 | } 58 | 59 | module.exports = { 60 | publishMessage, 61 | connect, 62 | getChannel, 63 | } 
-------------------------------------------------------------------------------- /src/controllers/diagram.js: -------------------------------------------------------------------------------- 1 | const { getEngine } = require('../engine'); 2 | const { buildXmlDiagram, buildBlueprintFromBpmn } = require('@flowbuild/nodejs-diagram-builder'); 3 | const { logger } = require('../utils/logger'); 4 | const { validate } = require("uuid"); 5 | const getRawBody = require('raw-body'); 6 | 7 | const buildDiagram = async (ctx, next) => { 8 | logger.verbose('Called buildDiagram'); 9 | const engine = getEngine(); 10 | let blueprint = ctx.request.body; 11 | const workflowId = blueprint?.workflow_id; 12 | let diagram; 13 | 14 | if(workflowId) { 15 | const is_valid = validate(workflowId); 16 | if (!is_valid) { 17 | ctx.status = 400; 18 | ctx.body = { 19 | message: "Invalid uuid", 20 | }; 21 | return; 22 | } 23 | const workflow = await engine.fetchWorkflow(workflowId); 24 | if(!workflow) { 25 | ctx.status = 404; 26 | ctx.body = { message: "No such workflow" }; 27 | return; 28 | } 29 | 30 | blueprint.name = workflow._name; 31 | blueprint.description = workflow._description; 32 | blueprint.blueprint_spec = workflow._blueprint_spec; 33 | } 34 | 35 | try { 36 | diagram = await buildXmlDiagram(blueprint) 37 | ctx.status = 200; 38 | ctx.body = diagram; 39 | } catch (err) { 40 | ctx.status = 400; 41 | ctx.body = { message: `Failed at ${err.message}`, error: err }; 42 | } 43 | 44 | return next(); 45 | }; 46 | 47 | const buildBlueprint = async (ctx, next) => { 48 | logger.verbose('Called buildBlueprint'); 49 | 50 | const diagram = await getRawBody(ctx.req) 51 | 52 | try { 53 | const result = await buildBlueprintFromBpmn(diagram); 54 | ctx.status = 200; 55 | ctx.body = result; 56 | } catch (err) { 57 | ctx.status = 400; 58 | ctx.body = { message: `Failed at ${err.message}`, error: err }; 59 | } 60 | 61 | return next; 62 | 63 | } 64 | 65 | module.exports = { 66 | buildDiagram, 67 | 
buildBlueprint 68 | } -------------------------------------------------------------------------------- /src/controllers/connection.js: -------------------------------------------------------------------------------- 1 | const { publishMessage } = require("../services/broker") 2 | const { utils: { ENGINE_ID } } = require("@flowbuild/engine"); 3 | 4 | const sendBeacon = async (ctx, next) => { 5 | console.log('sendBeacon called') 6 | const actorId = ctx.state?.actor_data?.actor_id || ''; 7 | 8 | const token = ctx.request?.body?.token || ""; 9 | 10 | if (actorId) { 11 | const payload = { 12 | timestamp: Date.now(), 13 | engine_id: ENGINE_ID, 14 | mqtt_host: process.env.MQTT_HOST, 15 | flowbuild_host: process.env.FLOWBUILD_URL, 16 | token 17 | }; 18 | 19 | const emittedTo = [] 20 | if (process.env.MQTT) { 21 | const mqtt_namespace = process.env.MQTT_NAMESPACE; 22 | const mqttTopic = mqtt_namespace 23 | ? `/${mqtt_namespace}/beacon/${actorId}` 24 | : `/beacon/${actorId}` 25 | await publishMessage({ topic: mqttTopic, message: payload }, "MQTT"); 26 | emittedTo.push("MQTT"); 27 | } 28 | 29 | if (process.env.KAFKA) { 30 | const kafka_namespace = process.env.KAFKA_NAMESPACE; 31 | const kafkaTopic = kafka_namespace 32 | ? 
// Sample blueprint: exercises the process-tree features — two fire-and-forget
// startProcess nodes followed by a blocking SubProcess node.
module.exports = {
  name: "testTree",
  description: "test process dependency",
  blueprint_spec: {
    requirements: ["core"],
    prepare: [],
    nodes: [
      {
        id: "START",
        type: "Start",
        name: "Start node",
        parameters: {
          input_schema: {},
        },
        next: "START-PROCESS",
        lane_id: "1",
      },
      {
        // Spawns "test_workflow" as a child process (does not wait for it).
        id: "START-PROCESS",
        type: "SystemTask",
        category: "startProcess",
        name: "subprocess start",
        next: "LEAF",
        lane_id: "1",
        parameters: {
          workflow_name: "test_workflow",
          actor_data: { $ref: "actor_data" },
          input: {}
        }
      },
      {
        // Spawns "testTreeLeaf" as a second child process.
        id: "LEAF",
        type: "SystemTask",
        category: "startProcess",
        name: "subprocess start",
        next: "SUB-PROCESS",
        lane_id: "1",
        parameters: {
          workflow_name: "testTreeLeaf",
          actor_data: { $ref: "actor_data" },
          input: {}
        }
      },
      {
        // Blocking subprocess: waits for "basic" to reach "finished".
        id: "SUB-PROCESS",
        type: "SubProcess",
        name: "subprocess start",
        next: "END",
        lane_id: "1",
        parameters: {
          workflow_name: "basic",
          valid_response: "finished",
          actor_data: { $ref: "actor_data" },
          input: {}
        }
      },
      {
        id: "END",
        type: "Finish",
        name: "Finish node",
        next: null,
        lane_id: "1",
      },
    ],
    lanes: [
      {
        id: "1",
        name: "the_only_lane",
        rule: ["fn", ["&", "args"], true],
      },
    ],
    environment: {},
  },
};
// Sample blueprint: timers expressed in ISO-8601 duration notation — an
// intermediate timer node plus a UserTask boundary timer resolved via $ref.
module.exports = {
  name: "timersDuration",
  description: "Sample implementation of timers using duration notation",
  blueprint_spec: {
    requirements: ["core"],
    prepare: [],
    nodes: [
      {
        id: "START",
        type: "Start",
        name: "Start node",
        next: "CONFIG",
        parameters: {
          input_schema: {},
        },
        lane_id: "1",
      },
      {
        // Stores the boundary-timer duration (6s) in the bag for the $ref below.
        id: "CONFIG",
        name: "Set DueDate",
        next: "TIMER",
        type: "SystemTask",
        lane_id: "1",
        category: "setToBag",
        parameters: {
          input: {
            date1: "PT6S",
          },
        },
      },
      {
        // Intermediate timer: pauses the process for 4 seconds.
        id: "TIMER",
        type: "SystemTask",
        category: "timer",
        name: "Intermediate Event Timer with duration notation",
        next: "USERTASK",
        lane_id: "1",
        parameters: {
          input: {},
          duration: "PT4S",
        },
      },
      {
        id: "USERTASK",
        type: "UserTask",
        name: "User Task with boundary event, duration notation & $ref",
        next: "END",
        lane_id: "1",
        parameters: {
          action: "do something",
          input: {},
        },
        // Boundary timer whose duration comes from bag.date1 ("PT6S").
        events: [
          {
            family: "target",
            category: "timer",
            duration: { $ref: "bag.date1" },
          },
        ],
      },
      {
        id: "END",
        type: "Finish",
        name: "Finish node",
        next: null,
        lane_id: "1",
      },
    ],
    lanes: [
      {
        id: "1",
        name: "the_only_lane",
        rule: { $js: "() => true" },
      },
    ],
    environment: {},
  },
};
const fs = require("fs");
const path = require("path");
const { logger } = require("../src/utils/logger");

const directory = "src/samples/blueprints";

// Ensure a clean export/blueprints output directory.
if (!fs.existsSync("export")) {
  fs.mkdirSync("export");
}
if (!fs.existsSync("export/blueprints")) {
  fs.mkdirSync("export/blueprints");
} else {
  // BUG FIX: fs.readdirSync / fs.unlinkSync take no callback — the old
  // callback-style invocations were silently ignored, so stale exports were
  // never removed.
  for (const file of fs.readdirSync("export/blueprints")) {
    fs.unlinkSync(path.join("export/blueprints", file));
  }
}

logger.debug("starting export");

// Exports every blueprint (JS module or raw JSON) to export/blueprints/<name>.json.
fs.readdir(directory, (err, files) => {
  if (err) {
    logger.error("Unable to find blueprints directory");
    process.exit(1);
  }
  files.forEach((file) => {
    if (path.extname(file) === ".js") {
      const scriptName = path.basename(file, ".js");
      const spec = require(`../${directory}/${scriptName}`);
      fs.writeFileSync(
        `export/blueprints/${spec.name}.json`,
        JSON.stringify(spec, null, 2)
      );

      logger.info(`Exporting ${file} to ${spec.name}.json!`);

      return {
        name: spec.name,
        created_at: new Date(),
        version: 0,
      };
    }
    if (
      path.extname(file) === ".json" &&
      file !== "summary.json"
    ) {
      // BUG FIX: JSON blueprints live under `directory`
      // (src/samples/blueprints), not the non-existent "src/blueprints".
      const data = fs.readFileSync(path.join(directory, file));
      const bp = JSON.parse(data);

      fs.writeFileSync(
        `export/blueprints/${bp.name}.json`,
        JSON.stringify(bp, null, 2)
      );

      logger.info(`Exporting ${file} to ${bp.name}.json!`);

      return {
        name: bp.name,
        created_at: new Date(),
        version: 0,
      };
    }
  });
});
{ 64 | name: bp.name, 65 | created_at: new Date(), 66 | version: 0, 67 | }; 68 | } 69 | }); 70 | }); 71 | -------------------------------------------------------------------------------- /src/validators/base.js: -------------------------------------------------------------------------------- 1 | const Ajv = require("ajv"); 2 | const addFormats = require("ajv-formats"); 3 | const { logger } = require("../utils/logger"); 4 | const { validate } = require("uuid"); 5 | 6 | const validateBodyWithSchema = (schema) => { 7 | return async (ctx, next) => { 8 | logger.debug("called validateBodySchema"); 9 | const _ajv = new Ajv({ allErrors: true, allowUnionTypes: true }); 10 | addFormats(_ajv); 11 | const validateSchema = _ajv.compile(schema); 12 | const is_valid = await validateSchema(ctx.request.body); 13 | if (!is_valid) { 14 | logger.debug("invalid schema # %i errors", validateSchema.errors.length); 15 | ctx.status = 400; 16 | ctx.body = { 17 | message: "Invalid Request Body", 18 | error: validateSchema.errors.map((e) => { 19 | let response; 20 | response = { 21 | field: e.instancePath, 22 | message: e.message, 23 | }; 24 | return response; 25 | }), 26 | }; 27 | return; 28 | } 29 | return await next(); 30 | }; 31 | }; 32 | 33 | validateDataWithSchema = async (schema, data) => { 34 | logger.silly("called validateSchema"); 35 | const _ajv = new Ajv({ allErrors: true }); 36 | addFormats(_ajv); 37 | const validateSchema = _ajv.compile(schema); 38 | const is_valid = await validateSchema(data); 39 | return { 40 | is_valid: is_valid, 41 | errors: validateSchema.errors, 42 | }; 43 | }; 44 | 45 | validateUUID = async (ctx, next) => { 46 | const id = ctx.params.id || ctx.request.query.workflow_id; 47 | logger.debug(`validating id [${id}]`); 48 | if (id) { 49 | const is_valid = validate(id); 50 | if (!is_valid) { 51 | ctx.status = 400; 52 | ctx.body = { 53 | message: "Invalid uuid", 54 | }; 55 | } else { 56 | return await next(); 57 | } 58 | } else { 59 | return next(); 60 | } 61 | 
const { ProcessStatus, Nodes } = require("@flowbuild/engine");
const Ajv = require("ajv");
const addFormats = require("ajv-formats");
const { logger } = require("../utils/logger");

// Custom systemTask node: validates `parameters.input.data` against
// `parameters.input.schema` at run time and returns the validation outcome.
class ValidateSchemaNode extends Nodes.SystemTaskNode {
  // JSON schema of the node spec itself (used by static validate below and
  // surfaced by the cockpit fetchNode endpoint).
  static get schema() {
    return {
      type: "object",
      required: ["id", "name", "next", "type", "lane_id", "parameters"],
      properties: {
        id: { type: "string" },
        name: { type: "string" },
        next: { type: "string" },
        type: { type: "string" },
        category: { type: "string" },
        lane_id: { type: "string" },
        parameters: {
          type: "object",
          required: ["input"],
          properties: {
            input: {
              type: "object",
              required: ["schema", "data"],
              properties: {
                schema: { type: "object" },
                data: { type: "object" },
              },
            },
          },
        },
      },
    };
  }

  // Returns [is_valid, errors-as-JSON-string] for a node spec.
  static validate(spec) {
    const ajv = new Ajv({ allErrors: true });
    addFormats(ajv);
    const validate = ajv.compile(ValidateSchemaNode.schema);
    const validation = validate(spec);
    return [validation, JSON.stringify(validate.errors)];
  }

  // Instance validation delegates to the static spec validator.
  validate() {
    return ValidateSchemaNode.validate(this._spec);
  }

  // Runs the configured schema against the configured data; always keeps the
  // process RUNNING and reports {is_valid, errors} in the node result.
  async _run(executionData) {
    try {
      const ajv = new Ajv({ allErrors: true });
      addFormats(ajv);
      const validate = ajv.compile(executionData.schema);
      const validation = validate(executionData.data);
      return [
        {
          data: {
            is_valid: validation,
            errors: validate.errors,
          },
        },
        ProcessStatus.RUNNING,
      ];
    } catch (err) {
      // e.g. an invalid/uncompilable schema — logged, then rethrown so the
      // engine marks the node as failed.
      logger.error("[validateSchemaNode] Node failed", err);
      throw err;
    }
  }
}

module.exports = ValidateSchemaNode;
require("dotenv").config();
const { logger } = require("../utils/logger");
const { addSystemTaskCategory } = require("@flowbuild/engine");
const createIndexNode = require("./createIndexNode");
const retrieveProcessNode = require("./retrieveProcessNode");
const tokenizeNode = require("./tokenizeNode");
const validateSchemaNode = require("./validateSchemaNode");
const createUuidNode = require("./createUuidNode");
const BasicAuthNode = require("./basicAuthNode");
const remapDataNode = require("./remapDataNode");
const filterDataNode = require("./filterDataNode");
const DeepCompareNode = require("./deepCompareNode");
const GrpcNode = require("./grpcNode");
const KafkaPublishNode = require("./kafkaPublishNode");
const GraphQlNode = require("./graphqlNode");

/**
 * Registers every custom systemTask category with the engine, logging each
 * registration. kafkaPublish is only registered when KAFKA is configured.
 */
const setCustomNodes = () => {
  // [category name, node class, log label]
  const registrations = [
    ["createIndex", createIndexNode, "createIndexNode"],
    ["findProcess", retrieveProcessNode, "retrieveProcessNode"],
    ["tokenize", tokenizeNode, "tokenizeNode"],
    ["validateSchema", validateSchemaNode, "validateSchemaNode"],
    ["createUuid", createUuidNode, "createUuidNode"],
    ["basicAuth", BasicAuthNode, "basicAuthNode"],
    ["remapData", remapDataNode, "remapDataNode"],
    ["filterData", filterDataNode, "filterDataNode"],
    ["deepCompare", DeepCompareNode, "deepCompareNode"],
    ["grpc", GrpcNode, "grpcNode"],
  ];

  for (const [category, node, label] of registrations) {
    addSystemTaskCategory({ [category]: node });
    logger.info(`added ${label}`);
  }

  if (process.env.KAFKA) {
    addSystemTaskCategory({ kafkaPublish: KafkaPublishNode });
    logger.info("added kafkaPublishNode");
  }

  addSystemTaskCategory({ graphQl: GraphQlNode });
  logger.info("added graphQlNode");
};

module.exports.setCustomNodes = setCustomNodes;
logger.info("added filterDataNode"); 34 | addSystemTaskCategory({ deepCompare: DeepCompareNode }); 35 | logger.info("added deepCompareNode"); 36 | addSystemTaskCategory({ grpc: GrpcNode }); 37 | logger.info("added grpcNode"); 38 | if (process.env.KAFKA) { 39 | addSystemTaskCategory({ kafkaPublish: KafkaPublishNode }); 40 | logger.info("added kafkaPublishNode"); 41 | } 42 | addSystemTaskCategory({ graphQl: GraphQlNode }); 43 | logger.info("added graphQlNode"); 44 | }; 45 | 46 | module.exports.setCustomNodes = setCustomNodes; 47 | -------------------------------------------------------------------------------- /src/samples/blueprints/basicAuth.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "basicAuth", 3 | description: "blueprint for testing basic auth node", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "START", 10 | type: "Start", 11 | name: "Start node", 12 | next: "BASIC", 13 | parameters: { 14 | input_schema: {}, 15 | }, 16 | lane_id: "1", 17 | }, 18 | { 19 | id: "BASIC", 20 | type: "systemTask", 21 | category: "basicAuth", 22 | name: "HTTP basic auth node", 23 | next: "BASIC-REF", 24 | lane_id: "1", 25 | parameters: { 26 | input: {}, 27 | request: { 28 | verb: "GET", 29 | baseUrl: 'https://postman-echo.com', 30 | route: '/basic-auth', 31 | auth: { 32 | username: 'postman', 33 | password: 'password' 34 | } 35 | } 36 | }, 37 | }, 38 | { 39 | id: "BASIC-REF", 40 | type: "systemTask", 41 | category: "basicAuth", 42 | name: "HTTP basic auth node", 43 | next: "END", 44 | lane_id: "1", 45 | parameters: { 46 | input: {}, 47 | request: { 48 | verb: "GET", 49 | baseUrl: { $ref: 'parameters.baseUrl' }, 50 | route: { $mustache: '{{{parameters.route}}}-{{{parameters.project}}' }, 51 | auth: { 52 | username: { $ref: 'parameters.username' }, 53 | password: { $ref: 'parameters.password' }, 54 | } 55 | } 56 | }, 57 | }, 58 | { 59 | id: "END", 60 | type: 
"Finish", 61 | name: "Finish node", 62 | next: null, 63 | lane_id: "1", 64 | }, 65 | ], 66 | lanes: [ 67 | { 68 | id: "1", 69 | name: "the_only_lane", 70 | rule: ["fn", ["&", "args"], true], 71 | }, 72 | ], 73 | environment: {}, 74 | parameters: { 75 | username: 'postman', 76 | password: 'password', 77 | baseUrl: 'https://postman-echo.com', 78 | route: '/basic', 79 | project: 'auth' 80 | } 81 | }, 82 | }; 83 | -------------------------------------------------------------------------------- /src/validators/schemas/workflow.schema.js: -------------------------------------------------------------------------------- 1 | const workflowSchema = { 2 | type: "object", 3 | properties: { 4 | name: { type: "string" }, 5 | description: { type: "string" }, 6 | blueprint_spec: { 7 | type: "object", 8 | properties: { 9 | requirements: { 10 | type: "array", 11 | items: { type: "string" }, 12 | uniqueItems: true, 13 | }, 14 | prepare: { 15 | type: "array", 16 | items: { type: "string" }, 17 | uniqueItems: true, 18 | }, 19 | environment: { type: "object" }, 20 | parameters: { type: "object" }, 21 | lanes: { 22 | type: "array", 23 | items: { 24 | type: "object", 25 | properties: { 26 | id: { type: "string" }, 27 | name: { type: "string" }, 28 | rule: { type: "array" }, 29 | }, 30 | additionalProperties: false, 31 | required: ["id", "name", "rule"], 32 | }, 33 | uniqueItems: true, 34 | minItems: 1, 35 | }, 36 | nodes: { 37 | type: "array", 38 | items: { 39 | type: "object", 40 | properties: { 41 | id: { type: "string" }, 42 | name: { type: "string" }, 43 | type: { type: "string" }, 44 | category: { type: "string" }, 45 | lane_id: { type: "string" }, 46 | next: { 47 | oneOf: [ 48 | { type: "object" }, 49 | { type: "string" }, 50 | { type: "null" }, 51 | ], 52 | }, 53 | parameters: { type: "object" }, 54 | }, 55 | required: ["id", "name", "type", "lane_id", "next"], 56 | }, 57 | minItems: 2, 58 | }, 59 | }, 60 | additionalProperties: false, 61 | required: [ 62 | "requirements", 63 | 
"prepare", 64 | "environment", 65 | "lanes", 66 | "nodes", 67 | ], 68 | }, 69 | additionalProperties: false, 70 | }, 71 | additionalProperties: false, 72 | required: ["name", "description", "blueprint_spec"] 73 | }; 74 | 75 | module.exports = { 76 | workflowSchema, 77 | }; 78 | -------------------------------------------------------------------------------- /src/utils/tracing.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const { NodeSDK } = require("@opentelemetry/sdk-node"); 3 | const { getNodeAutoInstrumentations } = require("@opentelemetry/auto-instrumentations-node"); 4 | const { Resource } = require('@opentelemetry/resources'); 5 | const { SemanticResourceAttributes } = require('@opentelemetry/semantic-conventions'); 6 | const { BasicTracerProvider, SimpleSpanProcessor } = require('@opentelemetry/sdk-trace-base'); 7 | const { OTLPTraceExporter } = require('@opentelemetry/exporter-trace-otlp-grpc'); 8 | const grpc = require('@grpc/grpc-js'); 9 | const { logger } = require('./logger'); 10 | 11 | const openTelemetry = process.env.OTEL_ENABLED 12 | 13 | const metadata = new grpc.Metadata(); 14 | const collectorOptions = {} 15 | 16 | if(openTelemetry === true || openTelemetry === "true") { 17 | if(process.env.NEW_RELIC_ENABLED === true || process.env.NEW_RELIC_ENABLED === "true") { 18 | logger.info("Enabling New Relic") 19 | metadata.set('api-key', process.env.NEW_RELIC_API_KEY ) 20 | collectorOptions.metadata = metadata; 21 | collectorOptions.credentials = grpc.credentials.createSsl() 22 | } 23 | //http://localhost:4317 is the default value for the otlp grpc package' 24 | collectorOptions.url = process.env.OTEL_COLLECTOR_URL || 'http://localhost:4317'; 25 | } 26 | 27 | 28 | const exporter = new OTLPTraceExporter(collectorOptions); 29 | 30 | const provider = new BasicTracerProvider({ 31 | resource: new Resource({ 32 | [SemanticResourceAttributes.SERVICE_NAME]: process.env.OTEL_SERVICE_NAME 
33 | }) 34 | }); 35 | provider.addSpanProcessor(new SimpleSpanProcessor(exporter)); 36 | 37 | const sdk = new NodeSDK({ 38 | traceExporter: exporter, 39 | instrumentations: [getNodeAutoInstrumentations()], 40 | serviceName: "flowbuild-local" 41 | }); 42 | 43 | if(openTelemetry === true || openTelemetry === "true") { 44 | sdk.start().then(() => { 45 | logger.info("Open Telemetry Started") 46 | logger.info(`Service Name: ${process.env.OTEL_SERVICE_NAME}`) 47 | }) 48 | 49 | provider.register(); 50 | ['SIGINT', 'SIGTERM'].forEach(signal => { 51 | logger.info(`Open Telemetry Provider ${signal}`) 52 | process.on(signal, () => provider.shutdown().catch(console.error)); 53 | }); 54 | } else { 55 | logger.info("Open Telemetry Disabled") 56 | } 57 | 58 | -------------------------------------------------------------------------------- /src/tests/token.test.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | const { v1: uuid } = require("uuid"); 3 | const axios = require("axios"); 4 | const { startServer } = require("../app"); 5 | const { delay } = require("./utils/auxiliar"); 6 | const { config } = require("./utils/requestConfig"); 7 | 8 | let server; 9 | 10 | beforeAll(() => { 11 | process.env.ENGINE_HEARTBEAT = false; 12 | server = startServer(3001); 13 | axios.defaults.baseURL = `${config.baseURL}/token`; 14 | axios.defaults.headers = config.headers; 15 | axios.defaults.validateStatus = config.validateStatus; 16 | }); 17 | 18 | beforeEach(async () => { 19 | await delay(100); 20 | }); 21 | 22 | afterAll(async () => { 23 | await server.close(); 24 | }); 25 | 26 | describe("POST /", () => { 27 | test("should return 200 without payload", async () => { 28 | const response = await axios.post("/"); 29 | 30 | expect(response.status).toBe(200); 31 | expect(response.data.jwtToken).toBeDefined(); 32 | expect(response.data.payload.actor_id).toBeDefined(); 33 | 34 | const duration = response.data.payload.exp - 
response.data.payload.iat; 35 | expect(duration).toBe(3600); 36 | }); 37 | 38 | test("should use provided duration", async () => { 39 | const definedDuration = 6000; 40 | 41 | const response = await axios.post("/", {}, { headers: { "x-duration": definedDuration } }); 42 | 43 | expect(response.status).toBe(200); 44 | expect(response.data.jwtToken).toBeDefined(); 45 | expect(response.data.payload.actor_id).toBeDefined(); 46 | 47 | const duration = response.data.payload.exp - response.data.payload.iat; 48 | expect(duration).toBe(definedDuration); 49 | }); 50 | 51 | test("should use provided actor_id", async () => { 52 | const actorId = uuid(); 53 | const response = await axios.post("/", { actor_id: actorId }); 54 | 55 | expect(response.status).toBe(200); 56 | expect(response.data.jwtToken).toBeDefined(); 57 | expect(response.data.payload.actor_id).toBe(actorId); 58 | }); 59 | 60 | test("should use provided claims", async () => { 61 | const claims = ["a", "b"]; 62 | const response = await axios.post("/", { claims }); 63 | 64 | expect(response.status).toBe(200); 65 | expect(response.data.jwtToken).toBeDefined(); 66 | expect(response.data.payload.claims).toStrictEqual(claims); 67 | }); 68 | }); 69 | -------------------------------------------------------------------------------- /src/samples/blueprints/timersDueDate.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "timersDueDate", 3 | description: "Sample implementation of timers using dueDate notation", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "START", 10 | type: "Start", 11 | name: "Start node", 12 | next: "CONFIG", 13 | parameters: { 14 | input_schema: {}, 15 | }, 16 | lane_id: "1", 17 | }, 18 | { 19 | id: "CONFIG", 20 | name: "Put dates in bag, date 2 needs to be later than date1", 21 | next: "TIMER", 22 | type: "SystemTask", 23 | lane_id: "1", 24 | category: "setToBag", 25 | parameters: { 26 | 
input: { 27 | date1: { 28 | $js: "() => { const curDate = new Date(); return new Date(curDate.getTime() + 5 * 1000) };", 29 | }, 30 | date2: { 31 | $js: "() => { const curDate = new Date(); return new Date(curDate.getTime() + 10 * 1000) };", 32 | }, 33 | }, 34 | }, 35 | }, 36 | { 37 | id: "TIMER", 38 | type: "SystemTask", 39 | category: "timer", 40 | name: "Intermediate Event Timer with dueDate notation & $ref", 41 | next: "USERTASK", 42 | lane_id: "1", 43 | parameters: { 44 | input: {}, 45 | dueDate: { $ref: "bag.date1" }, 46 | }, 47 | }, 48 | { 49 | id: "USERTASK", 50 | type: "UserTask", 51 | name: "User Task with boundary event, duration notation & $ref", 52 | next: "END", 53 | lane_id: "1", 54 | parameters: { 55 | action: "do something", 56 | input: {}, 57 | }, 58 | events: [ 59 | { 60 | family: "target", 61 | category: "timer", 62 | dueDate: { $ref: "bag.date2" }, 63 | }, 64 | ], 65 | }, 66 | { 67 | id: "END", 68 | type: "Finish", 69 | name: "Finish node", 70 | next: null, 71 | lane_id: "1", 72 | }, 73 | ], 74 | lanes: [ 75 | { 76 | id: "1", 77 | name: "the_only_lane", 78 | rule: { $js: "() => true" }, 79 | }, 80 | ], 81 | environment: {}, 82 | }, 83 | }; 84 | -------------------------------------------------------------------------------- /db/seeds/seed-test-workflow.js: -------------------------------------------------------------------------------- 1 | const test_workflow_blueprint = require("./blueprints/test_workflow_blueprint"); 2 | const pizza1_blueprint = require("./blueprints/pizza1_blueprint"); 3 | const pizza2_blueprint = require("./blueprints/pizza2_blueprint"); 4 | const test_schemas_blueprint = require("./blueprints/test_schemas_blueprint"); 5 | const test_subprocess_parent = require("./blueprints/test_subprocess_parent"); 6 | const test_subprocess_child = require("./blueprints/test_subprocess_child"); 7 | 8 | exports.seed = function(knex) { 9 | // Inserts seed entries 10 | return knex('workflow').insert([ 11 | { 12 | id: 
"d373bef0-1152-11ea-9576-9584815cab84", 13 | created_at: new Date(), 14 | name: "test_workflow", 15 | description: "Workflow para rodar testes sobre a aplicação", 16 | blueprint_spec: test_workflow_blueprint, 17 | version: 1, 18 | }, 19 | { 20 | id: "2210847e-8b61-4af7-9df7-73fd2b0bb24d", 21 | created_at: new Date(), 22 | name: "test_schemas", 23 | description: "Workflow para rodar testes de schemas (input, activity, result)", 24 | blueprint_spec: test_schemas_blueprint, 25 | version: 1, 26 | }, 27 | { 28 | id: "fd4db5f9-0d50-4f53-8801-b12464f0dc52", 29 | created_at: new Date(), 30 | name: "test_subprocess_parent", 31 | description: "Workflow para rodar testes de subprocess, processo pai", 32 | blueprint_spec: test_subprocess_parent, 33 | version: 1, 34 | }, 35 | { 36 | id: "5fefa640-e264-4481-a437-2adc3ceb6efa", 37 | created_at: new Date(), 38 | name: "test_subprocess_child", 39 | description: "Workflow para rodar testes de subprocess, processo filho", 40 | blueprint_spec: test_subprocess_child, 41 | version: 1, 42 | }, 43 | { 44 | id: "7be513f4-98dc-43e2-8f3a-66e68a61aca8", 45 | created_at: new Date(), 46 | name: "pizza1", 47 | description: "Cookbook somente com systemTasks, mostrando o uso das notacoes de atalho", 48 | version: 1, 49 | blueprint_spec: pizza1_blueprint 50 | }, 51 | { 52 | id: "8fc66458-1137-4c1a-9aef-5dcdca9a19f6", 53 | created_at: new Date(), 54 | name: "pizza2", 55 | description: "Cookbook com userTasks e lanes", 56 | version: 1, 57 | blueprint_spec: pizza2_blueprint 58 | } 59 | ]).onConflict().ignore(); 60 | }; 61 | -------------------------------------------------------------------------------- /src/nodes/retrieveProcessNode.js: -------------------------------------------------------------------------------- 1 | 2 | const { ProcessStatus, Nodes } = require("@flowbuild/engine"); 3 | const Ajv = require("ajv"); 4 | const addFormats = require("ajv-formats"); 5 | const { Index } = require("@flowbuild/indexer"); 6 | const { logger } = 
require("../utils/logger"); 7 | const { db } = require("../tests/utils/db"); 8 | 9 | class RetrieveProcessesNode extends Nodes.SystemTaskNode { 10 | 11 | static get schema() { 12 | return { 13 | type: "object", 14 | required: ["id", "name", "next", "type", "lane_id", "parameters"], 15 | properties: { 16 | id: { type: "string" }, 17 | name: { type: "string" }, 18 | next: { type: "string" }, 19 | type: { type: "string" }, 20 | category: { type: "string" }, 21 | lane_id: { type: "string" }, 22 | parameters: { 23 | type: "object", 24 | properties: { 25 | input: { 26 | type: "object", 27 | required: ["entity_id"], 28 | properties: { 29 | entity_id: { 30 | oneOf: [ 31 | { type: "string", format: "uuid" }, 32 | { 33 | type: "object", 34 | properties: { 35 | "$ref": { type: "string" }, 36 | } 37 | } 38 | ] 39 | }, 40 | }, 41 | }, 42 | }, 43 | }, 44 | }, 45 | }; 46 | } 47 | 48 | static validate(spec) { 49 | const ajv = new Ajv({ allErrors: true }); 50 | addFormats(ajv); 51 | const validate = ajv.compile(RetrieveProcessesNode.schema); 52 | const validation = validate(spec); 53 | return [validation, JSON.stringify(validate.errors)]; 54 | } 55 | 56 | validate() { 57 | return RetrieveProcessesNode.validate(this._spec); 58 | } 59 | 60 | async _run(executionData) { 61 | try { 62 | logger.debug("[Indexer] retrieveProcesses node"); 63 | const _idx = new Index(db); 64 | const result = await _idx.fetchProcessByEntity(executionData.entity_id, executionData.limit); 65 | return [{ data: result }, ProcessStatus.RUNNING]; 66 | } catch (err) { 67 | logger.error("retrieveProcesses node failed", err); 68 | throw err; 69 | } 70 | } 71 | } 72 | 73 | module.exports = RetrieveProcessesNode; 74 | -------------------------------------------------------------------------------- /src/services/compareBlueprints.js: -------------------------------------------------------------------------------- 1 | const _ = require("lodash"); 2 | const { getEngine } = require("../engine"); 3 | 4 | const 
compareBlueprints = async (workflow_name, blueprint_spec) => { 5 | const engine = getEngine(); 6 | let result = {}; 7 | 8 | try { 9 | await engine.validateBlueprint(blueprint_spec); 10 | const current_workflow = await engine.fetchWorkflowByName(workflow_name); 11 | if (current_workflow) { 12 | const cur_wf_ordered_nodes = current_workflow.blueprint_spec.nodes.sort((a, b) => { 13 | return a.id > b.id ? -1 : 0; 14 | }); 15 | const bp_ordered_nodes = blueprint_spec.nodes.sort((a, b) => { 16 | return a.id > b.id ? -1 : 0; 17 | }); 18 | 19 | const cur_wf_ordered_lanes = current_workflow.blueprint_spec.lanes.sort((a, b) => { 20 | return a.id > b.id ? -1 : 0; 21 | }); 22 | const bp_ordered_lanes = blueprint_spec.lanes.sort((a, b) => { 23 | return a.id > b.id ? -1 : 0; 24 | }); 25 | 26 | const nodes = _.isEqual(cur_wf_ordered_nodes, bp_ordered_nodes); 27 | const lanes = _.isEqual(cur_wf_ordered_lanes, bp_ordered_lanes); 28 | const prepare = _.isEqual(current_workflow.blueprint_spec.prepare, blueprint_spec.prepare); 29 | const environment = _.isEqual(current_workflow.blueprint_spec.environment, blueprint_spec.environment); 30 | const requirements = _.isEqual(current_workflow.blueprint_spec.requirements, blueprint_spec.requirements); 31 | 32 | if (nodes && lanes && prepare && environment && requirements) { 33 | result = { 34 | changes: false, 35 | current_workflow: { 36 | id: current_workflow._id, 37 | version: current_workflow._version, 38 | }, 39 | }; 40 | } else { 41 | result = { 42 | changes: true, 43 | current_workflow: { 44 | id: current_workflow._id, 45 | version: current_workflow._version, 46 | }, 47 | comparison: { 48 | nodes: nodes, 49 | lanes: lanes, 50 | prepare: prepare, 51 | environment: environment, 52 | requirements: requirements, 53 | }, 54 | }; 55 | } 56 | } else { 57 | result = { 58 | changes: true, 59 | }; 60 | } 61 | } catch (err) { 62 | result.error = err; 63 | } 64 | 65 | return result; 66 | }; 67 | 68 | module.exports = { 69 | compareBlueprints, 70 | 
}; 71 | -------------------------------------------------------------------------------- /postman/collections/Auth & Healthcheck_d80d7616-3f76-4733-8ad6-4738ad200128.json: -------------------------------------------------------------------------------- 1 | { 2 | "info": { 3 | "_postman_id": "d36f89a2-0ad9-4cf3-b56b-1a874b9ac814", 4 | "name": "Auth & Healthcheck", 5 | "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", 6 | "_uid": "2387160-d36f89a2-0ad9-4cf3-b56b-1a874b9ac814" 7 | }, 8 | "item": [ 9 | { 10 | "name": "Healthcheck", 11 | "event": [ 12 | { 13 | "listen": "test", 14 | "script": { 15 | "id": "d442ea7d-c468-4af7-9c4b-5045dd1d3be3", 16 | "exec": [ 17 | "" 18 | ], 19 | "type": "text/javascript" 20 | } 21 | } 22 | ], 23 | "id": "278398f7-a131-4813-899f-ee050c479487", 24 | "protocolProfileBehavior": { 25 | "disableBodyPruning": true 26 | }, 27 | "request": { 28 | "auth": { 29 | "type": "noauth" 30 | }, 31 | "method": "GET", 32 | "header": [], 33 | "url": { 34 | "raw": "{{host}}/healthcheck", 35 | "host": [ 36 | "{{host}}" 37 | ], 38 | "path": [ 39 | "healthcheck" 40 | ] 41 | } 42 | }, 43 | "response": [] 44 | }, 45 | { 46 | "name": "Token", 47 | "event": [ 48 | { 49 | "listen": "test", 50 | "script": { 51 | "id": "5159752c-2dca-457b-903e-cf222c651cfb", 52 | "exec": [ 53 | "const response = pm.response.json();\t\t\t\t\t\t\r", 54 | "pm.environment.set(\"token\",response.jwtToken);" 55 | ], 56 | "type": "text/javascript" 57 | } 58 | } 59 | ], 60 | "id": "6f3efd75-46ee-48e9-9627-722e13e35faf", 61 | "protocolProfileBehavior": { 62 | "disableBodyPruning": true 63 | }, 64 | "request": { 65 | "method": "POST", 66 | "header": [ 67 | { 68 | "key": "x-secret", 69 | "value": "1234", 70 | "type": "string" 71 | }, 72 | { 73 | "key": "x-duration", 74 | "value": "36000000", 75 | "type": "string" 76 | } 77 | ], 78 | "body": { 79 | "mode": "raw", 80 | "raw": "{\r\n \"actor_id\": \"{{$guid}}\",\r\n \"claims\": [\"whatever\"]\r\n}", 81 | "options": 
{ 82 | "raw": { 83 | "language": "json" 84 | } 85 | } 86 | }, 87 | "url": { 88 | "raw": "{{host}}/token", 89 | "host": [ 90 | "{{host}}" 91 | ], 92 | "path": [ 93 | "token" 94 | ] 95 | } 96 | }, 97 | "response": [] 98 | } 99 | ] 100 | } -------------------------------------------------------------------------------- /src/samples/blueprints/grpcNode.js: -------------------------------------------------------------------------------- 1 | const { descriptor } = require("../grpcdescriptor"); 2 | 3 | module.exports = { 4 | name: "grpc_node", 5 | description: "workflow for testing grpc node", 6 | blueprint_spec: { 7 | requirements: ["core"], 8 | prepare: [], 9 | nodes: [ 10 | { 11 | id: "START", 12 | type: "Start", 13 | name: "Start node", 14 | next: "GRPC_REFLECT", 15 | parameters: { 16 | input_schema: { 17 | type: "object", 18 | required: ["payload"], 19 | properties: { 20 | server: { type: "string" }, 21 | service: { type: "string" }, 22 | method: { type: "string" }, 23 | payload: { type: "object" }, 24 | }, 25 | }, 26 | }, 27 | lane_id: "free", 28 | }, 29 | { 30 | id: "GRPC_REFLECT", 31 | type: "SystemTask", 32 | category: "grpc", 33 | name: "gRPC call using server reflection", 34 | next: "GRPC_DESCRIPT", 35 | lane_id: "free", 36 | parameters: { 37 | input: { 38 | server: { $js: "({bag}) => bag.server || 'grpc.postman-echo.com'" }, 39 | service: { $js: "({bag}) => bag.service || 'HelloService'" }, 40 | method: { $js: "({bag}) => bag.method || 'SayHello'" }, 41 | payload: { $ref: "bag.payload" }, 42 | useReflection: true, 43 | }, 44 | }, 45 | }, 46 | { 47 | id: "GRPC_DESCRIPT", 48 | type: "SystemTask", 49 | category: "grpc", 50 | name: "gRPC call using JSON descriptor", 51 | next: "END", 52 | lane_id: "free", 53 | parameters: { 54 | input: { 55 | server: { $js: "({bag}) => bag.server || 'grpc.postman-echo.com'" }, 56 | service: { $js: "({bag}) => bag.service || 'HelloService'" }, 57 | method: { $js: "({bag}) => bag.method || 'SayHello'" }, 58 | payload: { $ref: 
"bag.payload" }, 59 | useReflection: false, 60 | descriptor, 61 | }, 62 | }, 63 | }, 64 | { 65 | id: "END", 66 | type: "Finish", 67 | name: "Finish node", 68 | next: null, 69 | lane_id: "free", 70 | }, 71 | ], 72 | lanes: [ 73 | { 74 | id: "free", 75 | name: "the_only_lane", 76 | rule: ["fn", ["&", "args"], true], 77 | }, 78 | ], 79 | environment: {}, 80 | }, 81 | }; 82 | -------------------------------------------------------------------------------- /knexfile.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const path = require("path"); 3 | const BASE_PATH = path.join(__dirname, "db"); 4 | 5 | module.exports = { 6 | test: { 7 | client: "pg", 8 | connection: { 9 | host: process.env.POSTGRES_HOST || "0.0.0.0", 10 | port: process.env.POSTGRES_PORT || "5432", 11 | user: process.env.POSTGRES_USER || "postgres", 12 | password: process.env.POSTGRES_PASSWORD || "postgres", 13 | database: process.env.POSTGRES_DATABASE || "workflow", 14 | }, 15 | migrations: { 16 | directory: path.join(BASE_PATH, "migrations"), 17 | }, 18 | pool: { 19 | min: 0, 20 | max: parseInt(process.env.DB_MAX_POOL_CONNECTION ?? 10, 10), 21 | }, 22 | seeds: { 23 | directory: path.join(BASE_PATH, "seeds"), 24 | }, 25 | }, 26 | docker: { 27 | client: "pg", 28 | connection: { 29 | host: "flowbuild_db", 30 | user: "postgres", 31 | password: process.env.POSTGRES_PASSWORD || "postgres", 32 | database: "workflow", 33 | }, 34 | pool: { 35 | min: 0, 36 | max: parseInt(process.env.DB_MAX_POOL_CONNECTION ?? 
10, 10), 37 | }, 38 | migrations: { 39 | directory: path.join(BASE_PATH, "migrations"), 40 | }, 41 | seeds: { 42 | directory: path.join(BASE_PATH, "seeds"), 43 | }, 44 | }, 45 | dockerLocal: { 46 | client: "pg", 47 | connection: { 48 | host: "localhost", 49 | user: "postgres", 50 | password: process.env.POSTGRES_PASSWORD || "postgres", 51 | database: "workflow", 52 | port: 5432, 53 | }, 54 | pool: { 55 | min: 0, 56 | max: 40, 57 | acquireTimeoutMillis: 60000, 58 | idleTimeoutMillis: 600000, 59 | }, 60 | migrations: { 61 | directory: path.join(BASE_PATH, "migrations"), 62 | }, 63 | seeds: { 64 | directory: path.join(BASE_PATH, "seeds"), 65 | }, 66 | }, 67 | prod: { 68 | client: "pg", 69 | connection: { 70 | host: process.env.POSTGRES_HOST, 71 | port: process.env.POSTGRES_PORT, 72 | user: process.env.POSTGRES_USER, 73 | password: process.env.POSTGRES_PASSWORD, 74 | database: process.env.POSTGRES_DATABASE, 75 | ssl: { rejectUnauthorized: false }, 76 | }, 77 | pool: { 78 | min: 0, 79 | max: parseInt(process.env.DB_MAX_POOL_CONNECTION ?? 
10, 10), 80 | }, 81 | migrations: { 82 | directory: path.join(BASE_PATH, "migrations"), 83 | }, 84 | seeds: { 85 | directory: path.join(BASE_PATH, "seeds"), 86 | }, 87 | }, 88 | }; 89 | -------------------------------------------------------------------------------- /src/validators/schemas/workflow.js: -------------------------------------------------------------------------------- 1 | const workflowSchema = { 2 | type: "object", 3 | properties: { 4 | workflow_id: { type: "string", format: "uuid" }, 5 | name: { type: "string" }, 6 | description: { type: "string" }, 7 | blueprint_spec: { 8 | type: "object", 9 | properties: { 10 | requirements: { 11 | type: "array", 12 | items: { type: "string" }, 13 | uniqueItems: true, 14 | }, 15 | prepare: { 16 | type: "array", 17 | items: { type: "string" }, 18 | uniqueItems: true, 19 | }, 20 | environment: { type: "object" }, 21 | parameters: { type: "object" }, 22 | lanes: { 23 | type: "array", 24 | items: { 25 | type: "object", 26 | properties: { 27 | id: { type: "string" }, 28 | name: { type: "string" }, 29 | rule: { 30 | oneOf: [ 31 | { type: "array" }, 32 | { 33 | type: "object", 34 | required: ["$js"], 35 | properties: { 36 | $js: { "type": 'string' } 37 | } 38 | } 39 | ] 40 | } 41 | }, 42 | additionalProperties: false, 43 | required: ["id", "name", "rule"], 44 | }, 45 | uniqueItems: true, 46 | minItems: 1, 47 | }, 48 | nodes: { 49 | type: "array", 50 | items: { 51 | type: "object", 52 | properties: { 53 | id: { type: "string" }, 54 | name: { type: "string" }, 55 | type: { type: "string" }, 56 | category: { type: "string" }, 57 | lane_id: { type: "string" }, 58 | next: { 59 | oneOf: [ 60 | { type: "object" }, 61 | { type: "string" }, 62 | { type: "null" }, 63 | ], 64 | }, 65 | parameters: { type: "object" }, 66 | }, 67 | required: ["id", "name", "type", "lane_id", "next"], 68 | }, 69 | minItems: 2, 70 | }, 71 | }, 72 | additionalProperties: false, 73 | required: ["requirements", "prepare", "environment", "lanes", "nodes"], 
74 | }, 75 | additionalProperties: false, 76 | }, 77 | additionalProperties: false, 78 | required: ["name", "description", "blueprint_spec"], 79 | }; 80 | 81 | module.exports = { 82 | workflowSchema, 83 | }; 84 | -------------------------------------------------------------------------------- /src/middlewares/actordata.js: -------------------------------------------------------------------------------- 1 | const { logger } = require("../utils/logger"); 2 | const _ = require("lodash"); 3 | 4 | const customMap = { 5 | actor_id: process.env.JWT_PATH_ACTOR_ID || "actor_id", 6 | claims: process.env.JWT_PATH_CLAIMS || "claims", 7 | session_id: process.env.JWT_PATH_SESSION_ID || "session_id", 8 | }; 9 | 10 | const mapDefaultData = (ctx) => { 11 | const actor_id = _.get(ctx.state.user, customMap.actor_id); 12 | const claims = _.get(ctx.state.user, customMap.claims); 13 | const session_id = _.get(ctx.state.user, customMap.session_id); 14 | const trace = { 15 | tracestate: ctx.request.headers.tracestate, 16 | traceparent: ctx.request.headers.traceparent, 17 | }; 18 | 19 | return { actor_id, claims, session_id, trace }; 20 | }; 21 | 22 | const mapExtraData = (user) => { 23 | let keys; 24 | if (!process.env.JWT_EXTRA_KEYS) { 25 | const allkeys = Object.keys(user); 26 | const defaultKeys = Object.values(customMap); 27 | keys = allkeys.filter((key) => !defaultKeys.includes(key)); 28 | } else { 29 | keys = process.env.JWT_EXTRA_KEYS.split(","); 30 | } 31 | 32 | let extData = {}; 33 | if (keys && keys.length > 0) { 34 | for (const key of keys) { 35 | if (user[key]) { 36 | extData[key] = user[key]; 37 | } 38 | } 39 | } 40 | 41 | return extData; 42 | }; 43 | 44 | const captureActorData = async (ctx, next) => { 45 | logger.debug("captureActorData"); 46 | if (!ctx.state.user) { 47 | logger.debug("empty token payload"); 48 | ctx.status = 401; 49 | ctx.body = { 50 | message: "User data not found", 51 | error: ctx.state.jwtOriginalError, 52 | }; 53 | return; 54 | } 55 | 56 | const 
defaultData = mapDefaultData(ctx); 57 | 58 | if (!defaultData.actor_id) { 59 | logger.debug("no actor_id"); 60 | ctx.status = 401; 61 | ctx.body = { message: "Actor id not found" }; 62 | return; 63 | } 64 | 65 | if (!Array.isArray(defaultData.claims)) { 66 | logger.debug("invalid claims"); 67 | ctx.status = 401; 68 | ctx.body = { message: "Invalid claims" }; 69 | return; 70 | } 71 | 72 | if (ctx.state.actor_data) { 73 | ctx.state.actor_data["actor_id"] = defaultData.actor_id; 74 | ctx.state.actor_data["claims"] = defaultData.claims; 75 | ctx.state.actor_data.trace = defaultData.trace; 76 | ctx.state.actor_data["session_id"] = defaultData.session_id; 77 | } else { 78 | ctx.state.actor_data = defaultData; 79 | } 80 | 81 | ctx.state.actor_data.extData = mapExtraData(ctx.state.user); 82 | 83 | return next(); 84 | }; 85 | 86 | module.exports = { 87 | captureActorData, 88 | }; 89 | -------------------------------------------------------------------------------- /src/tests/cockpitBlueprintValidation.test.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | const axios = require("axios"); 3 | const { db } = require("./utils/db"); 4 | const { startServer } = require("../app"); 5 | const workflowSamples = require("../samples/workflows"); 6 | 7 | const { cleanDb } = require("./utils/auxiliar"); 8 | const { config } = require("./utils/requestConfig"); 9 | 10 | const { tearDownEnvironment, createTestEngine, createTestCockpit } = require("./utils/fixtures"); 11 | const { setDbConnection } = require("../services/cockpit"); 12 | 13 | let server; 14 | 15 | const prefix = "/cockpit"; 16 | 17 | beforeAll(async () => { 18 | createTestEngine(db); 19 | createTestCockpit(db); 20 | 21 | server = startServer(3001); 22 | axios.defaults.baseURL = `${config.baseURL}`; 23 | axios.defaults.headers = config.headers; 24 | axios.defaults.validateStatus = config.validateStatus; 25 | setDbConnection(db); 26 | 27 | await cleanDb(); 
28 | 29 | await axios.post("/workflows", workflowSamples.basicStartFinish); 30 | await axios.post("/workflows", workflowSamples.singleUserTask); 31 | }); 32 | 33 | afterAll(async () => tearDownEnvironment(server, db)); 34 | 35 | describe("POST /workflows/validate", () => { 36 | const route = `${prefix}/workflows/validate/`; 37 | 38 | test("Should return 200 for a valid blueprint", async () => { 39 | const response = await axios.post(route, workflowSamples.basicStartFinish); 40 | expect(response.status).toEqual(200); 41 | expect(response.data.message).toBe("Blueprint is valid"); 42 | }); 43 | 44 | test("Should return 400 for an invalid blueprint", async () => { 45 | const response = await axios.post(route, workflowSamples.invalidMissingNode); 46 | expect(response.status).toEqual(400); 47 | expect(response.data.message).toBe("Failed at are_all_nodes_present"); 48 | }); 49 | 50 | test("Should return 400 for an empty object", async () => { 51 | const payload = { 52 | name: "whatever", 53 | description: "whatever", 54 | blueprint_spec: {}, 55 | }; 56 | const response = await axios.post("/cockpit/workflows/validate", payload); 57 | expect(response.status).toEqual(400); 58 | expect(response.data.message).toBe("Invalid Request Body"); 59 | }); 60 | }); 61 | 62 | describe("POST /workflows/compare", () => { 63 | test("Invalid Blueprint", async () => {}); 64 | 65 | test("Same Blueprint", async () => {}); 66 | 67 | test("Reorder Nodes should not invalidate", async () => {}); 68 | 69 | test("Diferent Nodes", async () => {}); 70 | 71 | test("Nonexistent workflow", async () => {}); 72 | }); 73 | 74 | describe("POST /workflows/update", () => {}); 75 | -------------------------------------------------------------------------------- /src/nodes/tokenizeNode.js: -------------------------------------------------------------------------------- 1 | const { ProcessStatus, Nodes } = require("@flowbuild/engine"); 2 | const Ajv = require("ajv"); 3 | const addFormats = require("ajv-formats"); 
const { logger } = require("../utils/logger");
const { v1: uuid } = require("uuid");
const { nanoid } = require("nanoid");
const { createJWTToken } = require("../services/tokenGenerator");
const { jwtSecret } = require("../utils/jwtSecret");

/**
 * System task node that signs a JWT from the node's execution data.
 *
 * Missing fields are defaulted before signing: `secret` falls back to the
 * server-wide jwtSecret, `session_id` to a fresh nanoid, `actor_id` to a
 * fresh uuid v1 and `claims` to an empty list. The node returns the signed
 * token plus the (augmented) payload and keeps the process RUNNING.
 */
class TokenizeNode extends Nodes.SystemTaskNode {
  // Blueprint-spec schema; `parameters.input` is the only required parameter.
  static get schema() {
    return {
      type: "object",
      required: ["id", "name", "next", "type", "lane_id", "parameters"],
      properties: {
        id: { type: "string" },
        name: { type: "string" },
        next: { type: "string" },
        type: { type: "string" },
        category: { type: "string" },
        lane_id: { type: "string" },
        parameters: {
          type: "object",
          required: ["input"],
          properties: {
            input: { type: "object" },
          },
        },
      },
    };
  }

  /**
   * Validates a node spec against TokenizeNode.schema.
   * @param {object} spec - node spec from the blueprint
   * @returns {[boolean, string]} [isValid, JSON-serialized ajv errors]
   */
  static validate(spec) {
    const ajv = new Ajv({ allErrors: true });
    addFormats(ajv);
    const validate = ajv.compile(TokenizeNode.schema);
    const validation = validate(spec);
    return [validation, JSON.stringify(validate.errors)];
  }

  validate() {
    return TokenizeNode.validate(this._spec);
  }

  async _run(executionData) {
    try {
      // FIX: always pass radix 10 to parseInt; NaN (absent or non-numeric
      // duration) falls back to 1 hour via `||`.
      const duration = parseInt(executionData.duration, 10) || 3600;
      let secret = executionData.secret;

      if (!secret) {
        logger.debug("[tokenizeNode] Using default secret");
        secret = jwtSecret;
      }
      // The defaults below are written into executionData on purpose: the
      // same object is both signed and returned to the caller as `payload`.
      if (!executionData.session_id) {
        logger.debug("[tokenizeNode] Set a random session_id");
        executionData.session_id = nanoid();
      }
      if (!executionData.actor_id) {
        logger.debug("[tokenizeNode] Set a random actor_id");
        executionData.actor_id = uuid();
      }
      if (!executionData.claims) {
        logger.debug("[tokenizeNode] Set an empty claims list");
        executionData.claims = [];
      }
      const jwtToken = createJWTToken(executionData, secret, duration);
      const result = {
        jwtToken,
        payload: executionData,
      };
      return [{ data: result }, ProcessStatus.RUNNING];
    } catch (err) {
      logger.error("[tokenizeNode] Node failed", err);
      throw err;
    }
  }
}

module.exports = TokenizeNode;
80 | "lane_id": "1", 81 | "parameters": { 82 | "input": { 83 | "confirm": { "$ref": "bag.orderNo" } 84 | } 85 | } 86 | }, 87 | { 88 | "id": "7", 89 | "type": "Finish", 90 | "name": "Finish node", 91 | "next": null, 92 | "lane_id": "1" 93 | } 94 | ], 95 | "lanes": [ 96 | { 97 | "id": "1", 98 | "name": "the_only_lane", 99 | "rule": ["fn", ["&", "args"], true] 100 | } 101 | ], 102 | "environment": {} 103 | } 104 | -------------------------------------------------------------------------------- /src/tests/utils/cockpit_requests.js: -------------------------------------------------------------------------------- 1 | const supertest = require("supertest"); 2 | const samples = require("./samples"); 3 | 4 | let server; 5 | function setAuthorization(request, token = `Bearer ${samples.valid_token}`) { 6 | return request.set("Authorization", token); 7 | } 8 | module.exports = { 9 | setServer: (value) => (server = value), 10 | fetchWorkflowsWithProcessStatusCount: (filter) => { 11 | const request = supertest(server).get("/cockpit/workflows/stats").query(filter); 12 | return setAuthorization(request); 13 | }, 14 | setProcessState: (process_id, state_data) => { 15 | const request = supertest(server).post(`/cockpit/processes/${process_id}/state`).send(state_data); 16 | return setAuthorization(request); 17 | }, 18 | runPendingProcess: (process_id, actor_data) => { 19 | const request = supertest(server).post(`/cockpit/processes/${process_id}/state/run`).send(actor_data); 20 | return setAuthorization(request); 21 | }, 22 | getProcessesByWorkflowId: (workflow_id) => { 23 | const request = supertest(server).get(`/cockpit/workflows/${workflow_id}/processes`); 24 | return setAuthorization(request); 25 | }, 26 | getProcessesByWorkflowName: (workflow_name) => { 27 | const request = supertest(server).get(`/cockpit/workflows/name/${workflow_name}/processes`); 28 | return setAuthorization(request); 29 | }, 30 | validateBlueprint: (blueprint_spec) => { 31 | const request = 
supertest(server).post("/cockpit/workflows/validate").send(blueprint_spec); 32 | return setAuthorization(request); 33 | }, 34 | compareBlueprint: (blueprint_spec) => { 35 | const request = supertest(server).post("/cockpit/workflows/compare").send(blueprint_spec); 36 | return setAuthorization(request); 37 | }, 38 | getProcessStateByNodeId: (process_id, node_id) => { 39 | const request = supertest(server).get(`/cockpit/processes/${process_id}/state/${node_id}`); 40 | return setAuthorization(request); 41 | }, 42 | transferProcessState: (process_id, state_id) => { 43 | const request = supertest(server).post(`/cockpit/processes/${process_id}/set/${state_id}`); 44 | return setAuthorization(request); 45 | }, 46 | getProcessState: (state_id) => { 47 | const request = supertest(server).get(`/cockpit/processes/state/${state_id}`); 48 | return setAuthorization(request); 49 | }, 50 | getStatesFromNode: (workflow_name, node_id) => { 51 | const request = supertest(server).get(`/cockpit/workflows/name/${workflow_name}/states/${node_id}`); 52 | return setAuthorization(request); 53 | }, 54 | getProcessExecution: (process_id) => { 55 | const request = supertest(server).get(`/cockpit/processes/${process_id}/execution`); 56 | return setAuthorization(request); 57 | }, 58 | }; 59 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 8 | 9 | **What type of PR is this?** 10 | > Uncomment only one ` /kind <>` line, hit enter to put that in a new line, and remove leading whitespace from that line: 11 | > 12 | > /kind bug 13 | > /kind cleanup 14 | > /kind deprecation 15 | > /kind design 16 | > /kind documentation 17 | > /kind failing-test 18 | > /kind feature 19 | 20 | **What this PR does / why we need it**: 21 | 22 | **Which issue(s) this PR fixes**: 23 | 28 | Fixes # 29 | 30 | **Special notes for your reviewer**: 31 | 32 | **Does this PR 
introduce a user-facing change?**: 33 | 40 | ```release-note 41 | 42 | ``` 43 | 44 | **Additional documentation e.g., usage docs, etc.**: 45 | 46 | 61 | ```docs 62 | ``` 63 | -------------------------------------------------------------------------------- /src/tests/utils/auxiliar.js: -------------------------------------------------------------------------------- 1 | const _ = require("lodash"); 2 | const { db } = require("../utils/db"); 3 | 4 | const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); 5 | 6 | const cleanDb = async () => { 7 | await db.raw("truncate table workflow cascade"); 8 | await db("timer").del(); 9 | await db("index").del(); 10 | await db("environment_variable").del(); 11 | }; 12 | 13 | const validateWorkflow = (base, target) => { 14 | expect(base.id).toBe(target.id); 15 | expect(base.name).toBe(target.name); 16 | expect(base.description).toBe(target.description); 17 | //expect(base.blueprint_spec).toMatchObject(target.blueprint_spec); blueprint_spec should not be returned on listing 18 | expect(target.created_at).toBeDefined(); 19 | }; 20 | 21 | const validateProcess = (process, workflow_id) => { 22 | expect(process.id).toBeDefined(); 23 | expect(process.created_at).toBeDefined(); 24 | expect(process.workflow_id).toBe(workflow_id); 25 | expect(process.blueprint_spec).toBeUndefined(); 26 | }; 27 | 28 | const validateProcessState = ( 29 | state, 30 | process_id, 31 | step_number, 32 | node_id, 33 | next_node_id, 34 | bag, 35 | external_input, 36 | result, 37 | error, 38 | status 39 | ) => { 40 | expect(state.id).toBeDefined(); 41 | expect(state.created_at).toBeDefined(); 42 | expect(state.process_id).toBe(process_id); 43 | expect(state.step_number).toBe(step_number); 44 | expect(state.node_id).toBe(node_id); 45 | expect(state.next_node_id).toBe(next_node_id); 46 | expect(state.bag).toMatchObject(bag); 47 | expect(state.external_input).toMatchObject(external_input); 48 | expect(state.result).toMatchObject(result); 49 | 
const { ProcessStatus, Nodes } = require("@flowbuild/engine");
const Ajv = require("ajv");
const addFormats = require("ajv-formats");
const { logger } = require("../utils/logger");
const { nanoid } = require("nanoid");
const uuid = require("uuid");

/**
 * System task node that generates an id: either a uuid (v1 default, v4
 * optional) or a nanoid (optionally with a custom size).
 * Result shape: { id: string }.
 */
class CreateUuidNode extends Nodes.SystemTaskNode {
  static get schema() {
    return {
      type: "object",
      required: ["id", "name", "next", "type", "lane_id", "parameters"],
      properties: {
        id: { type: "string" },
        name: { type: "string" },
        next: { type: "string" },
        type: { type: "string" },
        category: { type: "string" },
        lane_id: { type: "string" },
        parameters: {
          type: "object",
          properties: {
            input: {
              type: "object",
              required: ["type"],
              properties: {
                type: { type: "string", enum: ["uuid", "nanoid"] },
                options: {
                  type: "object",
                  properties: {
                    version: { type: "string", enum: ["v1", "v4"] },
                    size: {
                      oneOf: [{ type: "integer" }, { type: "object" }],
                    },
                  },
                },
              },
            },
          },
        },
      },
    };
  }

  /**
   * Validates a node spec against CreateUuidNode.schema.
   * @returns {[boolean, string]} [isValid, JSON-serialized ajv errors]
   */
  static validate(spec) {
    const ajv = new Ajv({ allErrors: true });
    addFormats(ajv);
    const validate = ajv.compile(CreateUuidNode.schema);
    const validation = validate(spec);
    return [validation, JSON.stringify(validate.errors)];
  }

  validate() {
    return CreateUuidNode.validate(this._spec);
  }

  async _run(executionData) {
    const result = {};

    // Supported uuid generators; any other/absent version falls back to v1.
    const generators = {
      v1: () => uuid.v1(),
      v4: () => uuid.v4(),
    };

    try {
      if (executionData.type === "nanoid") {
        // Optional custom size; nanoid() default length otherwise.
        result.id = executionData.options?.size ? nanoid(executionData.options.size) : nanoid();
      } else {
        const generate = generators[executionData.options?.version];
        // BUG FIX: the previous code invoked version[uVersion]() before the
        // "|| uuid.v1()" fallback could apply, so an unsupported version
        // (e.g. "v3", used in src/samples/blueprints/createUuid.js) threw a
        // TypeError instead of defaulting to v1.
        result.id = generate ? generate() : uuid.v1();
      }
    } catch (error) {
      logger.error("NODE.ERROR", `ERROR AT NID [${this.id}] | CREATE UUID | unexpected error`, {
        node_id: this.id,
        error: error,
      });
      throw new Error(error);
    }

    return [result, ProcessStatus.RUNNING];
  }
}

module.exports = CreateUuidNode;
blueprint example, only systemTasks", 4 | "blueprint_spec": { 5 | "requirements": ["core"], 6 | "prepare": [], 7 | "nodes": [ 8 | { 9 | "id": "1", 10 | "type": "Start", 11 | "name": "Start Pizza 1 WF", 12 | "next": "2", 13 | "parameters": { 14 | "input_schema": {} 15 | }, 16 | "lane_id": "1" 17 | }, 18 | { 19 | "id": "2", 20 | "type": "SystemTask", 21 | "name": "Order Pizza", 22 | "category": "setToBag", 23 | "next": "3", 24 | "lane_id": "1", 25 | "parameters": { 26 | "input": { 27 | "client": { "$ref": "bag.name" }, 28 | "client1": "teste", 29 | "pizzas": { 30 | "qty": 2, 31 | "flavors": ["mussarela", "pepperoni"], 32 | "olives": false 33 | } 34 | } 35 | } 36 | }, 37 | { 38 | "id": "3", 39 | "type": "SystemTask", 40 | "name": "Take the order", 41 | "category": "setToBag", 42 | "next": "4", 43 | "lane_id": "1", 44 | "parameters": { 45 | "input": { 46 | "orderNo": { "$js": "() => Math.floor(Math.random() * 100); " } 47 | } 48 | } 49 | }, 50 | { 51 | "id": "4", 52 | "type": "SystemTask", 53 | "name": "Prepare Pizza", 54 | "category": "Timer", 55 | "next": "5", 56 | "lane_id": "1", 57 | "parameters": { 58 | "input": {}, 59 | "timeout": 5 60 | } 61 | }, 62 | { 63 | "id": "5", 64 | "type": "SystemTask", 65 | "category": "SetToBag", 66 | "name": "Bring Pizza", 67 | "next": "6", 68 | "lane_id": "1", 69 | "parameters": { 70 | "input": { 71 | "comment": { 72 | "$mustache": "check if there are {{bag.pizzas.qty}} pizzas in the bag" 73 | } 74 | } 75 | } 76 | }, 77 | { 78 | "id": "6", 79 | "type": "SystemTask", 80 | "category": "setToBag", 81 | "name": "Receive Pizza", 82 | "next": "7", 83 | "lane_id": "1", 84 | "parameters": { 85 | "input": { 86 | "confirm": { "$ref": "bag.orderNo" } 87 | } 88 | } 89 | }, 90 | { 91 | "id": "7", 92 | "type": "Finish", 93 | "name": "Finish node", 94 | "next": null, 95 | "lane_id": "1" 96 | } 97 | ], 98 | "lanes": [ 99 | { 100 | "id": "1", 101 | "name": "the_only_lane", 102 | "rule": ["fn", ["&", "args"], true] 103 | } 104 | ], 105 | 
"environment": {} 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /src/nodes/basicAuthNode.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | const { Nodes, ProcessStatus, utils } = require("@flowbuild/engine"); 3 | const { merge } = require("lodash"); 4 | const Ajv = require("ajv"); 5 | const addFormats = require("ajv-formats"); 6 | const axios = require("axios").default; 7 | 8 | class BasicAuthNode extends Nodes.SystemTaskNode { 9 | static get schema() { 10 | return merge(super.schema, { 11 | type: "object", 12 | properties: { 13 | next: { type: "string" }, 14 | parameters: { 15 | type: "object", 16 | properties: { 17 | request: { 18 | type: "object", 19 | required: ['verb','baseUrl','route','auth'], 20 | properties: { 21 | verb: { type: "string" }, 22 | baseUrl: { 23 | oneOf: [{ type: "string" }, { type: "object" }], 24 | }, 25 | route: { 26 | oneOf: [{ type: "string" }, { type: "object" }], 27 | }, 28 | auth: { 29 | type: "object", 30 | required: ['username', 'password'], 31 | properties: { 32 | username: { 33 | oneOf: [{ type: "string" }, { type: "object" }], 34 | }, 35 | password: { 36 | oneOf: [{ type: "string" }, { type: "object" }], 37 | }, 38 | }, 39 | }, 40 | headers: { type: "object" }, 41 | }, 42 | }, 43 | }, 44 | }, 45 | }, 46 | }); 47 | } 48 | 49 | static validate(spec) { 50 | const ajv = new Ajv({ allErrors: true }); 51 | addFormats(ajv); 52 | const validate = ajv.compile(BasicAuthNode.schema); 53 | const validation = validate(spec); 54 | return [validation, JSON.stringify(validate.errors)]; 55 | } 56 | 57 | validate() { 58 | return BasicAuthNode.validate(this._spec); 59 | } 60 | 61 | async _run(executionData) { 62 | const { verb, baseUrl, route, auth, headers } = this.request; 63 | console.log(this.request) 64 | const result = await axios({ 65 | method: verb, 66 | url: route, 67 | baseURL: baseUrl, 68 | auth, 69 | headers, 70 
| data: executionData, 71 | maxContentLength: process.env.MAX_CONTENT_LENGTH || 20000, 72 | maxBodyLength: process.env.MAX_BODY_LENGTH || 20000, 73 | timeout: process.env.TIMEOUT || 30000, 74 | }); 75 | return [{ status: result.status, data: result.data }, ProcessStatus.RUNNING]; 76 | } 77 | 78 | _preProcessing({ bag, input, actor_data, environment, parameters }) { 79 | this.request = utils.prepare(this._spec.parameters.request, { bag, result: input, actor_data, environment, parameters }); 80 | return super._preProcessing({ bag, input, actor_data, environment, parameters }); 81 | } 82 | } 83 | 84 | module.exports = BasicAuthNode; 85 | -------------------------------------------------------------------------------- /res/pizza.md: -------------------------------------------------------------------------------- 1 | # Pizza Order 2 | 3 | O pedido de pizza é um caso frequentemente utilizado para exemplificar diagramas de processo em BPMN. 4 | 5 | Apresentamos aqui 2 exemplos de fluxos de pedido de pizza, para demonstrar os diferentes tipos de nós e algumas das principais funcionalidades do FlowBuild. 6 | 7 | Alguns materiais disponíveis: 8 | - coleção para postman com as rotas disponíveis neste projeto 9 | - variáveis de ambiente para a coleção do postman 10 | - diagrama BPMN do fluxo pizza1 e pizza2 11 | 12 | ## Geração de tokens 13 | 14 | Para acessar a API, é necessário ter um token válido, os requisitos mínimos para gerar um token: 15 | 16 | Algorithm: HS256 17 | 18 | Payload (conteúdo mínimo) 19 | - exp: timestamp 20 | - actor_id: string 21 | - claims: arrayOf(string) 22 | 23 | Verify Signature Secret: 1234 24 | 25 | Sugestão: https://jwt.io/ 26 | 27 | --- 28 | 29 | ## Pizza 1 30 | 31 | Este fluxo é composto por 7 nós em uma única lane. 32 | A lane exige somente um token válido, não expirado. 33 | 34 | Cada nó tem apresenta diferentes recursos do flowBuild. 35 | Todos os nós são tarefas de sistema do tipo setToBag, que tem como objetivo persistir dados no processo. 
#### Node 2
Guarda os dados do pedido e apresenta o intérprete $ref, que nos permite referenciar dados que estejam no token (actor_data), no result ou na bag do processo.

#### Node 3
Apresenta o intérprete $js, que permite executar uma função javascript — nesse caso específico, gerar um número aleatório.

#### Node 4
Apresenta um timerNode, que pausa o processo pela quantidade de segundos descrita no campo timeout.

#### Node 5
Apresenta o intérprete [$mustache](http://mustache.github.io/)

#### Node 6
Outra demonstração do intérprete $ref

---

## Pizza 2

Este fluxo é composto por 12 nós em 2 lanes.
A lane 1, assim como no Pizza 1, exige somente um token válido, não expirado.
A lane 2 necessita que o token utilizado contenha, no campo **claims**, a string *restaurant*

Abaixo o que apresentamos em cada nó.

### Node 2
Apresenta uma UserTask. O processo fica parado, esperando a conclusão da tarefa, registrada através da chamada /submit.
É esperado que na submissão da tarefa os campos qty, flavors e comments sejam enviados.

### Node 3
Apresenta uma chamada HTTP, que pode ser utilizada para integrar com qualquer tipo de serviço via chamada REST.
Demonstra a aplicação prática do intérprete $ref.
O endpoint utilizado representa uma simples demonstração criada em https://mockapi.io/

### Node 4
Apresenta a aplicação prática de um setToBag. Guarda na bag do processo a resposta da request do nó anterior.

### Node 5
Nova userTask, porém agora necessita que o token utilizado cumpra as regras da lane 2.

### Node 6
Repete uma chamada HTTP, demonstrando o uso prático do $mustache para construção da URL.

### Node 7
Nova userTask, novamente na lane 1.
Exige que a submissão contenha o campo is_order_ok.
83 | 84 | ### Node 8 85 | Apresenta um FlowNode, que roteia o fluxo em função do valor do campo is_order_ok do nó 7. 86 | 87 | -------------------------------------------------------------------------------- /src/controllers/healthcheck.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const pkg = require('../../package.json'); 3 | const { logger } = require('../utils/logger'); 4 | const { getClient } = require('../services/broker/mqtt'); 5 | const { getEngine, getCockpit } = require("../engine"); 6 | 7 | const healthCheck = async (ctx, next) => { 8 | logger.verbose('Called healthCheck'); 9 | 10 | const engine = getEngine(); 11 | const cockpit = getCockpit(); 12 | const mqttClient = getClient(); 13 | 14 | const expiredTimers = await cockpit.fetchTimersReady(); 15 | const activeTimers = await cockpit.fetchTimersActive(); 16 | 17 | let rabbitMQ, broker = undefined; 18 | if (process.env.AMQP === "true") { 19 | rabbitMQ = { 20 | status: process.env.AMQP, 21 | hostname: process.env.BROKER_HOST, 22 | queue: process.env.BROKER_QUEUE 23 | } 24 | 25 | broker = { 26 | activityManager: process.env.ACTIVITY_MANAGER_BROKER, 27 | processState: process.env.PROCESS_STATE_BROKER, 28 | engineLogs: process.env.ENGINE_LOGS_BROKER 29 | } 30 | } 31 | 32 | ctx.body = { 33 | message: 'Flowbuild API is fine!', 34 | version: pkg.version, 35 | engine: { 36 | version: pkg.dependencies['@flowbuild/engine'], 37 | latestEvent: engine.emitter.event 38 | }, 39 | timers: { 40 | batch: process.env.TIMER_BATCH, 41 | queue: process.env.TIMER_QUEUE, 42 | ready: process.env.TIMER_BATCH === 0 ? 
expiredTimers.length : "engine timer processing disabled", 43 | active: activeTimers.length 44 | }, 45 | 'diagram-builder': pkg.dependencies['@flowbuild/nodejs-diagram-builder'], 46 | 'indexer': pkg.dependencies['@flowbuild/indexer'], 47 | mqtt: { 48 | status: process.env.MQTT, 49 | hostname: mqttClient?._client?.options?.hostname, 50 | protocol: mqttClient?._client?.options?.protocol, 51 | client: mqttClient?._client?.options?.clientId 52 | }, 53 | rabbitMQ, 54 | configuration: { 55 | logLevels: { 56 | engine: process.env.ENGINE_LOG_LEVEL, 57 | server: process.env.KOA_LOG_LEVEL, 58 | pushStateEvents: process.env.PUBLISH_STATE_EVENTS, 59 | pushEngineLogs: process.env.PUBLISH_ENGINE_LOGS, 60 | pushServerLogs: process.env.PUBLISH_SERVER_LOGS 61 | }, 62 | engine: { 63 | heartbeat: process.env.ENGINE_HEARTBEAT, 64 | maxStepNumber: process.env.MAX_STEP_NUMBER 65 | }, 66 | httpNodes: { 67 | maxLength: process.env.MAX_CONTENT_LENGTH, 68 | timeout: process.env.HTTP_TIMEOUT, 69 | maxBody: process.env.MAX_BODY_LENGTH 70 | }, 71 | OpenTelemetry: { 72 | status: process.env.OTEL_ENABLED === "true" ? "enabled" : "disabled", 73 | serviceName: process.env.OTEL_SERVICE_NAME, 74 | newRelic: process.env.NEW_RELIC_ENABLED === "true" ? 
"active" : "inactive", 75 | collector: process.env.OTEL_COLLECTOR_URL 76 | }, 77 | broker 78 | } 79 | } 80 | 81 | if(process.env.MAX_READY_TIMERS && expiredTimers.length > process.env.MAX_READY_TIMERS) { 82 | ctx.status = 409; 83 | return next(); 84 | } 85 | 86 | ctx.status = 200; 87 | 88 | return next(); 89 | }; 90 | 91 | module.exports = { 92 | healthCheck 93 | } -------------------------------------------------------------------------------- /src/samples/blueprints/createUuid.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "create_uuid", 3 | description: "workflow for testing create uuid node", 4 | blueprint_spec: { 5 | requirements: ["core"], 6 | prepare: [], 7 | nodes: [ 8 | { 9 | id: "0", 10 | type: "Start", 11 | name: "Start node", 12 | next: "v1", 13 | parameters: { 14 | input_schema: {}, 15 | }, 16 | lane_id: "free", 17 | }, 18 | { 19 | id: "v1", 20 | type: "systemTask", 21 | category: "createUuid", 22 | name: "UUID v1", 23 | next: "v3", 24 | lane_id: "free", 25 | parameters: { 26 | input: { 27 | type: "uuid", 28 | options: { 29 | version: "v1" 30 | }, 31 | }, 32 | }, 33 | }, 34 | { 35 | id: "v3", 36 | type: "systemTask", 37 | category: "createUuid", 38 | name: "UUID v3", 39 | next: "v4", 40 | lane_id: "free", 41 | parameters: { 42 | input: { 43 | type: "uuid", 44 | options: { 45 | version: "v3" 46 | }, 47 | }, 48 | }, 49 | }, 50 | { 51 | id: "v4", 52 | type: "systemTask", 53 | category: "createUuid", 54 | name: "UUID v5", 55 | next: "v5", 56 | lane_id: "free", 57 | parameters: { 58 | input: { 59 | type: "uuid", 60 | options: { 61 | version: "v4" 62 | }, 63 | }, 64 | }, 65 | }, 66 | { 67 | id: "v5", 68 | type: "systemTask", 69 | category: "createUuid", 70 | name: "UUID v5", 71 | next: "nano", 72 | lane_id: "free", 73 | parameters: { 74 | input: { 75 | type: "uuid", 76 | options: { 77 | version: "v5" 78 | }, 79 | }, 80 | }, 81 | }, 82 | { 83 | id: "nano", 84 | type: "systemTask", 85 | 
category: "createUuid", 86 | name: "UUID v5", 87 | next: "nano10", 88 | lane_id: "free", 89 | parameters: { 90 | input: { 91 | type: "nanoid" 92 | }, 93 | }, 94 | }, 95 | { 96 | id: "nano10", 97 | type: "systemTask", 98 | category: "createUuid", 99 | name: "UUID v5", 100 | next: "end", 101 | lane_id: "free", 102 | parameters: { 103 | input: { 104 | type: "nanoid", 105 | options: { 106 | size: 10 107 | } 108 | }, 109 | }, 110 | }, 111 | { 112 | id: "end", 113 | type: "Finish", 114 | name: "Finish node", 115 | next: null, 116 | lane_id: "free", 117 | }, 118 | ], 119 | lanes: [ 120 | { 121 | id: "free", 122 | name: "the_only_lane", 123 | rule: ["fn", ["&", "args"], true], 124 | }, 125 | ], 126 | environment: {}, 127 | }, 128 | }; 129 | -------------------------------------------------------------------------------- /db/seeds/blueprints/test_workflow_blueprint.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "requirements": ["core", "test_workflow_package"], 3 | "prepare": [], 4 | "nodes": [ 5 | { 6 | "id": "1", 7 | "type": "Start", 8 | "name": "Start node", 9 | "next": "2", 10 | "parameters": { 11 | "input_schema": {}, 12 | }, 13 | "lane_id": "1" 14 | }, 15 | { 16 | "id": "2", 17 | "type": "UserTask", 18 | "name": "User Task", 19 | "next": "3", 20 | "lane_id": "1", 21 | "parameters": { 22 | "action": "do something", 23 | "input": { 24 | "dates": {"$ref": "bag.dates"}, 25 | "uuids": {"$ref": "bag.uuids"} 26 | } 27 | } 28 | }, 29 | { 30 | "id": "3", 31 | "type": "SystemTask", 32 | "category": "SetToBag", 33 | "name": "Set To Bag Task", 34 | "next": "4", 35 | "lane_id": "1", 36 | "parameters": { 37 | "input": { 38 | "keyword": {"$ref": "result.keyword"}, 39 | "n_js": {"$ref": "result.n_js"}, 40 | "n_interp": {"$ref": "result.n_interp"} 41 | } 42 | } 43 | }, 44 | { 45 | "id": "4", 46 | "type": "ScriptTask", 47 | "name": "Scripted Task", 48 | "next": "5", 49 | "lane_id": "1", 50 | "parameters": { 51 | "input": 
{ 52 | "n_interp": {"$ref": "bag.n_interp"} 53 | }, 54 | "script": { 55 | "package": "test_workflow_package", 56 | "function": "lisp_test_task", 57 | "type": "js" 58 | } 59 | } 60 | }, 61 | { 62 | "id": "5", 63 | "type": "SystemTask", 64 | "category": "SetToBag", 65 | "name": "Set To Bag Task", 66 | "next": "6", 67 | "lane_id": "1", 68 | "parameters": { 69 | "input": { 70 | "dates": {"$ref": "result.dates"} 71 | } 72 | } 73 | }, 74 | { 75 | "id": "6", 76 | "type": "SystemTask", 77 | "category": "SetToBag", 78 | "name": "Set To Bag Task", 79 | "next": "7", 80 | "lane_id": "1", 81 | "parameters": { 82 | "input": { 83 | "uuids": {"$ref": "result.uuids"} 84 | } 85 | } 86 | }, 87 | { 88 | "id": "7", 89 | "type": "Flow", 90 | "name": "Flow node", 91 | "next": { 92 | "default": "2", 93 | "end": "8" 94 | }, 95 | "lane_id": "1", 96 | "parameters": { 97 | "input": { 98 | "keyword": {"$ref": "bag.keyword"} 99 | } 100 | } 101 | }, 102 | { 103 | "id": "8", 104 | "type": "Finish", 105 | "name": "Finish node", 106 | "next": null, 107 | "lane_id": "1" 108 | } 109 | ], 110 | "lanes": [ 111 | { 112 | "id": "1", 113 | "name": "the_only_lane", 114 | "rule": ["fn", ["&", "args"], true] 115 | } 116 | ], 117 | "environment": {}, 118 | } 119 | -------------------------------------------------------------------------------- /src/nodes/kafkaPublishNode.js: -------------------------------------------------------------------------------- 1 | const kafka = require("../services/broker/kafka"); 2 | const { ProcessStatus, Nodes } = require("@flowbuild/engine"); 3 | const Ajv = require("ajv"); 4 | const addFormats = require("ajv-formats"); 5 | const { logger } = require("../utils/logger"); 6 | 7 | class KafkaPublishNode extends Nodes.SystemTaskNode { 8 | static get schema() { 9 | return { 10 | type: "object", 11 | required: ["id", "name", "next", "type", "lane_id", "parameters"], 12 | properties: { 13 | id: { type: "string" }, 14 | name: { type: "string" }, 15 | category: { type: "string" }, 16 | 
type: { type: "string" }, 17 | next: { type: "string" }, 18 | parameters: { 19 | type: "object", 20 | required: ["input"], 21 | properties: { 22 | input: { 23 | type: "object", 24 | required: ["message", "event", "topic"], 25 | properties: { 26 | message: { type: "object" }, 27 | event: { oneOf: [{ type: "object" }, { type: "string" }] }, 28 | topic: { oneOf: [{ type: "object" }, { type: "string" }] }, 29 | }, 30 | }, 31 | }, 32 | }, 33 | lane_id: { type: "string" }, 34 | }, 35 | }; 36 | } 37 | 38 | static validate(spec, schema = null) { 39 | const ajv = new Ajv({ allErrors: true }); 40 | addFormats(ajv); 41 | const validationSchema = schema || KafkaPublishNode.schema; 42 | const validate = ajv.compile(validationSchema); 43 | const validation = validate(spec); 44 | return [validation, JSON.stringify(validate.errors)]; 45 | } 46 | 47 | validate() { 48 | return KafkaPublishNode.validate(this._spec); 49 | } 50 | 51 | static validateExecutionData(spec) { 52 | const schema = { 53 | type: "object", 54 | required: ["message", "event", "topic"], 55 | properties: { 56 | message: { type: "object" }, 57 | event: { type: "string" }, 58 | topic: { type: "string" }, 59 | }, 60 | }; 61 | return KafkaPublishNode.validate(spec, schema); 62 | } 63 | 64 | _preProcessing({ bag, input, actor_data, environment, parameters }) { 65 | const executionData = super._preProcessing({ bag, input, actor_data, environment, parameters }); 66 | return { ...executionData, ...{ process_id: parameters.process_id } }; 67 | } 68 | 69 | async _run(executionData) { 70 | try { 71 | logger.debug("KafkaPublish Node running"); 72 | const [is_valid, validation_errors] = KafkaPublishNode.validateExecutionData(executionData); 73 | if (!is_valid) { 74 | const errors = JSON.parse(validation_errors).map((err) => `field '${err.instancePath}' ${err.message}`); 75 | throw JSON.stringify(errors); 76 | } 77 | const { message, event, topic } = executionData; 78 | const result = await kafka.publishMessage({ 79 | topic, 80 
| message: { ...message, ...{ processId: executionData.process_id } }, 81 | key: event, 82 | }); 83 | 84 | return [{ data: result }, ProcessStatus.RUNNING]; 85 | } catch (err) { 86 | logger.error("KafkaPublish node failed", err); 87 | throw err; 88 | } 89 | } 90 | } 91 | 92 | module.exports = KafkaPublishNode; 93 | -------------------------------------------------------------------------------- /src/nodes/graphqlNode.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | const { Nodes, ProcessStatus, utils } = require("@flowbuild/engine"); 3 | const Ajv = require("ajv"); 4 | const addFormats = require("ajv-formats"); 5 | const axios = require("axios").default; 6 | const { query, mutation } = require("gql-query-builder"); 7 | 8 | class GraphQlNode extends Nodes.SystemTaskNode { 9 | static get schema() { 10 | let mySchema = super.schema; 11 | mySchema.properties.parameters = { 12 | type: "object", 13 | required: ["request", "input"], 14 | properties: { 15 | request: { 16 | type: "object", 17 | required: ["baseUrl", "verb"], 18 | properties: { 19 | baseUrl: { oneOf: [{ type: "string" }, { type: "object" }] }, 20 | route: { oneOf: [{ type: "string" }, { type: "object" }] }, 21 | verb: { oneOf: [{ type: "string" }, { type: "object" }] }, 22 | headers: { type: "object" }, 23 | }, 24 | }, 25 | input: { 26 | type: "object", 27 | required: ["action", "operation"], 28 | properties: { 29 | action: { oneOf: [{ type: "string" }, { type: "object" }] }, 30 | operation: { oneOf: [{ type: "string" }, { type: "object" }] }, 31 | fields: { oneOf: [{ type: "array" }, { type: "object" }] }, 32 | variables: { type: "object" }, 33 | }, 34 | }, 35 | }, 36 | }; 37 | 38 | return mySchema; 39 | } 40 | 41 | static validate(spec) { 42 | const ajv = new Ajv({ allErrors: true }); 43 | addFormats(ajv); 44 | const validate = ajv.compile(GraphQlNode.schema); 45 | const validation = validate(spec); 46 | return [validation, 
JSON.stringify(validate.errors)]; 47 | } 48 | 49 | validate() { 50 | return GraphQlNode.validate(this._spec); 51 | } 52 | 53 | async _run(executionData) { 54 | const { request, input } = executionData; 55 | 56 | const actions = { 57 | query: (input) => { 58 | return query({ 59 | operation: input.operation, 60 | fields: input.fields, 61 | variables: input.variables, 62 | }); 63 | }, 64 | mutation: (input) => { 65 | return mutation({ 66 | operation: input.operation, 67 | fields: input.fields, 68 | variables: input.variables, 69 | }); 70 | }, 71 | }; 72 | 73 | const gqlData = actions[input.action](input); 74 | 75 | const requestConfig = { 76 | method: request.verb, 77 | url: request.route, 78 | baseURL: request.baseUrl, 79 | data: gqlData, 80 | headers: request.headers, 81 | validateStatus: function (status) { 82 | return status <= 599; 83 | }, 84 | }; 85 | 86 | const result = await axios(requestConfig); 87 | 88 | return [ 89 | { status: result.status, data: result.data }, 90 | ProcessStatus.RUNNING, 91 | ]; 92 | } 93 | 94 | _preProcessing({ bag, input, actor_data, environment, parameters }) { 95 | return utils.prepare(this._spec.parameters, { 96 | bag, 97 | result: input, 98 | actor_data, 99 | environment, 100 | parameters, 101 | }); 102 | } 103 | } 104 | 105 | module.exports = GraphQlNode; 106 | -------------------------------------------------------------------------------- /src/tests/activityManagerValidation.test.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | const axios = require("axios"); 3 | const { db } = require("./utils/db"); 4 | const { startServer } = require("../app"); 5 | const { delay, cleanDb } = require("./utils/auxiliar"); 6 | const { config } = require("./utils/requestConfig"); 7 | const { tearDownEnvironment, createTestEngine, createTestCockpit } = require("./utils/fixtures"); 8 | 9 | const logger = (...args) => process.env.TESTS_VERBOSE ? 
logger(...args) : undefined 10 | 11 | //SAMPLES 12 | const activitySchemaValidation = require("../samples/blueprints/activitySchemaValidation"); 13 | 14 | let server; 15 | let activityManagerId; 16 | 17 | beforeAll(async () => { 18 | createTestEngine(db); 19 | createTestCockpit(db); 20 | 21 | server = startServer(3001); 22 | axios.defaults.baseURL = config.baseURL; 23 | axios.defaults.headers = config.headers; 24 | axios.defaults.validateStatus = config.validateStatus; 25 | 26 | await cleanDb(); 27 | 28 | //CRIAR O WORKFLOW 29 | await axios.post("/workflows", activitySchemaValidation); 30 | }); 31 | 32 | beforeEach(async () => { 33 | //INICIAR O PROCESSO 34 | const process = await axios.post( 35 | `/workflows/name/${activitySchemaValidation.name}/start`, 36 | {} 37 | ); 38 | const processId = process.data.process_id; 39 | await delay(200) 40 | logger(`PID ${processId}`); 41 | //OBTER O ID DO ACTIVITY_MANAGER 42 | const activityManager = await axios.get(`/processes/${processId}/activity`); 43 | logger(`AMID ${activityManager.data.id}`); 44 | activityManagerId = activityManager.data.id; 45 | }); 46 | 47 | afterAll(async () => tearDownEnvironment(server, db)); 48 | 49 | describe("Validation @ POST activity_manager/:id/submit", () => { 50 | test.each([ 51 | {}, 52 | { 53 | date: 1 54 | }, 55 | { 56 | date: 'whatever' 57 | }, 58 | { 59 | date: '2020-30-05' 60 | }, 61 | { 62 | date: '2020-19-05' 63 | }, 64 | ])("Should return 400 for an invalid date", async (payload) => { 65 | let response = await axios.post(`activity_manager/${activityManagerId}/submit`, payload); 66 | expect(response.status).toEqual(400); 67 | }); 68 | 69 | test("Should return 202 for a valid date", async () => { 70 | const payload = { date: '2021-09-15' }; 71 | 72 | let response = await axios.post(`activity_manager/${activityManagerId}/submit`, payload); 73 | logger(response) 74 | expect(response.status).toBe(202); 75 | }); 76 | }); 77 | 78 | describe("Validation @ POST activity_manager/:id/commit", 
() => { 79 | test.each([ 80 | {}, 81 | { 82 | date: 1 83 | }, 84 | { 85 | date: 'whatever' 86 | }, 87 | { 88 | date: '2020-30-05' 89 | }, 90 | { 91 | date: '2020-19-05' 92 | }, 93 | ])("Should return 400 for an invalid date", async (payload) => { 94 | let response = await axios.post(`activity_manager/${activityManagerId}/commit`, payload); 95 | expect(response.status).toEqual(400); 96 | }); 97 | 98 | test("Should return 200 for a valid date", async () => { 99 | const payload = { date: '2021-09-15' }; 100 | 101 | let response = await axios.post(`activity_manager/${activityManagerId}/commit`, payload); 102 | logger(response) 103 | expect(response.status).toBe(200); 104 | }); 105 | }); -------------------------------------------------------------------------------- /src/app.js: -------------------------------------------------------------------------------- 1 | const Koa = require("koa"); 2 | const cors = require("koa2-cors"); 3 | const koaLogger = require("koa-logger-winston"); 4 | const jwt = require("koa-jwt"); 5 | const { userAgent } = require("koa-useragent"); 6 | const helmet = require("koa-helmet"); 7 | const serve = require("koa-static"); 8 | const pathToSwaggerUi = require("swagger-ui-dist").absolutePath(); 9 | 10 | const freeRouter = require("./routers/freeRouter"); 11 | const mainRouter = require("./routers/mainRouter"); 12 | const cockpitRouter = require("./routers/cockpitRouter"); 13 | 14 | const { setEngine, getEngine, setCockpit, getCockpit } = require("./engine"); 15 | const { Engine, Cockpit } = require("@flowbuild/engine"); 16 | const cockpitService = require("./services/cockpit"); 17 | const { setCustomNodes } = require("../src/nodes"); 18 | 19 | const _log = require("./utils/logger"); 20 | const elog = require("./utils/engineLogger"); 21 | const listeners = require("./utils/engineListener"); 22 | const broker = require("./services/broker/index"); 23 | const { db } = require("./utils/db"); 24 | const { jwtSecret, jwtAlgorithms, jwtPassthrough } = 
require("./utils/jwtSecret");
const { setPersist } = require("./middlewares/persist");

// Builds the engine/cockpit singletons, wires middlewares and routers, and
// starts the HTTP server on `port`. Returns the listening http.Server.
const startServer = (port) => {
  const engineLogLevel = process.env.ENGINE_LOG_LEVEL || "warn";
  elog.startLogger();
  // Reuse existing engine/cockpit singletons when already initialized
  // (e.g. when startServer is called more than once across test runs).
  let engine = getEngine();
  if (!engine) {
    engine = new Engine("knex", db, engineLogLevel);
    setEngine(engine);
  }
  let cockpit = getCockpit();
  if (!cockpit) {
    cockpit = new Cockpit("knex", db, engineLogLevel);
    setCockpit(cockpit);
    cockpitService.setDbConnection(db);
  }
  setCustomNodes();

  broker.connect();

  listeners.activateNotifiers(engine);

  // NOTE(review): the hard-coded fallback key is a development default —
  // ensure CRYPTO_KEY is always set in production deployments.
  const crypto = engine.buildCrypto("aes-256-cbc", {
    key: process.env.CRYPTO_KEY || "12345678901234567890123456789012",
  });
  engine.setCrypto(crypto);

  const app = new Koa();
  const corsOptions = {
    origin: "*",
    allowMethods: ["GET", "POST", "DELETE", "PUT", "PATCH"],
    allowHeaders: ["Content-Type", "Authorization", "Accept", "x-duration", "x-secret"],
  };
  app.use(cors(corsOptions));
  app.use(helmet());
  app.use(setPersist(db));
  app.use(userAgent);
  // Trust proxy headers (X-Forwarded-*) when resolving client info.
  app.proxy = true;

  app.use(koaLogger(_log.logger));

  // Static assets for the Swagger UI.
  app.use(serve(pathToSwaggerUi, { index: false }));
  app.use(serve("public/swagger-ui", { index: false }));
  app.use(serve("res/swagger", { index: false }));

  // Unauthenticated routes.
  app.use(freeRouter({ corsOptions }).routes());

  // Main API routes behind JWT authentication.
  app.use(
    mainRouter({
      corsOptions,
      middlewares: [
        jwt({
          passthrough: jwtPassthrough,
          secret: jwtSecret,
          debug: true,
          algorithms: [jwtAlgorithms]
        }),
      ],
    }).routes()
  );

  // Cockpit routes. NOTE(review): unlike mainRouter above, no `algorithms`
  // or `passthrough` options are passed here — confirm the asymmetry is intended.
  app.use(
    cockpitRouter({
      corsOptions,
      middlewares: [jwt({ secret: jwtSecret, debug: true })],
    })
      .prefix("/cockpit")
      .routes()
  );

  return app.listen(port, function () {
    _log.logger.info("Flowbuild API Server running");
  });
};

module.exports = {
  startServer,
};
-------------------------------------------------------------------------------- /package.json: --------------------------------------------------------------------------------
{
  "name": "@flowbuild/workflow",
  "version": "2.32.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "tests": "ENGINE_LOG_LEVEL=error KOA_LOG_LEVEL=error jest --coverage --runInBand --forceExit",
    "test:single": "jest -i --detectOpenHandles --forceExit",
    "seeds": "knex --env ${KNEX_ENV} --knexfile knexfile.js seed:run",
    "seeds:local": "knex --env dockerLocal --knexfile knexfile.js seed:run",
    "migrations": "knex --env ${KNEX_ENV} --knexfile knexfile.js migrate:latest",
    "migrations:local": "knex --env dockerLocal --knexfile knexfile.js migrate:latest",
    "start": "node src/server.js",
    "dev": "nodemon src/server.js",
    "get-version": "echo $npm_package_version",
    "release": "semantic-release",
    "export": "node ./scripts/export.js",
    "lint": "eslint ./src --ext .js --max-warnings=0"
  },
  "devDependencies": {
    "@semantic-release/changelog": "6.0.3",
    "@semantic-release/git": "10.0.1",
    "@semantic-release/npm": "10.0.2",
    "conventional-changelog-conventionalcommits": "5.0.0",
    "eslint": "8.43.0",
    "jest": "29.5.0",
    "nodemon": "2.0.22",
    "semantic-release": "20.1.3",
    "supertest": "6.3.3"
  },
  "dependencies": {
    "@flowbuild/engine": "2.32.1",
    "@flowbuild/indexer": "1.0.2",
    "@flowbuild/nodejs-diagram-builder": "1.1.0",
    "@flowbuild/process-tree": "1.0.4",
    "@grpc/grpc-js": "1.8.13",
    "@grpc/proto-loader": "0.7.6",
    "@koa/router": "12.0.0",
    "@opentelemetry/api": "1.4.1",
    "@opentelemetry/auto-instrumentations-node": "0.36.4",
    "@opentelemetry/exporter-trace-otlp-grpc": "0.36.1",
    "@opentelemetry/instrumentation-koa": "0.34.2",
    "@opentelemetry/resources": "1.10.1",
    "@opentelemetry/sdk-node": "0.36.1",
    "@opentelemetry/semantic-conventions": "1.10.1",
    "@supercharge/request-ip": "1.2.0",
    "ajv": "8.12.0",
    "ajv-formats": "2.1.1",
    "amqplib": "0.10.3",
    "async-mqtt": "2.6.3",
    "axios": "1.4.0",
    "dotenv": "16.3.1",
    "gql-query-builder": "3.8.0",
    "grpc-client-promise-wrapper": "1.0.5",
    "grpc-reflection-js": "0.1.2",
    "jsrsasign": "10.7.0",
    "kafkajs": "^2.2.4",
    "knex": "2.4.2",
    "koa": "2.14.2",
    "koa-bodyparser": "4.4.0",
    "koa-compose": "4.1.0",
    "koa-helmet": "6.1.0",
    "koa-jwt": "4.0.4",
    "koa-logger": "3.2.1",
    "koa-logger-winston": "0.0.2",
    "koa-static": "5.0.0",
    "koa-useragent": "4.1.0",
    "koa2-cors": "2.0.6",
    "lodash": "4.17.21",
    "nanoid": "3.3.4",
    "newrelic": "9.14.1",
    "npm": "9.6.2",
    "pg": "8.11.0",
    "raw-body": "2.5.2",
    "swagger-ui-dist": "4.17.0",
    "uuid": "8.3.2",
    "winston": "3.9.0",
    "ws": "8.13.0"
  },
  "keywords": [],
  "author": "FDTE-DSD",
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/flow-build/workflow.git"
  },
  "nodemonConfig": {
    "ext": "js",
    "ignore": [
      "**/samples/**"
    ]
  },
  "homepage": "https://github.com/flow-build/workflow#readme"
}