├── .adr-dir
├── app
│   ├── main.js
│   └── config
│       └── consumer-config.js
├── .env.dev
├── test
│   ├── unit
│   │   └── examlpe.spec.js
│   ├── support
│   │   ├── create-topics.js
│   │   └── server.js
│   └── integration
│       └── create-topic-produce-consume.js
├── Dockerfile
├── .vscode
│   └── settings.json
├── .dockerignore
├── dispatcher.yml
├── .eslintrc.json
├── doc
│   └── architecture
│       └── decisions
│           └── 0001-record-architecture-decisions.md
├── dev.yml
├── Makefile
├── package.json
├── .gitignore
├── .circleci
│   └── config.yml
├── kafka-webhook-dispatcher-initail-archicteccture.drawio
└── README.md

/.adr-dir:
--------------------------------------------------------------------------------
doc/architecture/decisions

--------------------------------------------------------------------------------
/app/main.js:
--------------------------------------------------------------------------------
console.log('No code until we have CI, DevOps')

--------------------------------------------------------------------------------
/.env.dev:
--------------------------------------------------------------------------------
EULA=https://dl.lenses.stream/d/?id=b96f331c-45be-4a1f-b802-8be0d41e05de
SAMPLEDATA=0
RUNNING_SAMPLEDATA=0

--------------------------------------------------------------------------------
/test/unit/examlpe.spec.js:
--------------------------------------------------------------------------------
function sum(a, b) {
  return a + b
}

test('adds 1 + 2 to equal 3', () => {
  expect(sum(1, 2)).toBe(3)
})

--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
FROM node:11.15.0-alpine AS dev

WORKDIR /app

COPY package*.json ./

RUN npm install

COPY . .

CMD [ "npm", "start" ]

--------------------------------------------------------------------------------
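The stage is named `dev`, which hints at an intended multi-stage dev/prod split. A minimal sketch of what that might look like; the `prod` stage and its `npm prune` step are assumptions, not part of this repo:

```dockerfile
# Dev stage: full install, used by docker-compose
FROM node:11.15.0-alpine AS dev
WORKDIR /app
COPY package*.json ./
RUN npm install
COPY . .
CMD [ "npm", "start" ]

# Hypothetical prod stage: reuse the dev build but drop devDependencies
FROM dev AS prod
ENV NODE_ENV=production
RUN npm prune --production
CMD [ "npm", "start" ]
```

The production image would then be built with `docker build --target prod -t kafka-webhook-dispatcher .`.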
/.vscode/settings.json:
--------------------------------------------------------------------------------
{
  "cSpell.words": [
    "Logstash",
    "URL's",
    "performant",
    "roundrobin",
    "uuid's",
    "webhook",
    "webhooks"
  ]
}

--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
# External Dependencies
node_modules

# Static Files
README.md
package-lock.json
.gitignore

# Dev Tools
Makefile
dev.yml
dispatcher.yml

# Linters and Code Standardisers
.eslintrc.json
.vscode

--------------------------------------------------------------------------------
/dispatcher.yml:
--------------------------------------------------------------------------------
version: '3.5'

services:
  nodejs:
    build:
      context: .
      dockerfile: Dockerfile
    command: npm start
    env_file:
      - .env
    networks:
      shared_net:
      default:

networks:
  shared_net:
    driver: bridge
  default:
    driver: bridge

--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
{
  "env": {
    "node": true,
    "commonjs": true,
    "es6": true,
    "jest/globals": true
  },
  "extends": ["eslint:recommended"],
  "globals": {
    "Atomics": "readonly",
    "SharedArrayBuffer": "readonly"
  },
  "parserOptions": {
    "ecmaVersion": 2018,
    "sourceType": "module"
  },
  "rules": {
    "semi": [2, "never"],
    "quotes": [2, "single"]
  },
  "plugins": ["jest"]
}

--------------------------------------------------------------------------------
/doc/architecture/decisions/0001-record-architecture-decisions.md:
--------------------------------------------------------------------------------
# 1. Record architecture decisions

Date: 2019-07-01

## Status

Accepted

## Context

We need to record the architectural decisions made on this project.

## Decision

We will use Architecture Decision Records, as [described by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions).

## Consequences

See Michael Nygard's article, linked above. For a lightweight ADR toolset, see Nat Pryce's [adr-tools](https://github.com/npryce/adr-tools).

--------------------------------------------------------------------------------
/dev.yml:
--------------------------------------------------------------------------------
version: '3.5'

services:
  postgres:
    image: postgres:11.2-alpine
    ports:
      - 5432
    volumes:
      - postgres_data:/var/lib/postgresql
    networks:
      shared_net:
      default:

  kafka:
    image: landoop/kafka-lenses-dev
    env_file:
      - .env.dev
    networks:
      shared_net:
      default:
    ports:
      - 3030:3030
      - 9092:9092
      - 2181:2181
      - 8081:8081
      - 9581:9581
      - 9582:9582
      - 9584:9584
      - 9585:9585

volumes:
  postgres_data:

networks:
  shared_net:
    driver: bridge
  default:
    driver: bridge

--------------------------------------------------------------------------------
/app/config/consumer-config.js:
--------------------------------------------------------------------------------
// Default Kafka options used in this application

const clientOptions = {
  kafkaHost: process.env.KAFKA_HOST
}

const payloads = [{
  topic: process.env.WEBHOOK_TOPIC_NAME
}]

const consumerOptions = {
  groupId: process.env.WEBHOOK_CONSUMER_GROUP_ID
}

const consumerGroupOptions = {
  kafkaHost: process.env.KAFKA_HOST,
  ssl: process.env.WEBHOOK_CONSUMER_GROUP_ID_SSL,
  groupId: process.env.WEBHOOK_CONSUMER_GROUP_ID,
  sessionTimeout: 15000,
  protocol: ['roundrobin'],
  fromOffset: 'latest', // default
  commitOffsetsOnFirstJoin: true,
  outOfRangeOffset: 'earliest'
}

module.exports = { clientOptions, payloads, consumerOptions, consumerGroupOptions }

--------------------------------------------------------------------------------
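A minimal sketch of how these exports might be wired up with kafka-node's `ConsumerGroup`; the handler body and logging are illustrative only, not part of this repo:

```js
// Sketch: feed the shared config into a kafka-node consumer group
const kafka = require('kafka-node')
const config = require('./consumer-config')

const consumerGroup = new kafka.ConsumerGroup(
  config.consumerGroupOptions,
  config.payloads.map((payload) => payload.topic)
)

consumerGroup.on('message', (message) => {
  // a real handler would dispatch the webhook here
  console.log('Received message', message.topic, message.offset)
})

consumerGroup.on('error', (err) => {
  console.error('Consumer group error', err)
})
```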
/test/support/create-topics.js:
--------------------------------------------------------------------------------
// Libraries
const kafka = require('kafka-node')

// Config
const config = require('../../app/config/consumer-config')

const client = new kafka.KafkaClient(config.clientOptions)

const topicsToCreate = [{
  topic: process.env.WEBHOOK_TOPIC_NAME,
  partitions: parseInt(process.env.WEBHOOK_TOPIC_PARTITION_COUNT),
  replicationFactor: parseInt(process.env.WEBHOOK_TOPIC_REPLICATION_FACTOR)
}]

console.log('\tCreating topics', topicsToCreate)
client.createTopics(topicsToCreate, (error, result) => {
  console.log('Creating topic error', error)
  console.log('Creating topic result', result)
})

console.log('\tExiting in 10 seconds')
setTimeout(() => {
  process.exit(0)
}, 10000)
console.log('------')

--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
#
# This is a Makefile for running commands in this application
#
COMPOSE=docker-compose -f dev.yml -f dispatcher.yml
COMPOSE_DISPATCH=docker-compose -f dispatcher.yml

init: ## Initialise the project
	make build dev-up create-topics

build: ## Build all required services
	${COMPOSE} build

up: ## Start the application
	${COMPOSE} up -d

%-up: ## Boot only the given compose file
	docker-compose -f $*.yml up -d

stop: ## Stop the application
	${COMPOSE} stop

down: ## Remove all containers and volumes
	${COMPOSE} down

restart: ## Restart all the containers
	make stop
	make up

tail: ## Tail the application logs
	${COMPOSE} logs --tail="100" -f

produce: ## Produce some messages so that we can test
	${COMPOSE_DISPATCH} run nodejs npm run produce

create-topics: ## Create the topics required for testing
	${COMPOSE_DISPATCH} run nodejs npm run setup

integration-test: ## Run the integration tests
	make build dev-up
	${COMPOSE_DISPATCH} run nodejs npm run test:integration

--------------------------------------------------------------------------------
/test/support/server.js:
--------------------------------------------------------------------------------
// Simple Webhook Server
//
// This server is a simple Express server designed to mimic responses from
// webhook endpoints

// Libraries
const express = require('express')
const { json } = require('body-parser')

// Config
const app = express().use(json()) // creates http server
const token = 'test' // the verification token this server expects

// Setup routes
app.get('/', (req, res) => {
  // check if verification token is correct
  if (req.query.token !== token) {
    return res.sendStatus(401)
  }

  // return challenge
  return res.end(req.query.challenge)
})

app.post('/', (req, res) => {
  // check if verification token is correct
  if (req.query.token !== token) {
    return res.sendStatus(401)
  }

  // print request body
  console.log(req.body)

  // return a text response
  const data = {
    responses: [
      {
        type: 'text',
        elements: ['Hi', 'Hello']
      }
    ]
  }

  res.json(data)
})

app.listen(3000, () => console.log('[BotEngine] Webhook is listening'))

--------------------------------------------------------------------------------
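Once the server above is running on port 3000, it can be exercised with `curl`; `test` is the token hard-coded in the file, and the challenge string is arbitrary:

```bash
# Verification handshake: echoes the challenge back when the token matches
curl 'http://localhost:3000/?token=test&challenge=abc123'

# Simulated webhook delivery: logs the body and returns the canned JSON response
curl -X POST 'http://localhost:3000/?token=test' \
  -H 'Content-Type: application/json' \
  -d '{"event": "example"}'
```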
/package.json:
--------------------------------------------------------------------------------
{
  "name": "kafka-webhook-dispatcher",
  "version": "1.0.0",
  "description": "Receive Kafka messages and dispatch webhooks",
  "main": "app/main.js",
  "scripts": {
    "start": "node app/main.js",
    "test": "jest",
    "test:integration": "node test/integration/create-topic-produce-consume.js"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/simonireilly/kafka-webhook-dispatcher.git"
  },
  "keywords": [
    "kafka",
    "webhook",
    "nodejs"
  ],
  "author": "simonireilly",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/simonireilly/kafka-webhook-dispatcher/issues"
  },
  "homepage": "https://github.com/simonireilly/kafka-webhook-dispatcher#readme",
  "engines": {
    "node": ">=10.0.0",
    "npm": ">=6.0.0"
  },
  "dependencies": {
    "body-parser": "^1.19.0",
    "kafka-node": "^4.1.3"
  },
  "devDependencies": {
    "eslint": "^6.0.1",
    "eslint-config-standard": "^12.0.0",
    "eslint-plugin-import": "^2.18.0",
    "eslint-plugin-jest": "^22.7.1",
    "eslint-plugin-node": "^9.1.0",
    "eslint-plugin-promise": "^4.2.1",
    "eslint-plugin-standard": "^4.0.0",
    "express": "^4.17.1",
    "jest": "^24.8.0"
  }
}

--------------------------------------------------------------------------------
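The scripts block above maps to the commands below. Note that the Makefile's `produce` and `create-topics` targets call `npm run produce` and `npm run setup`, which are not yet defined here.

```bash
# Unit tests (jest)
npm test

# Integration test: needs a reachable broker plus the WEBHOOK_* variables
npm run test:integration

# Run the consumer entrypoint
npm start
```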
/.gitignore:
--------------------------------------------------------------------------------
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# TypeScript v1 declaration files
typings/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env
.env.test

# parcel-bundler cache (https://parceljs.org/)
.cache

# next.js build output
.next

# nuxt.js build output
.nuxt

# vuepress build output
.vuepress/dist

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

--------------------------------------------------------------------------------
/.circleci/config.yml:
--------------------------------------------------------------------------------
version: 2.1

executors:
  node-only:
    working_directory: ~/app
    docker:
      - image: circleci/node:11.15.0
        environment:
          KAFKA_HOST: localhost:9092
          WEBHOOK_TOPIC_NAME: webhooks
          WEBHOOK_TOPIC_OFFSET: 0
          WEBHOOK_TOPIC_PARTITION: 0
          WEBHOOK_TOPIC_PARTITION_COUNT: 1
          WEBHOOK_TOPIC_REPLICATION_FACTOR: 1
          WEBHOOK_CONSUMER_GROUP_ID: node-webhook-dispatcher
  kafka-node:
    working_directory: ~/app
    docker:
      - image: circleci/node:11.15.0
        environment:
          KAFKA_HOST: localhost:9092
          WEBHOOK_TOPIC_NAME: webhooks
          WEBHOOK_TOPIC_OFFSET: 0
          WEBHOOK_TOPIC_PARTITION: 0
          WEBHOOK_TOPIC_PARTITION_COUNT: 1
          WEBHOOK_TOPIC_REPLICATION_FACTOR: 1
          WEBHOOK_CONSUMER_GROUP_ID: node-webhook-dispatcher
      - image: wurstmeister/zookeeper
      - image: wurstmeister/kafka:2.11-0.11.0.3
        environment:
          KAFKA_ADVERTISED_HOST_NAME: localhost
          KAFKA_ADVERTISED_PORT: 9092
          KAFKA_PORT: 9092
          KAFKA_ZOOKEEPER_CONNECT: localhost:2181
          KAFKA_DELETE_TOPIC_ENABLE: true

commands:
  prepare-node:
    description: "Install dependencies from cache for web-app"
    steps:
      - run:
          name: update-npm
          command: 'sudo npm install -g npm@latest'
      - restore_cache:
          key: dependency-cache-{{ checksum "package.json" }}
      - run:
          name: install-npm
          command: npm install
      - save_cache:
          key: dependency-cache-{{ checksum "package.json" }}
          paths:
            - ./node_modules

jobs:
  integration:
    executor: kafka-node
    steps:
      - checkout
      - prepare-node
      - run:
          name: test
          command: npm run test:integration

  unit:
    executor: node-only
    steps:
      - checkout
      - prepare-node
      - run:
          name: test
          command: npm run test

workflows:
  version: 2
  test:
    jobs:
      - unit
      - integration

--------------------------------------------------------------------------------
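The `kafka-node` executor above runs Zookeeper and a single Kafka broker on a shared localhost. A rough local equivalent using the same images (a sketch only; it relies on host networking, so Linux hosts):

```bash
# Mirror the CI broker layout: everything bound to the host's localhost
docker run -d --network host wurstmeister/zookeeper
docker run -d --network host \
  -e KAFKA_ADVERTISED_HOST_NAME=localhost \
  -e KAFKA_ADVERTISED_PORT=9092 \
  -e KAFKA_PORT=9092 \
  -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 \
  -e KAFKA_DELETE_TOPIC_ENABLE=true \
  wurstmeister/kafka:2.11-0.11.0.3
```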
/test/integration/create-topic-produce-consume.js:
--------------------------------------------------------------------------------
// Libraries
const kafka = require('kafka-node')

// Config
const config = require('../../app/config/consumer-config')

// Setup - Constants
const testTopicName = `test-create-topic-produce-consume-${Date.now()}`

const topicsToCreate = [{
  topic: testTopicName,
  partitions: parseInt(process.env.WEBHOOK_TOPIC_PARTITION_COUNT),
  replicationFactor: parseInt(process.env.WEBHOOK_TOPIC_REPLICATION_FACTOR)
}]

// Messages to send
const payloads = [{
  topic: testTopicName,
  messages: `Test message produced ${Date.now()}`,
  partition: parseInt(process.env.WEBHOOK_TOPIC_PARTITION)
}]

// Client setup
const client = new kafka.KafkaClient(config.clientOptions)

// Create the topic if it does not exist
client.createTopics(topicsToCreate, (error, result) => {
  if (error !== null) {
    console.log('\nCreating topic error', error)
    process.exit(1)
  } else {
    console.log('\nCreating topic result', result)
  }
})

// Consumer/Producer Setup
const consumer = new kafka.Consumer(client, [{ topic: testTopicName }], config.consumerOptions)
const producer = new kafka.Producer(client)

// Produce a new message
producer.on('ready', function () {
  producer.send(payloads, function (err, data) {
    if (err !== null) {
      console.log('\nProducer: Message err', {
        err,
        payloads
      })
      process.exit(1)
    }
    console.log('\nProducer: Message sent', {
      data,
      payloads
    })
  })
})

producer.on('error', function (err) {
  console.log('\nProducer: Message error', err)
  process.exit(1)
})

// Attach a consumer to the test topic
//
// Allow it to read through all messages
//
// We process exit within the message processing, so the offset
// is not committed - this is fine as we will likely tear down
// between integration tests, and can create topics for each test
//
try {
  consumer.on('message', function (message) {
    console.log('\nConsumer: Message received', message)
    if (message.value === payloads[0].messages) {
      console.log('\nTests Passed')
      process.exit(0)
    } else {
      return true
    }
  })

  consumer.on('error', function (err) {
    console.log('\nConsumer: Message error', err)
    process.exit(1)
  })
} catch (e) {
  console.log('\nConsumer: Error catch', e)
  process.exit(1)
}

--------------------------------------------------------------------------------
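To run this test outside CI, export the same variables the executors define (the values below mirror `.circleci/config.yml`) against a reachable broker:

```bash
KAFKA_HOST=localhost:9092 \
WEBHOOK_TOPIC_NAME=webhooks \
WEBHOOK_TOPIC_PARTITION=0 \
WEBHOOK_TOPIC_PARTITION_COUNT=1 \
WEBHOOK_TOPIC_REPLICATION_FACTOR=1 \
WEBHOOK_CONSUMER_GROUP_ID=node-webhook-dispatcher \
npm run test:integration
```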
/kafka-webhook-dispatcher-initail-archicteccture.drawio:
--------------------------------------------------------------------------------
[binary content omitted: base64-encoded draw.io diagram of the initial architecture; open this file in diagrams.net to view it]

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
[![CircleCI](https://circleci.com/gh/simonireilly/kafka-webhook-dispatcher.svg?style=svg)](https://circleci.com/gh/simonireilly/kafka-webhook-dispatcher)

This is an app for consuming Kafka messages and dispatching webhooks.

- [Design Choices](#Design-Choices)
  - [Language](#Language)
- [Architecture](#Architecture)
  - [Webhooks Received](#Webhooks-Received)
  - [Webhooks URL Monitor](#Webhooks-URL-Monitor)
  - [Webhooks Dispatcher](#Webhooks-Dispatcher)
- [Developer](#Developer)
  - [Getting Started](#Getting-Started)
  - [Testing](#Testing)
    - [Methodology](#Methodology)
      - [Unit Tests](#Unit-Tests)
      - [Integration Tests](#Integration-Tests)

# Design Choices

## Language

- The language should be mature and have a mature Kafka client

Kafka benchmarking:

| Client Type | Throughput (msgs/sec) |
|-------------|-----------------------|
| Java        | 40,000 - 50,000       |
| Go          | 28,000 - 30,000       |
| Node        | 6,000 - 8,000         |
| Kafka-pixy  | 700 - 800             |
| Logstash    | 250                   |

- The language should be performant
- The language should support concurrency

**Go or Node are preferable.**

# Architecture

Document architecture decisions using [ADR TOOLS](https://github.com/npryce/adr-tools).
**If you cannot install the tools, use the provided template.**

## Webhooks Received

- A single receiving topic feeds the persist-and-validate consumer (a sketch appears at the end of this section):
  - This consumer persists all webhooks and validates them afterward.
  - Invalid webhooks are not sent on to the next consumer.
  - Valid UUIDs are passed on to the next topic.

## Webhooks URL Monitor

- Unique URLs should be persisted and monitored:
  - When a message arrives with a new URL, that URL is added to the ENDPOINTS table.
  - Each URL is then monitored periodically for availability.

## Webhooks Dispatcher

- A single delivery topic, fed by the webhooks-received consumer, is dispatched from here:
  - This consumer polls the URL provided for the webhooks and sends the webhooks on to the next stage.
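A minimal sketch of the persist-and-validate consumer described under Webhooks Received, assuming kafka-node; the topic names, the UUID check, and the `persist` placeholder are illustrative assumptions, not part of this repo:

```js
const kafka = require('kafka-node')

const client = new kafka.KafkaClient({ kafkaHost: process.env.KAFKA_HOST })
const producer = new kafka.Producer(client)

const UUID_PATTERN = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i

producer.on('ready', () => {
  const consumer = new kafka.Consumer(client, [{ topic: 'webhooks' }], {})

  consumer.on('message', (message) => {
    // persist(message) would write the raw webhook to Postgres here (hypothetical helper)

    let webhook
    try {
      webhook = JSON.parse(message.value)
    } catch (e) {
      return // unparseable payloads are persisted but never forwarded
    }

    if (!UUID_PATTERN.test(webhook.uuid)) {
      return // invalid webhooks are not sent on to the next consumer
    }

    // valid UUIDs are passed on to the next topic
    producer.send([{ topic: 'validated-webhooks', messages: message.value }], (err) => {
      if (err) console.error('Failed to forward webhook', err)
    })
  })
})
```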
# Developer

## Getting Started

```bash
git clone git@github.com:simonireilly/kafka-webhook-dispatcher.git

# Build app, compose up and tail logs
make build up tail

# Stop the app
make stop
```

## Testing

### Methodology

#### Unit Tests

1. Create exported functions to be passed to a consumer, and unit test those functions.

#### Integration Tests

1. Spin up the app with a consumer and producer, and connect to a Kafka cluster
2. Receive messages and process them into the database
3. Retrieve successfully stored messages and validate the contents
   1. Each URL in the collection is a valid URL
   2. The body is valid JSON (it can be JSON-parsed, which is all we can really check)
4. Post successfully validated webhooks concurrently for all URLs provided
5. Create a container to receive webhooks, and test the response handling (sketched below)
   1. Status - 200
   2. Status - 404
   3. Status - 401
   4. Status - 301
   5. Status - 500

--------------------------------------------------------------------------------
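A minimal sketch of the receiver container from step 5, assuming Express; the route names and port are illustrative assumptions, chosen so each endpoint returns one of the status codes under test:

```js
const express = require('express')

const app = express()

// Each route mimics a webhook endpoint with a fixed outcome
app.post('/ok', (req, res) => res.sendStatus(200))
app.post('/missing', (req, res) => res.sendStatus(404))
app.post('/unauthorised', (req, res) => res.sendStatus(401))
app.post('/moved', (req, res) => res.redirect(301, '/ok'))
app.post('/broken', (req, res) => res.sendStatus(500))

app.listen(4000, () => console.log('Status fixture server listening on port 4000'))
```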