├── .gitignore
├── .jshintrc
├── .travis.yml
├── LICENSE
├── README.md
├── bin
│   ├── nkc-sequelize-sink.js
│   └── nkc-sequelize-source.js
├── config
│   ├── default.js
│   └── loadConfig.js
├── index.js
├── kafka-setup
│   ├── docker-compose.yml
│   ├── start.sh
│   └── stop.sh
├── lib
│   ├── SequelizeSinkConfig.js
│   ├── SequelizeSourceConfig.js
│   ├── sink
│   │   ├── SequelizeSinkConnector.js
│   │   └── SequelizeSinkTask.js
│   ├── source
│   │   ├── SequelizeSourceConnector.js
│   │   └── SequelizeSourceTask.js
│   └── utils
│       ├── ConverterFactory.js
│       └── JsonConverter.js
├── package.json
├── test
│   ├── README.md
│   ├── int
│   │   └── Connector.test.js
│   ├── sink-config.js
│   └── source-config.js
└── yarn.lock

/.gitignore:
--------------------------------------------------------------------------------
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage

# nyc test coverage
.nyc_output

# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# Typescript v1 declaration files
typings/

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env

.idea/
.vscode/
logs/
--------------------------------------------------------------------------------
/.jshintrc:
--------------------------------------------------------------------------------
{
  "globals": {
    "LOG": true
  },
  "esversion": 6,
  "node": true
}
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
language: node_js
node_js:
  - "12"
env:
  global:
    - ZOOKEEPER_PEERS=localhost:2181
    - KAFKA_PEERS=localhost:9092
    - KST_TOPIC=sc_test_topic
    - CXX=g++-4.8

before_install:
  - wget http://archive.apache.org/dist/kafka/1.1.0/kafka_2.12-1.1.0.tgz -O kafka.tgz
  - mkdir -p kafka && tar xzf kafka.tgz -C kafka --strip-components 1
  - nohup bash -c "cd kafka && bin/zookeeper-server-start.sh config/zookeeper.properties &"
  - nohup bash -c "cd kafka && bin/kafka-server-start.sh config/server.properties &"
  - sleep 5
  - kafka/bin/kafka-topics.sh --create --partitions 1 --replication-factor 1 --topic sc_test_topic --zookeeper localhost:2181
  - kafka/bin/kafka-topics.sh --create --partitions 1 --replication-factor 1 --topic sc_test_topic_2 --zookeeper localhost:2181
  - sleep 2

script:
  - rm -rf node_modules
  - yarn
  - yarn add --frozen-lockfile node-rdkafka@2.7.4
  - yarn test-ci

addons:
  apt:
    sources:
      - ubuntu-toolchain-r-test
    packages:
      - g++-4.8
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2017 nodefluent

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# sequelize-kafka-connect

Node.js Kafka Connect connector for MySQL, Postgres, SQLite and MSSQL databases

[![Build Status](https://travis-ci.org/nodefluent/sequelize-kafka-connect.svg?branch=master)](https://travis-ci.org/nodefluent/sequelize-kafka-connect)

[![Coverage Status](https://coveralls.io/repos/github/nodefluent/sequelize-kafka-connect/badge.svg?branch=master)](https://coveralls.io/github/nodefluent/sequelize-kafka-connect?branch=master)

## Use API

```
npm install --save sequelize-kafka-connect
```

## A note on native mode

If you are using native mode (`config: { noptions: {} }`), you will have to install
`node-rdkafka` manually alongside kafka-connect. This requires a Node.js version
between 9 and 12; it will not work with Node.js >= 13 (last tested with 12.16.1).

On Mac OS High Sierra / Mojave:
`CPPFLAGS=-I/usr/local/opt/openssl/include LDFLAGS=-L/usr/local/opt/openssl/lib yarn add --frozen-lockfile node-rdkafka@2.7.4`

Otherwise:
`yarn add --frozen-lockfile node-rdkafka@2.7.4`

(Please note: installing with npm does not work, as it will remove your other
dependencies; use yarn instead: `npm i -g yarn`.)
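
As a quick sketch, native mode is selected simply by providing a `noptions` block of
librdkafka-style properties instead of the plain `options` block; the complete example
is listed in the Native Clients configuration section below:

```es6
const config = {
    kafka: {
        noptions: {
            "metadata.broker.list": "localhost:9092",
            "group.id": "n-test-group",
            "client.id": "kcs-test"
        }
    }
    //...topic, connector and http settings stay the same, see below
};
```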

### database -> kafka

```es6
const { runSourceConnector } = require("sequelize-kafka-connect");
runSourceConnector(config, [], onError).then(config => {
    //runs forever until: config.stop();
});
```

### kafka -> database

```es6
const { runSinkConnector } = require("sequelize-kafka-connect");
runSinkConnector(config, [], onError).then(config => {
    //runs forever until: config.stop();
});
```

### kafka -> database (with a custom topic, i.e. no source-task topic)

```es6
const { runSinkConnector, ConverterFactory } = require("sequelize-kafka-connect");

const tableSchema = {
    "id": {
        "type": "integer",
        "allowNull": false,
        "primaryKey": true
    },
    "name": {
        "type": "varchar(255)",
        "allowNull": true
    }
};

const etlFunc = (messageValue, callback) => {

    //type is an example json format field
    if (messageValue.type === "publish") {
        return callback(null, {
            id: messageValue.payload.id,
            name: messageValue.payload.name
        });
    }

    if (messageValue.type === "unpublish") {
        return callback(null, null); //null value will cause deletion
    }

    callback(new Error("unknown messageValue.type"));
};

const converter = ConverterFactory.createSinkSchemaConverter(tableSchema, etlFunc);

runSinkConnector(config, [converter], onError).then(config => {
    //runs forever until: config.stop();
});

/*
    this example is able to store kafka message values
    that look like this (completely unrelated to messages created by a default SourceTask):
    {
        payload: {
            id: 123,
            name: "bla"
        },
        type: "publish"
    }
*/
```

## Use CLI
note: in BETA :seedling:

```
npm install -g sequelize-kafka-connect
```

```
# run source etl: database -> kafka
nkc-sequelize-source --help
```

```
# run sink etl: kafka -> database
nkc-sequelize-sink --help
```
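
For orientation, a source run against the SQLite table from the test setup could look
like this (all values here are illustrative, not defaults):

```
nkc-sequelize-source -l sqlite -q /absolute/path/test-db.sqlite -d accounts -f id -t sc_test_topic -v 2000 -o 50
```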

## Config(uration)
```es6
const config = {
    kafka: {
        //zkConStr: "localhost:2181/",
        kafkaHost: "localhost:9092",
        logger: null,
        groupId: "kc-sequelize-test",
        clientName: "kc-sequelize-test-name",
        workerPerPartition: 1,
        options: {
            sessionTimeout: 8000,
            protocol: ["roundrobin"],
            fromOffset: "earliest", //latest
            fetchMaxBytes: 1024 * 100,
            fetchMinBytes: 1,
            fetchMaxWaitMs: 10,
            heartbeatInterval: 250,
            retryMinTimeout: 250,
            requireAcks: 1,
            //ackTimeoutMs: 100,
            //partitionerType: 3
        }
    },
    topic: "sc_test_topic",
    partitions: 1,
    maxTasks: 1,
    pollInterval: 2000,
    produceKeyed: true,
    produceCompressionType: 0,
    connector: {
        options: {
            host: "localhost",
            port: 5432,
            dialect: "sqlite",
            pool: {
                max: 5,
                min: 0,
                idle: 10000
            },
            storage: path.join(__dirname, "test-db.sqlite")
        },
        database: null,
        user: null,
        password: null,
        maxPollCount: 50,
        table: "accounts",
        incrementingColumnName: "id"
    },
    http: {
        port: 3149,
        middlewares: []
    },
    enableMetrics: true,
    batch: {
        batchSize: 100,
        commitEveryNBatch: 1,
        concurrency: 1,
        commitSync: true
    }
};
```

## Native Clients Config(uration)
```es6
const config = {

    kafka: {
        noptions: {
            "metadata.broker.list": "localhost:9092",
            "group.id": "n-test-group",
            "enable.auto.commit": false,
            "debug": "all",
            "event_cb": true,
            "client.id": "kcs-test"
        },
        tconf: {
            "auto.offset.reset": "earliest",
            "request.required.acks": 1
        }
    },

    topic: "sc_test_topic",
    partitions: 1,
    maxTasks: 1,
    pollInterval: 2000,
    produceKeyed: true,
    produceCompressionType: 0,
    connector: {
        options: {
            host: "localhost",
            port: 5432,
            dialect: "sqlite",
            pool: {
                max: 5,
                min: 0,
                idle: 10000
            },
            storage: path.join(__dirname, "test-db.sqlite")
        },
        database: null,
        user: null,
        password: null,
        maxPollCount: 50,
        table: "accounts",
        incrementingColumnName: "id"
    },
    http: {
        port: 3149,
        middlewares: []
    },
    enableMetrics: true
};
```
--------------------------------------------------------------------------------
/bin/nkc-sequelize-sink.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

const program = require("commander");
const path = require("path");
const { runSinkConnector } = require("./../index.js");
const pjson = require("./../package.json");
const loadConfig = require("./../config/loadConfig.js");

program
    .version(pjson.version)
    .option("-c, --config [string]", "Path to Config (optional)")
    .option("-k, --kafka [string]", "Zookeeper Connection String")
    .option("-n, --name [string]", "Kafka Client Name")
    .option("-g, --group [string]", "Kafka ConsumerGroup Id")
    .option("-t, --topic [string]", "Kafka Topic to read from")
    .option("-h, --db_host [string]", "Database Host")
    .option("-l, --dialect [string]", "Database dialect (mysql, sqlite, mssql, postgres)")
    .option("-b, --db_db [string]", "Database Database")
    .option("-p, --db_port [string]", "Database Port")
    .option("-u, --db_user [string]", "Database User")
    .option("-w, --db_pass [string]", "Database Password") //-w, since -p is already taken by --db_port
    .option("-d, --datastore [string]", "Target table name")
    .option("-q, --sqlite_storage [string]", "SQlite3 DB Storage path (absolute)")
    .parse(process.argv);

const config = loadConfig(program.config);

if (program.kafka) {
    config.kafka.zkConStr = program.kafka;
}

if (program.name) {
    config.kafka.clientName = program.name;
}

if (program.topic) {
    config.topic = program.topic;
}

if (program.db_host) {
    config.connector.options.host = program.db_host;
}

if (program.db_db) {
    config.connector.database = program.db_db;
}

if (program.db_port) {
    config.connector.options.port = program.db_port;
}

if (program.db_user) {
    config.connector.user = program.db_user;
}

if (program.db_pass) {
    config.connector.password = program.db_pass;
}

if (program.datastore) {
    config.connector.table = program.datastore;
}

if (program.sqlite_storage) {
    config.connector.options.storage = program.sqlite_storage;
}

if (program.dialect) {
    config.connector.options.dialect = program.dialect;
}

runSinkConnector(config, [], console.log.bind(console)).then(sink => {

    const exit = (isExit = false) => {
        sink.stop();
        if (!isExit) {
            process.exit();
        }
    };

    process.on("SIGINT", () => {
        exit(false);
    });

    process.on("exit", () => {
        exit(true);
    });
});
--------------------------------------------------------------------------------
/bin/nkc-sequelize-source.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

const program = require("commander");
const path = require("path");
const { runSourceConnector } = require("./../index.js");
const pjson = require("./../package.json");
const loadConfig = require("./../config/loadConfig.js");

program
    .version(pjson.version)
    .option("-c, --config [string]", "Path to Config (alternatively)")
    .option("-k, --kafka [string]", "Zookeeper Connection String")
    .option("-n, --name [string]", "Kafka Client Name")
    .option("-t, --topic [string]", "Kafka Topic to Produce to")
    .option("-a, --partitions [integer]", "Amount of Kafka Topic Partitions")
    .option("-h, --db_host [string]", "Database Host")
    .option("-l, --dialect [string]", "Database dialect (mysql, sqlite, mssql, postgres)")
    .option("-b, --db_db [string]", "Database Database")
    .option("-p, --db_port [string]", "Database Port")
    .option("-u, --db_user [string]", "Database User")
    .option("-w, --db_pass [string]", "Database Password") //-w, since -p is already taken by --db_port
    .option("-d, --datastore [string]", "Tablename of the Source")
    .option("-f, --ifield [string]", "Table Identifier Fieldname")
    .option("-v, --interval [integer]", "Table poll interval (ms)")
    .option("-o, --max_pollcount [integer]", "Max row count per poll action")
    .option("-q, --sqlite_storage [string]", "SQlite3 DB Storage path (absolute)")
    .parse(process.argv);

const config = loadConfig(program.config);

if (program.kafka) {
    config.kafka.zkConStr = program.kafka;
}

if (program.name) {
    config.kafka.clientName = program.name;
}

if (program.topic) {
    config.topic = program.topic;
}

if (program.partitions) {
    config.partitions = program.partitions;
}

if (program.db_host) {
    config.connector.options.host = program.db_host;
}

if (program.db_db) {
    config.connector.database = program.db_db;
}

if (program.db_port) {
    config.connector.options.port = program.db_port;
}

if (program.db_user) {
    config.connector.user = program.db_user;
}

if (program.db_pass) {
    config.connector.password = program.db_pass;
}

if (program.datastore) {
    config.connector.table = program.datastore;
}

if (program.ifield) {
    config.connector.incrementingColumnName = program.ifield;
}

if (program.interval) {
    config.pollInterval = program.interval;
}

if (program.max_pollcount) {
    config.connector.maxPollCount = program.max_pollcount;
}

if (program.sqlite_storage) {
    config.connector.options.storage = program.sqlite_storage;
}

if (program.dialect) {
    config.connector.options.dialect = program.dialect;
}

runSourceConnector(config, [], console.log.bind(console)).then(sink => {

    const exit = (isExit = false) => {
        sink.stop();
        if (!isExit) {
            process.exit();
        }
    };

    process.on("SIGINT", () => {
        exit(false);
    });

    process.on("exit", () => {
        exit(true);
    });
});
--------------------------------------------------------------------------------
/config/default.js:
--------------------------------------------------------------------------------
"use strict";

const path = require("path");

const config = {
    kafka: {
        //zkConStr: "localhost:2181/",
        kafkaHost: "localhost:9092",
        logger: null,
        groupId: "kc-sequelize-group",
        clientName: "kc-sequelize-client",
        workerPerPartition: 1,
        options: {
            sessionTimeout: 8000,
            protocol: ["roundrobin"],
            fromOffset: "earliest", //latest
            fetchMaxBytes: 1024 * 100,
            fetchMinBytes: 1,
            fetchMaxWaitMs: 10,
            heartbeatInterval: 250,
            retryMinTimeout: 250,
            autoCommit: true,
            autoCommitIntervalMs: 1000,
            requireAcks: 1,
            //ackTimeoutMs: 100,
            //partitionerType: 3
        }
    },
    topic: "sc_table_topic",
    partitions: 1,
    maxTasks: 1,
    pollInterval: 250,
    produceKeyed: true,
    produceCompressionType: 0,
    connector: {
        options: {
            host: "localhost",
            port: 5432,
            dialect: "sqlite",
            pool: {
                max: 5,
                min: 0,
                idle: 10000
            },
            logging: () => {},
            storage: path.join(__dirname, "db.sqlite")
        },
        database: null,
        user: null,
        password: null,
        maxPollCount: 50,
        table: "my_table",
        incrementingColumnName: "id"
    },
    http: {
        port: 3149,
        middlewares: []
    },
    enableMetrics: true,
    batch: {
        batchSize: 100,
        commitEveryNBatch: 1,
        concurrency: 1,
        commitSync: true
    }
};

module.exports = config;
--------------------------------------------------------------------------------
/config/loadConfig.js:
--------------------------------------------------------------------------------
"use strict";

const path = require("path");
const defaultConfig = require("./default.js");

function isObject(item) {
    return (item && typeof item === "object" && !Array.isArray(item) && item !== null);
}

function mergeDeep(target, source) {
    let output = Object.assign({}, target);
    if (isObject(target) && isObject(source)) {
        Object.keys(source).forEach(key => {
            if (isObject(source[key])) {
                if (!(key in target))
                    Object.assign(output, {
                        [key]: source[key]
                    });
                else
                    output[key] = mergeDeep(target[key], source[key]);
            } else {
                Object.assign(output, {
                    [key]: source[key]
                });
            }
        });
    }
    return output;
}
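
// To illustrate mergeDeep (the values here are invented): nested objects are
// merged key by key, while scalar leaves from the user config overwrite the
// defaults, e.g.
// mergeDeep({ kafka: { groupId: "a", options: { requireAcks: 1 } } },
//           { kafka: { groupId: "b" } })
// -> { kafka: { groupId: "b", options: { requireAcks: 1 } } }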

function loadConfig(uri = null, configPath = "./../config/default.js") {

    if (uri) {
        if (path.isAbsolute(uri)) {
            configPath = uri;
        } else {
            configPath = path.join(path.dirname(process.argv[1]), uri);
        }
    }

    let config = require(configPath);

    if (!config || typeof config !== "object") {
        return console.log("Failed to load config @ " + configPath);
    }

    return mergeDeep(defaultConfig, config);
}

module.exports = loadConfig;
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
"use strict";

const SequelizeSourceConfig = require("./lib/SequelizeSourceConfig.js");
const SequelizeSinkConfig = require("./lib/SequelizeSinkConfig.js");

const SequelizeSourceConnector = require("./lib/source/SequelizeSourceConnector.js");
const SequelizeSinkConnector = require("./lib/sink/SequelizeSinkConnector.js");

const SequelizeSourceTask = require("./lib/source/SequelizeSourceTask.js");
const SequelizeSinkTask = require("./lib/sink/SequelizeSinkTask.js");

const JsonConverter = require("./lib/utils/JsonConverter.js");
const ConverterFactory = require("./lib/utils/ConverterFactory.js");

const runSourceConnector = (properties, converters = [], onError = null) => {

    const config = new SequelizeSourceConfig(properties,
        SequelizeSourceConnector,
        SequelizeSourceTask, [JsonConverter].concat(converters));

    if (onError) {
        config.on("error", onError);
    }

    return config.run().then(() => {
        return config;
    });
};

const runSinkConnector = (properties, converters = [], onError = null) => {

    const config = new SequelizeSinkConfig(properties,
        SequelizeSinkConnector,
        SequelizeSinkTask, [JsonConverter].concat(converters));

    if (onError) {
        config.on("error", onError);
    }

    return config.run().then(() => {
        return config;
    });
};

module.exports = {
    runSourceConnector,
    runSinkConnector,
    ConverterFactory
};
--------------------------------------------------------------------------------
/kafka-setup/docker-compose.yml:
--------------------------------------------------------------------------------
version: '2'
services:
  zookeeper:
    image: jplock/zookeeper:3.4.6
    ports:
      - 2181:2181
  kafka:
    image: ches/kafka:0.10.2.0
    ports:
      - 9092:9092
    links:
      - zookeeper:zookeeper
    environment:
      KAFKA_ADVERTISED_HOST_NAME: 127.0.0.1
      ZOOKEEPER_IP: zookeeper
    volumes:
      - /tmp/kafka-data/data:/data
      - /tmp/kafka-data/logs:/logs
--------------------------------------------------------------------------------
/kafka-setup/start.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
sudo rm -rf /tmp/kafka-data
sudo mkdir /tmp/kafka-data
sudo mkdir /tmp/kafka-data/data
sudo mkdir /tmp/kafka-data/logs
sudo chmod -R 777 /tmp/kafka-data
docker-compose rm
docker-compose up -d
--------------------------------------------------------------------------------
/kafka-setup/stop.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
docker-compose stop
--------------------------------------------------------------------------------
/lib/SequelizeSinkConfig.js:
--------------------------------------------------------------------------------
"use strict";

const {SinkConfig} = require("kafka-connect");

class SequelizeSinkConfig extends SinkConfig {

    constructor(...args){ super(...args); }

    run(){
        return super.run();
    }
}

module.exports = SequelizeSinkConfig;
--------------------------------------------------------------------------------
/lib/SequelizeSourceConfig.js:
--------------------------------------------------------------------------------
"use strict";

const {SourceConfig} = require("kafka-connect");

class SequelizeSourceConfig extends SourceConfig {

    constructor(...args){ super(...args); }

    run(){
        return super.run();
    }
}

module.exports = SequelizeSourceConfig;
--------------------------------------------------------------------------------
/lib/sink/SequelizeSinkConnector.js:
--------------------------------------------------------------------------------
"use strict";

const { SinkConnector } = require("kafka-connect");
const Sequelize = require("sequelize");

class SequelizeSinkConnector extends SinkConnector {

    start(properties, callback) {

        this.properties = properties;

        this.sequelize = new Sequelize(properties.database,
            properties.user, properties.password,
            properties.options);

        this.sequelize.authenticate().then(() => {
            callback(null);
        }).catch(error => {
            callback(error);
        });
    }

    taskConfigs(maxTasks, callback) {

        const taskConfig = {
            maxTasks,
            sequelize: this.sequelize,
            table: this.properties.table,
            incrementingColumnName: this.properties.incrementingColumnName
        };

        callback(null, taskConfig);
    }

    stop() {
        this.sequelize.close();
    }
}

module.exports = SequelizeSinkConnector;
--------------------------------------------------------------------------------
/lib/sink/SequelizeSinkTask.js:
--------------------------------------------------------------------------------
"use strict";

const { SinkTask } = require("kafka-connect");

class SequelizeSinkTask extends SinkTask {

    start(properties, callback, parentConfig) {

        this.parentConfig = parentConfig;
        this.properties = properties;
        const {
            sequelize,
            maxTasks,
            table,
            incrementingColumnName
        } = this.properties;

        this.sequelize = sequelize;
        this.maxTasks = maxTasks;
        this.table = table;
        this.incrementingColumnName = incrementingColumnName || "id";

        this.initialisedTable = false;
        this.Model = null;

        callback(null);
    }

    beforeFirstPut(record) {

        //schema must be cloned, otherwise the reference would cause serious anomalies
        const clonedSchema = JSON.parse(JSON.stringify(record.valueSchema));

        this.Model = this.sequelize.define(
            this.table,
            clonedSchema,
            Object.assign({}, {
                timestamps: false,
                freezeTableName: true,
                tableName: this.table
            }, record.schemaAttributes)
        );

        console.log(this.table, clonedSchema);
        return this.sequelize.sync();
    }

    putRecords(records) {
        return Promise.all(records.map(record => {

            if (record.value !== null && record.value !== "null") {
                this.parentConfig.emit("model-upsert", record.key.toString());
                return this.Model.upsert(record.value);
            }

            //if record.value is null, we will use the key to delete the row
            this.parentConfig.emit("model-delete", record.key.toString());
            return this.Model.destroy({
                where: {
                    [this.incrementingColumnName]: record.key.toString()
                }
            });
        }));
    }
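
    //note: the first put() lazily defines the sequelize model from the first
    //record's value schema and syncs the table (see beforeFirstPut above);
    //all subsequent put() calls write directly through the cached model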

    put(records, callback) {

        if (!this.initialisedTable) {
            return this.beforeFirstPut(records[0]).then(_ => {
                this.initialisedTable = true;
                return this.putRecords(records);
            }).then(() => {
                callback(null);
            }).catch(error => {
                callback(error);
            });
        }

        this.putRecords(records).then(() => {
            callback(null);
        }).catch(error => {
            callback(error);
        });
    }

    stop() {
        //empty (con is closed by connector)
    }
}

module.exports = SequelizeSinkTask;
--------------------------------------------------------------------------------
/lib/source/SequelizeSourceConnector.js:
--------------------------------------------------------------------------------
"use strict";

const { SourceConnector } = require("kafka-connect");
const Sequelize = require("sequelize");
const SequelizeAuto = require("sequelize-auto");

class SequelizeSourceConnector extends SourceConnector {

    start(properties, callback) {

        this.properties = properties;

        this.sequelize = new Sequelize(properties.database,
            properties.user, properties.password,
            properties.options);

        this.sequelizeAuto = new SequelizeAuto(properties.database,
            properties.user, properties.password, Object.assign({},
                properties.options, {
                    additional: {
                        timestamps: false
                    },
                    tables: [properties.table],
                    directory: false //do not write models to fs
                }));

        this.sequelize.authenticate().then(() => {

            this.sequelizeAuto.run(error => {

                if (error) {
                    return callback(error);
                }

                if (!this.sequelizeAuto.tables ||
                    !this.sequelizeAuto.tables[this.properties.table]) {
                    return callback(new Error("Failed to load table schema, it is empty."));
                }

                this.tableSchema = this.sequelizeAuto.tables[this.properties.table];
                callback(null);
            });

        }).catch(error => {
            callback(error);
        });
    }

    taskConfigs(maxTasks, callback) {

        const taskConfig = {
            maxTasks,
            sequelize: this.sequelize,
            table: this.properties.table,
            maxPollCount: this.properties.maxPollCount,
            incrementingColumnName: this.properties.incrementingColumnName,
            tableSchema: this.tableSchema
        };

        callback(null, taskConfig);
    }

    stop() {
        this.sequelize.close();
        //sequelizeAuto closes itself after .run() finishes
    }
}

module.exports = SequelizeSourceConnector;
--------------------------------------------------------------------------------
/lib/source/SequelizeSourceTask.js:
--------------------------------------------------------------------------------
"use strict";

const { SourceTask, SourceRecord } = require("kafka-connect");
const SqlString = require("sequelize/lib/sql-string");

class SequelizeSourceTask extends SourceTask {

    start(properties, callback, parentConfig) {

        this.parentConfig = parentConfig;

        this.properties = properties;
        const {
            sequelize,
            maxTasks,
            table,
            maxPollCount,
            incrementingColumnName,
            tableSchema
        } = this.properties;

        this.sequelize = sequelize;
        this.maxTasks = maxTasks;
        this.table = table;
        this.maxPollCount = maxPollCount;
        this.incrementingColumnName = incrementingColumnName;
        this.tableSchema = tableSchema;

        this.currentOffset = 0; //TODO fetch from kafka

        callback(null);
    }
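
    //poll() pages through the table via LIMIT/OFFSET in chunks of maxPollCount
    //rows; since currentOffset only lives in memory (see the TODO above), a
    //restarted task will re-emit all rows from the beginning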

    poll(callback) {

        const idColumn = this.incrementingColumnName || "id";
        const query = [
            "SELECT",
            "*",
            "FROM",
            SqlString.escape(this.table),
            "ORDER BY",
            SqlString.escape(idColumn),
            "LIMIT",
            ":limit",
            "OFFSET",
            ":offset"
        ];

        this.sequelize.query(
            query.join(" "), {
                type: this.sequelize.QueryTypes.SELECT,
                replacements: {
                    limit: this.maxPollCount,
                    offset: this.currentOffset,
                }
            }
        ).then(results => {

            this.currentOffset += results.length;

            const records = results.map(result => {

                const record = new SourceRecord();

                record.key = result[idColumn];
                record.keySchema = null;

                if (!record.key) {
                    throw new Error("db results are missing incrementing column name or default 'id' field.");
                }

                record.value = result;
                record.valueSchema = this.tableSchema;

                record.timestamp = new Date().toISOString();
                record.partition = -1;
                record.topic = this.table;

                this.parentConfig.emit("record-read", record.key.toString());
                return record;
            });

            callback(null, records);
        }).catch(error => {
            callback(error);
        });
    }

    stop() {
        //empty (con is closed by connector)
    }
}

module.exports = SequelizeSourceTask;
--------------------------------------------------------------------------------
/lib/utils/ConverterFactory.js:
--------------------------------------------------------------------------------
"use strict";

const { ConverterFactory } = require("kafka-connect");
module.exports = ConverterFactory;
--------------------------------------------------------------------------------
/lib/utils/JsonConverter.js:
--------------------------------------------------------------------------------
"use strict";

const { JsonConverter } = require("kafka-connect");
module.exports = JsonConverter;
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
  "name": "sequelize-kafka-connect",
  "version": "4.0.0",
  "description": "kafka-connect connector for MySQL, Postgres, SQLite and MSSQL",
  "main": "index.js",
  "scripts": {
    "test": "istanbul cover _mocha -- --recursive --exit --timeout 12500 -R spec test/int && istanbul check-coverage --statements 80",
    "test-ci": "istanbul cover _mocha --report lcovonly -- --exit --timeout 12500 -R spec test/int && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js && rm -rf ./coverage",
    "yarn:openssl": "LDFLAGS='-L/usr/local/opt/openssl/lib' CPPFLAGS='-I/usr/local/opt/openssl/include' yarn"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/nodefluent/sequelize-kafka-connect.git"
  },
  "keywords": [
    "kafka-connect",
    "connector",
    "mysql",
    "postgres",
    "sqlite",
    "sequelize",
    "mssql",
    "kafka",
    "connect"
  ],
  "author": "Chris Froehlingsdorf ",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/nodefluent/sequelize-kafka-connect/issues"
  },
  "homepage": "https://github.com/nodefluent/sequelize-kafka-connect#readme",
  "dependencies": {
    "commander": "~4.1.1",
    "kafka-connect": "~4.0.0",
    "mysql2": "~2.1.0",
    "pg": "~7.18.2",
    "pg-hstore": "~2.3.3",
    "sequelize": "~5.21.5",
    "sequelize-auto": "~0.4.29",
    "sqlite3": "~4.1.1",
"tedious": "~8.0.1" 42 | }, 43 | "devDependencies": { 44 | "coveralls": "~3.0.9", 45 | "istanbul": "~0.4.5", 46 | "log4bro": "~3.14.0", 47 | "mocha": "~7.0.1", 48 | "mocha-lcov-reporter": "~1.3.0", 49 | "uuid": "~3.4.0" 50 | }, 51 | "preferGlobal": true, 52 | "bin": { 53 | "nkc-sequelize-source": "bin/nkc-sequelize-source.js", 54 | "nkc-sequelize-sink": "bin/nkc-sequelize-sink.js" 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /test/README.md: -------------------------------------------------------------------------------- 1 | # Test Readme 2 | 3 | * `sqlite3 test-db.sqlite` 4 | * `create table accounts (id integer primary key, name varchar(255));` 5 | * `insert into accounts (id, name) values (1, "peter");` 6 | * `insert into accounts (id, name) values (2, "bob");` -------------------------------------------------------------------------------- /test/int/Connector.test.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | const assert = require("assert"); 4 | const Sequelize = require("sequelize"); 5 | const { SourceRecord } = require("kafka-connect"); 6 | const uuid = require("uuid"); 7 | const { NProducer } = require("sinek"); 8 | 9 | const { runSourceConnector, runSinkConnector, ConverterFactory } = require("./../../index.js"); 10 | const sinkProperties = require("./../sink-config.js"); 11 | const sourceProperties = require("./../source-config.js"); 12 | 13 | describe("Connector INT", function() { 14 | 15 | describe("Source", function() { 16 | 17 | let config = null; 18 | let error = null; 19 | 20 | it("should be able to run sequelize source config", function() { 21 | const onError = _error => { 22 | error = _error; 23 | }; 24 | return runSourceConnector(sourceProperties, [], onError).then(_config => { 25 | config = _config; 26 | config.on("record-read", id => console.log("read: " + id)); 27 | return true; 28 | }); 29 | }); 30 | 31 | it("should be able to await a few pollings", function(done) { 32 | setTimeout(() => { 33 | assert.ifError(error); 34 | done(); 35 | }, 4500); 36 | }); 37 | 38 | it("should be able to fake a delete action", function() { 39 | 40 | const record = new SourceRecord(); 41 | record.key = "1"; 42 | record.value = null; //will cause this record to be deleted when read by sink-task 43 | 44 | return config.produce(record); 45 | }); 46 | 47 | it("should await produce of single record", function(done){ 48 | setTimeout(done, 1500); 49 | }); 50 | 51 | it("should be able to close configuration", function(done) { 52 | config.stop(); 53 | setTimeout(done, 1500); 54 | }); 55 | }); 56 | 57 | describe("Sink", function() { 58 | 59 | before((done) => { 60 | const { database, options, user, password, table } = sinkProperties.connector; 61 | const sequelize = new Sequelize(database, user, password, options); 62 | sequelize.query(`DROP TABLE IF EXISTS ${table}`) 63 | .catch(error => console.log(error)) 64 | .then(() => { 65 | sequelize.close(); 66 | done(); 67 | }); 68 | }); 69 | 70 | let config = null; 71 | let error = null; 72 | 73 | it("should be able to run sequelize sink config", function() { 74 | const onError = _error => { 75 | error = _error; 76 | }; 77 | return runSinkConnector(Object.assign({}, sinkProperties, { enableMetrics: true }), [], onError).then(_config => { 78 | config = _config; 79 | config.on("model-upsert", id => console.log("upsert: " + id)); 80 | config.on("model-delete", id => console.log("delete: " + id)); 81 | return true; 82 | }); 83 | }); 84 | 

        it("should be able to await a few message puts", function(done) {
            setTimeout(() => {
                assert.ifError(error);
                done();
            }, 4500);
        });

        it("should be able to close configuration", function(done) {
            config.stop();
            setTimeout(done, 1500);
        });

        it("should be able to see table data", function() {
            const { database, options, user, password, table } = sinkProperties.connector;
            const sequelize = new Sequelize(database, user, password, options);
            return sequelize.query(`SELECT * FROM ${table}`)
                .then(([results]) => {
                    console.log(results);
                    assert.equal(results.length, 1);
                    sequelize.close();
                    return true;
                });
        });
    });

    describe("Converter Factory", function() {

        let config = null;
        let error = null;
        let topic = "sc_test_topic_2";
        let converter = {};
        let producer = null;

        it("should be able to create custom converter", function(done) {

            const tableSchema = {
                "id": {
                    "type": "integer",
                    "allowNull": false,
                    "primaryKey": true
                },
                "name": {
                    "type": "varchar(255)",
                    "allowNull": true,
                    "primaryKey": false
                }
            };

            const etlFunc = (messageValue, callback) => {

                //type is an example json format field
                if (messageValue.type === "publish") {
                    return callback(null, {
                        id: messageValue.payload.id,
                        name: messageValue.payload.name
                    });
                }

                if (messageValue.type === "unpublish") {
                    return callback(null, null); //null value will cause deletion
                }

                console.log(messageValue);
                throw new Error("unknown messageValue.type");
            };

            converter = ConverterFactory.createSinkSchemaConverter(tableSchema, etlFunc);

            const aFakeKafkaMessage = {
                partition: 0,
                topic: "bla",
                value: {
                    payload: {
                        id: "123",
                        name: "bla-blup"
                    },
                    type: "publish"
                },
                offset: 1,
                key: Buffer.from("123", "utf8")
            };

            //the same fake message is converted twice, both results must be identical
            converter.toConnectData(Object.assign({}, aFakeKafkaMessage), (error, message) => {

                assert.ifError(error);
                assert.deepEqual(message.value.valueSchema, tableSchema);
                assert.deepEqual(message.value.value, {
                    id: "123",
                    name: "bla-blup"
                });
                assert.ok(message.key);
                assert.ok(message.value.key);

                converter.toConnectData(Object.assign({}, aFakeKafkaMessage), (error, message) => {

                    assert.ifError(error);
                    assert.deepEqual(message.value.valueSchema, tableSchema);
                    assert.deepEqual(message.value.value, {
                        id: "123",
                        name: "bla-blup"
                    });
                    assert.ok(message.key);
                    assert.ok(message.value.key);

                    done();
                });
            });
        });

        it("should be able to produce a few messages", function() {
            producer = new NProducer(sinkProperties.kafka, topic, 1);
            return producer.connect().then(_ => {
                return Promise.all([
                    producer.buffer(topic, "3", { payload: { id: 3, name: "test1" }, type: "publish" }),
                    producer.buffer(topic, "4", { payload: { id: 4, name: "test2" }, type: "publish" }),
                    producer.buffer(topic, "3", { payload: null, type: "unpublish" })
                ]);
            });
        });

        it("should be able to await a few broker interactions", function(done) {
            setTimeout(() => {
                assert.ifError(error);
                done();
            }, 1500);
        });

it("shoud be able to sink message through custom converter", function() { 213 | const onError = _error => { 214 | error = _error; 215 | }; 216 | 217 | const customProperties = Object.assign({}, sinkProperties, { topic }); 218 | return runSinkConnector(customProperties, [converter], onError).then(_config => { 219 | config = _config; 220 | return true; 221 | }); 222 | }); 223 | 224 | it("should be able to await a few message puts", function(done) { 225 | setTimeout(() => { 226 | assert.ifError(error); 227 | done(); 228 | }, 4500); 229 | }); 230 | 231 | it("should be able to close configuration", function(done) { 232 | config.stop(); 233 | producer.close(); 234 | setTimeout(done, 1500); 235 | }); 236 | 237 | it("should be able to see table data", function() { 238 | const { database, options, user, password, table } = sinkProperties.connector; 239 | const sequelize = new Sequelize(database, user, password, options); 240 | return sequelize.query(`SELECT * FROM ${table}`) 241 | .then(([results]) => { 242 | console.log(results); 243 | assert.equal(results.length, 2); 244 | assert.deepEqual(results, [{ id: 2, name: "bob" }, { id: 4, name: "test2" }]); 245 | sequelize.close(); 246 | return true; 247 | }); 248 | }); 249 | }); 250 | 251 | describe("Sink with erroneous message", function() { 252 | 253 | before((done) => { 254 | const { database, options, user, password, table } = sinkProperties.connector; 255 | const sequelize = new Sequelize(database, user, password, options); 256 | sequelize.query(`DROP TABLE IF EXISTS ${table}`) 257 | .catch(error => console.log(error)) 258 | .then(() => { 259 | sequelize.close(); 260 | done(); 261 | }); 262 | }); 263 | 264 | const brokenTopic = sourceProperties.topic + "_broken"; 265 | let config = null; 266 | let error = null; 267 | 268 | it("should be able to run sequelize source config", function() { 269 | const onError = _error => { 270 | error = _error; 271 | }; 272 | 273 | sourceProperties.topic = brokenTopic; 274 | 275 | return runSourceConnector(sourceProperties, [], onError).then(_config => { 276 | config = _config; 277 | return true; 278 | }); 279 | }); 280 | 281 | it("should be able to await a few pollings", function(done) { 282 | setTimeout(() => { 283 | assert.ifError(error); 284 | done(); 285 | }, 4500); 286 | }); 287 | 288 | it("should be able to close configuration", function(done) { 289 | config.stop(); 290 | setTimeout(done, 1500); 291 | }); 292 | 293 | it("should produce the erroneous message", function(done) { 294 | 295 | const partitions = 1; 296 | const producer = new NProducer(sourceProperties.kafka, [brokenTopic]); 297 | producer.on("error", error => { 298 | console.error(error); 299 | return done(); 300 | }); 301 | 302 | producer.connect() 303 | .then(() => producer.send(brokenTopic, JSON.stringify({payload: "this is wrong"}))) 304 | .then(() => done()); 305 | }); 306 | 307 | xit("should be able to run sequelize sink config", function() { 308 | const onError = _error => { 309 | error = _error; 310 | }; 311 | 312 | sinkProperties.topic = brokenTopic; 313 | sinkProperties.maxRetries = 2; 314 | sinkProperties.awaitRetry = 100; 315 | sinkProperties.haltOnError = true; 316 | sinkProperties.kafka.logger = { 317 | debug: function(message) {/*console.log(message)*/}, 318 | info: function(message) {console.log(message)}, 319 | warn: function(message) {console.warn(message)}, 320 | error: function(message) { 321 | errorMessages.push(message); 322 | console.error(message); 323 | } 324 | } 325 | 326 | return runSinkConnector(sinkProperties, [], 
            return runSinkConnector(sinkProperties, [], onError).then(_config => {
                config = _config;
                return true;
            });
        });

        xit("should put valid messages and fail on erroneous message", function(done) {
            setTimeout(() => {
                assert.equal(error, "Error: halting because of retry error.");
                done();
            }, 8000);
        });
    });
});
--------------------------------------------------------------------------------
/test/sink-config.js:
--------------------------------------------------------------------------------
"use strict";

const path = require("path");
//const Logger = require("log4bro");

const config = {

    /*
    kafka: {
        //zkConStr: "localhost:2181/",
        kafkaHost: "localhost:9092",
        logger: new Logger(),
        groupId: "kc-sequelize-test",
        clientName: "kc-sequelize-test-name",
        workerPerPartition: 1,
        options: {
            sessionTimeout: 8000,
            protocol: ["roundrobin"],
            fromOffset: "earliest", //latest
            fetchMaxBytes: 1024 * 100,
            fetchMinBytes: 1,
            fetchMaxWaitMs: 10,
            heartbeatInterval: 250,
            retryMinTimeout: 250,
            requireAcks: 0,
            //ackTimeoutMs: 100,
            //partitionerType: 3
        }
    }, */

    kafka: {
        noptions: {
            "metadata.broker.list": "localhost:9092",
            "group.id": "n-test-group",
            "client.id": "kcs-test",
            "enable.auto.commit": false,
            "debug": "all",
            "event_cb": true
        },
        tconf: {
            "auto.offset.reset": "earliest"
        }
    },

    topic: "sc_test_topic",
    partitions: 1,
    maxTasks: 1,
    pollInterval: 2000,
    produceKeyed: true,
    produceCompressionType: 0,
    awaitRetry: 2000,
    maxRetries: 3,
    connector: {
        options: {
            host: "localhost",
            port: 5432,
            dialect: "sqlite",
            pool: {
                max: 5,
                min: 0,
                idle: 10000
            },
            storage: path.join(__dirname, "test-db.sqlite")
        },
        database: null,
        user: null,
        password: null,
        maxPollCount: 50,
        table: "accounts_import",
        incrementingColumnName: "id"
    },
    http: {
        port: 3149,
        middlewares: []
    },
    enableMetrics: false,
    batch: {
        batchSize: 100,
        commitEveryNBatch: 1,
        concurrency: 1,
        commitSync: true
    }
};

module.exports = config;
--------------------------------------------------------------------------------
/test/source-config.js:
--------------------------------------------------------------------------------
"use strict";

const path = require("path");
//const Logger = require("log4bro");

const config = {

    /*
    kafka: {
        //zkConStr: "localhost:2181/",
        kafkaHost: "localhost:9092",
        logger: new Logger(),
        groupId: "kc-sequelize-test",
        clientName: "kc-sequelize-test-name",
        workerPerPartition: 1,
        options: {
            sessionTimeout: 8000,
            protocol: ["roundrobin"],
            fromOffset: "earliest", //latest
            fetchMaxBytes: 1024 * 100,
            fetchMinBytes: 1,
            fetchMaxWaitMs: 10,
            heartbeatInterval: 250,
            retryMinTimeout: 250,
            requireAcks: 0,
            //ackTimeoutMs: 100,
            //partitionerType: 3
        }
    }, */

    kafka: {
        noptions: {
            "client.id": "kcs-test",
            "group.id": "n-test-group",
            "metadata.broker.list": "localhost:9092",
            //"debug": "all",
            "event_cb": true
        },
        tconf: {
            "request.required.acks": 1
        }
    },

    topic: "sc_test_topic",
    partitions: 1,
    maxTasks: 1,
    pollInterval: 250,
    produceKeyed: true,
    produceCompressionType: 0,
    connector: {
        options: {
            host: "localhost",
            port: 5432,
            dialect: "sqlite",
            pool: {
                max: 5,
                min: 0,
                idle: 10000
            },
            storage: path.join(__dirname, "test-db.sqlite")
        },
        database: null,
        user: null,
        password: null,
        maxPollCount: 50,
        table: "accounts",
        incrementingColumnName: "id"
    },
    http: {
        port: 3149,
        middlewares: []
    },
    enableMetrics: false
};

module.exports = config;
--------------------------------------------------------------------------------