├── .autod.conf.js ├── .eslintignore ├── .eslintrc ├── .github ├── PULL_REQUEST_TEMPLATE.md └── workflows │ └── nodejs.yml ├── .gitignore ├── .travis.yml ├── LICENSE ├── README.md ├── README.zh_CN.md ├── agent.js ├── app.js ├── app └── extend │ └── context.js ├── appveyor.yml ├── config └── config.default.js ├── demo.js ├── index.d.ts ├── lib ├── client.js ├── consumer.js ├── definition.js ├── kafka-node.js ├── message.js ├── producer.js ├── sendMessage.js ├── subscription.js └── utils.js ├── package-lock.json ├── package.json ├── test ├── fixtures │ └── apps │ │ └── kafka-node-test │ │ ├── app │ │ ├── controller │ │ │ ├── home.js │ │ │ └── kafka.js │ │ ├── extend │ │ │ └── context.js │ │ ├── kafka │ │ │ ├── testTopic1 │ │ │ │ ├── Consumer.js │ │ │ │ ├── Some2Consumer.ts │ │ │ │ └── SomeConsumer.js │ │ │ ├── testTopic2 │ │ │ │ ├── Consumer.js │ │ │ │ └── EveryConsumer.js │ │ │ └── testTopic3 │ │ │ │ └── NewConsumer.js │ │ ├── router.js │ │ └── service │ │ │ └── home.js │ │ ├── config │ │ └── config.default.js │ │ └── package.json └── kafka-node.test.js └── tsconfig.json /.autod.conf.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | write: true, 5 | prefix: '^', 6 | plugin: 'autod-egg', 7 | test: [ 8 | 'test', 9 | 'benchmark', 10 | ], 11 | devdep: [ 12 | 'egg', 13 | 'egg-ci', 14 | 'egg-bin', 15 | 'autod', 16 | 'autod-egg', 17 | 'eslint', 18 | 'eslint-config-egg', 19 | 'webstorm-disable-index', 20 | ], 21 | exclude: [ 22 | './test/fixtures', 23 | './docs', 24 | './coverage', 25 | ], 26 | }; 27 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | coverage 2 | index.d.ts 3 | *.ts 4 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "eslint-config-egg" 3 | } 4 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 10 | 11 | ##### Checklist 12 | 13 | 14 | - [ ] `npm test` passes 15 | - [ ] tests and/or benchmarks are included 16 | - [ ] documentation is changed or added 17 | - [ ] commit message follows commit guidelines 18 | 19 | ##### Affected core subsystem(s) 20 | 21 | 22 | 23 | ##### Description of change 24 | 25 | -------------------------------------------------------------------------------- /.github/workflows/nodejs.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: Node.js CI 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | pull_request: 10 | branches: [ master ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ${{ matrix.os }} 16 | 17 | strategy: 18 | matrix: 19 | node-version: [8.x, 9.x] 20 | os: [ubuntu-latest, windows-latest, macos-latest] 21 | 22 | steps: 23 | - uses: actions/checkout@v2 24 | - name: Use Node.js ${{ matrix.node-version }} 25 | uses: actions/setup-node@v1 26 | with: 27 | node-version: ${{ matrix.node-version }} 28 | - run: npm i -g npminstall && npminstall 29 
| - run: npm run pkgfiles 30 | env: 31 | CI: true 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | logs/ 2 | npm-debug.log 3 | node_modules/ 4 | coverage/ 5 | .idea/ 6 | run/ 7 | .DS_Store 8 | *.swp 9 | 10 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: node_js 3 | node_js: 4 | - '8' 5 | - '9' 6 | install: 7 | - npm i npminstall && npminstall 8 | script: 9 | - npm run pkgfiles 10 | after_script: 11 | - npminstall codecov && codecov 12 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2017 Alibaba Group Holding Limited and other contributors. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # egg-kafka-node 2 | 3 | [![NPM version][npm-image]][npm-url] 4 | [![build status][travis-image]][travis-url] 5 | [![Test coverage][codecov-image]][codecov-url] 6 | [![Known Vulnerabilities][snyk-image]][snyk-url] 7 | [![npm download][download-image]][download-url] 8 | 9 | [npm-image]: https://img.shields.io/npm/v/egg-kafka-node.svg?style=flat-square 10 | [npm-url]: https://npmjs.org/package/egg-kafka-node 11 | [travis-image]: https://www.travis-ci.org/JohnApache/egg-kafka-node.svg 12 | [travis-url]: https://travis-ci.org/JohnApache/egg-kafka-node 13 | [codecov-image]: https://codecov.io/gh/JohnApache/egg-kafka-node/branch/master/graph/badge.svg 14 | [codecov-url]: https://codecov.io/gh/JohnApache/egg-kafka-node 15 | [snyk-image]: https://snyk.io/test/github/JohnApache/egg-kafka-node/badge.svg?targetFile=package.json 16 | [snyk-url]: https://snyk.io/test/github/JohnApache/egg-kafka-node?targetFile=package.json 17 | [download-image]: https://img.shields.io/npm/dm/egg-kafka-node.svg?style=flat-square 18 | [download-url]: https://npmjs.org/package/egg-kafka-node 19 | 20 | - [English](README.md) 21 | - [简体中文](README.zh_CN.md) 22 | 23 | This plug-in is a wrapper around [kafka-node](https://github.com/SOHU-Co/kafka-node).
It is an egg-style plug-in that makes kafka-node easy to use in an egg.js application, and it provides convenient methods for sending messages to Kafka. For the full set of low-level options, refer to [kafka-node](https://github.com/SOHU-Co/kafka-node). 24 | 25 | 26 | ## Install 27 | 28 | ```bash 29 | $ npm i egg-kafka-node --save 30 | ``` 31 | 32 | ## Usage 33 | 34 | ```js 35 | // {app_root}/config/plugin.js 36 | exports.kafkaNode = { 37 | enable: true, 38 | package: 'egg-kafka-node', 39 | }; 40 | ``` 41 | 42 | ## Configuration 43 | 44 | ```js 45 | // {app_root}/config/config.default.js 46 | exports.kafkaNode = { 47 | kafkaHost: '127.0.0.1:9092', // Kafka broker host to connect to 48 | clientOption: {}, // KafkaClient options; see the kafka-node documentation for more 49 | consumerOption: [{ 50 | groupId: 'group1', // the consumerGroup's groupId 51 | topics: [ 'testTopic1' ], // topics under the same consumer group 52 | options: { 53 | fetchMaxWaitMs: 100, 54 | fetchMinBytes: 1, 55 | fetchMaxBytes: 1024 * 1024, 56 | }, // options for each consumer group; see the kafka-node documentation for more 57 | }, { 58 | groupId: 'group2', 59 | topics: [ 'testTopic2' ], 60 | options: {}, 61 | }, { 62 | groupId: 'group3', 63 | topics: [ 'testTopic3' ], 64 | }], 65 | 66 | // HighLevelProducer options; see the kafka-node documentation for more 67 | producerOption: { 68 | requireAcks: 1, 69 | ackTimeoutMs: 100, 70 | partitionerType: 2, 71 | autoCreateTopic: true, // whether to turn on automatic topic creation, default true 72 | topics: [ 'testTopic1', 'testTopic2', 'testTopic3' ], // all topics that the consumer groups consume 73 | }, 74 | messageOption: { 75 | partition: 0, 76 | attributes: 0, // options for sending messages 77 | }, 78 | // NewConfig 79 | baseConsumersDir: './app/kafka', // base directory from which consumer files are read, default './app/kafka' 80 | }; 81 | ``` 82 | 83 | see [config/config.default.js](test/fixtures/apps/kafka-node-test/config/config.default.js) for more detail. 84 | 85 | ## Structure 86 | ```js 87 | egg-project 88 | ├── package.json 89 | ├── app.js (optional) 90 | ├── app 91 | | ├── router.js 92 | │ ├── controller 93 | │ | └── home.js 94 | │ ├── service (optional) 95 | │ | └── user.js 96 | │ | └── response_time.js 97 | │ └── kafka (optional) --------> like `controller, service...` 98 | │ ├── someTopic (optional) -------> topic name of kafka 99 | │ ├── someKey1Consumer.js(optional) ------> `someKey1` is a key of someTopic 100 | | └── someKey2Consumer.js(optional) ------> `someKey2` is another key of someTopic 101 | | └── someKey3Consumer.ts(optional) ------> consumers in ts files are supported too 102 | ├── config 103 | | ├── plugin.js 104 | | ├── config.default.js 105 | │ ├── config.prod.js 106 | | ├── config.test.js (optional) 107 | | ├── config.local.js (optional) 108 | | └── config.unittest.js (optional) 109 | 110 | ``` 111 | ## USE TIPS 112 | 113 | > Note: Every topic listed in the `producerOption.topics` configuration must have a corresponding topic directory under the `{app_root}/app/kafka` directory. egg-kafka-node automatically reads the files whose names end with `Consumer` under each topic directory, and the filename prefix becomes the `key` that you pass to `sendMessage`, so that messages can be routed to the right consumer (see the sketch below). 114 | 115 | > Note: You must set `app.config.baseDir`; egg-kafka-node loads the consumers relative to it. 116 | 117 | > Note: The maximum byte size of the messages passed to `sendMessage` depends on the `fetchMaxBytes` you configure for the topic's consumer group.
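The key routing described in the first note is a plain filename convention. The following is a condensed sketch of what `lib/subscription.js` does when it loads consumers; the directory layout shown in the comments is hypothetical and only for illustration:

```js
// Sketch of the consumer-loading convention, mirroring lib/subscription.js.
const path = require('path');
const fs = require('fs');

const loadTopicConsumers = (baseReadDir, topic) => {
  const subscribers = new Map();
  const topicDir = path.join(baseReadDir, topic); // e.g. {app_root}/app/kafka/someTopic
  fs.readdirSync(topicDir).forEach(file => {
    const m = file.match(/^(.*)Consumer\.[tj]s$/);
    if (!m) return; // only `<key>Consumer.js` / `<key>Consumer.ts` files are loaded
    // 'someKeyConsumer.js' registers the key 'someKey'; a bare 'Consumer.js'
    // registers an empty key and acts as the topic-wide fallback subscriber.
    subscribers.set(`${topic}:${m[1] || ''}`, require(path.join(topicDir, file)));
  });
  return subscribers;
};
```

A message sent with `{ topic: 'someTopic', key: 'someKey' }` is then dispatched to the subscriber registered under `someTopic:someKey`, falling back to the keyless `someTopic:` subscriber when no key matches.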
118 | 119 | ## Example 120 | 121 | ```js 122 | // {app_root}/app/controller/index.js 123 | class IndexController extends Controller { 124 | async index() { 125 | await this.ctx.kafka.sendMessage({ 126 | topic: 'someTopic', // Specify topics in the Kafka directory 127 | key: 'someKey', // Specify consumer for the corresponding key under topic 128 | messages: JSON.stringify({ 129 | username: 'JohnApache', 130 | userId: 10001, 131 | gender: 0 132 | }) 133 | }); 134 | } 135 | 136 | async some() { 137 | this.ctx.kafka.sendMessageSync({ 138 | topic: 'someTopic', // Specify topics in the Kafka directory 139 | key: 'someKey', // Specify consumer for the corresponding key under topic 140 | messages: JSON.stringify({ 141 | username: 'JohnApache', 142 | userId: 10001, 143 | gender: 0 144 | }) 145 | }, () => { 146 | // success callback 147 | }, () => { 148 | // error callback 149 | }) 150 | } 151 | } 152 | 153 | // {app_root}/app/kafka/someTopic/someKeyConsumer.js 154 | class SomeKeySubscription extends Subscription { 155 | async subscribe(message) { 156 | const {value, topic, key} = message; 157 | this.ctx.logger.info(`consume message ${value} by topic ${topic} key ${key} consumer`); 158 | await asyncTask(); 159 | } 160 | } 161 | ``` 162 | 163 | ## Questions & Suggestions 164 | Please open an issue [here](https://github.com/JohnApache/egg-kafka-node/issues). 165 | 166 | ## License 167 | 168 | [MIT](LICENSE) 169 | -------------------------------------------------------------------------------- /README.zh_CN.md: -------------------------------------------------------------------------------- 1 | # egg-kafka-node 2 | 3 | [![NPM version][npm-image]][npm-url] 4 | [![build status][travis-image]][travis-url] 5 | [![Test coverage][codecov-image]][codecov-url] 6 | [![Known Vulnerabilities][snyk-image]][snyk-url] 7 | [![npm download][download-image]][download-url] 8 | 9 | [npm-image]: https://img.shields.io/npm/v/egg-kafka-node.svg?style=flat-square 10 | [npm-url]: https://npmjs.org/package/egg-kafka-node 11 | [travis-image]: https://www.travis-ci.org/JohnApache/egg-kafka-node.svg 12 | [travis-url]: https://travis-ci.org/JohnApache/egg-kafka-node 13 | [codecov-image]: https://codecov.io/gh/JohnApache/egg-kafka-node/branch/master/graph/badge.svg 14 | [codecov-url]: https://codecov.io/gh/JohnApache/egg-kafka-node 15 | [snyk-image]: https://snyk.io/test/github/JohnApache/egg-kafka-node/badge.svg?targetFile=package.json 16 | [snyk-url]: https://snyk.io/test/github/JohnApache/egg-kafka-node?targetFile=package.json 17 | [download-image]: https://img.shields.io/npm/dm/egg-kafka-node.svg?style=flat-square 18 | [download-url]: https://npmjs.org/package/egg-kafka-node 19 | 20 | 21 | 22 | - [English](README.md) 23 | - [简体中文](README.zh_CN.md) 24 | 25 | 该插件是 [kafka-node](https://github.com/SOHU-Co/kafka-node) 的封装, 是一个方便在 egg.js 环境下使用的 egg 风格插件, 并提供了方便的 API 用于向 kafka 发送消息。部分详细配置请参考 [https://github.com/SOHU-Co/kafka-node](https://github.com/SOHU-Co/kafka-node) 26 | 27 | 28 | ## 依赖说明 29 | 30 | ### 依赖的 egg 版本 31 | 32 | egg-kafka-node 版本 | egg 1.x 33 | --- | --- 34 | 1.x | 😁 35 | 0.x | 😁 36 | 37 | ### 依赖的 Node 版本 38 | node >= 8.0.0 😁 39 | 40 | ## 开启插件 41 | 42 | ```js 43 | // config/plugin.js 44 | exports.kafkaNode = { 45 | enable: true, 46 | package: 'egg-kafka-node', 47 | }; 48 | ``` 49 | 50 | ## 配置 51 | ```js 52 | // {app_root}/config/config.default.js 53 | exports.kafkaNode = { 54 | kafkaHost: '127.0.0.1:9092', // kafka 连接的地址 55 | clientOption: {}, // KafkaClient 相关配置, 更多配置可以查看kafka-node 56 | consumerOption: [{ 57 | groupId: 'group1', // 
consumerGroup 消费组id 58 | topics: [ 'testTopic1' ], // 同一消费组 consumerGroup 下的所有 topic 59 | options: { 60 | fetchMaxWaitMs: 100, 61 | fetchMinBytes: 1, 62 | fetchMaxBytes: 1024 * 1024, 63 | }, // 每个消费组对应的相关 consumerGroup 配置 64 | }, { 65 | groupId: 'group2', 66 | topics: [ 'testTopic2' ], 67 | options: {}, 68 | }, { 69 | groupId: 'group3', 70 | topics: [ 'testTopic3' ], 71 | }], 72 | // HighLevelProducer 生产者配置, 更多配置可以查看kafka-node 73 | producerOption: { 74 | requireAcks: 1, 75 | ackTimeoutMs: 100, 76 | partitionerType: 2, 77 | autoCreateTopic: true, // 是否开启自动创建 topic功能 78 | topics: [ 'testTopic1', 'testTopic2', 'testTopic3' ], // 所有消费组需要包含的topics 集合 79 | }, 80 | messageOption: { 81 | partition: 0, 82 | attributes: 0, // 发送消息的相关配置 83 | }, 84 | // 新配置 85 | baseConsumersDir: './app/kafka', // 支持设置读取消费者的根目录 默认是 ./app/kafka 86 | }; 87 | ``` 88 | 89 | ## 详细配置 90 | 91 | 请到 [config/config.default.js](test/fixtures/apps/kafka-node-test/config/config.default.js) 查看详细配置项说明。 92 | 93 | ## 目录结构 94 | 95 | ```js 96 | egg-project 97 | ├── package.json 98 | ├── app.js (optional) 99 | ├── app 100 | | ├── router.js 101 | │ ├── controller 102 | │ | └── home.js 103 | │ ├── service (optional) 104 | │ | └── user.js 105 | │ | └── response_time.js 106 | │ └── kafka (optional) --------> like `controller, service...` 107 | │ ├── someTopic (optional) -------> topic name of kafka 108 | │ ├── someKey1Consumer.js(optional) ------> `someKey1` is a key of someTopic 109 | | └── someKey2Consumer.js(optional) ------> `someKey2` is another key of someTopic 110 | | └── someKey3Consumer.ts(optional) ------> 支持加载 ts 文件格式的消费者 111 | ├── config 112 | | ├── plugin.js 113 | | ├── config.default.js 114 | │ ├── config.prod.js 115 | | ├── config.test.js (optional) 116 | | ├── config.local.js (optional) 117 | | └── config.unittest.js (optional) 118 | 119 | 120 | ``` 121 | ## 使用注意 122 | 123 | > Note: kafka配置下 生产者配置 producerOption 配置的topics必须在{app-root}/kafka 目录下创建对应的topic。kafka 会自动读取topic 目录下对应的Consumer.js ,并自动设置文件名前缀对应的 key 名, 该key需要在sendMessage 时提供 这个 key, 方便业务区分 124 | 125 | > Note: 你必须设置 app.config.baseDir, egg-kafka-node 需要基于 这个baseDir 去加载所使用的consumers 126 | 127 | > Note: sendMessage 发送消息 方法 messages参数 最大字节数 取决于 你设置的consumer配置 128 | 129 | 130 | ## 使用案例 131 | 132 | ```js 133 | // {app_root}/app/controller/index.js 134 | class IndexController extends Controller { 135 | async index() { 136 | await this.ctx.kafka.sendMessage({ 137 | topic: 'someTopic', // 指定 kafka 目录下 的topic 138 | key: 'someKey', // 指定 kafka 下的 topic 目录 对应key的consumer 139 | messages: JSON.stringify({ 140 | username: 'JohnApache', 141 | userId: 10001, 142 | gender: 0 143 | }) 144 | }); 145 | } 146 | 147 | async some() { 148 | this.ctx.kafka.sendMessageSync({ 149 | topic: 'someTopic', // 指定 kafka 目录下的 topic 150 | key: 'someKey', // 指定 kafka 下的 topic 目录 对应key 的consumer 151 | messages: JSON.stringify({ 152 | username: 'JohnApache', 153 | userId: 10001, 154 | gender: 0 155 | }) 156 | }, () => { 157 | // success callback 158 | }, () => { 159 | // error callback 160 | }) 161 | } 162 | } 163 | 164 | // {app_root}/app/kafka/someTopic/someKeyConsumer.js 165 | class SomeKeySubscription extends Subscription { 166 | async subscribe(message) { 167 | const {value, topic, key} = message; 168 | this.ctx.logger.info(`consume message ${value} by topic ${topic} key ${key} consumer`); 169 | await asyncTask(); 170 | } 171 | } 172 | ``` 173 | ## 提问交流 174 | 请到 [egg issues](https://github.com/JohnApache/egg-kafka-node/issues) 异步交流。 175 | 176 | ## License 177 | 178 | [MIT](LICENSE) 179 | 
-------------------------------------------------------------------------------- /agent.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const kafkaNode = require('./lib/kafka-node'); 3 | 4 | module.exports = agent => { 5 | if (agent.config.kafkaNode) kafkaNode(agent); 6 | }; 7 | -------------------------------------------------------------------------------- /app.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const kafkaNode = require('./lib/kafka-node'); 3 | 4 | module.exports = app => { 5 | if (app.config.kafkaNode) kafkaNode(app); 6 | }; 7 | -------------------------------------------------------------------------------- /app/extend/context.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | get kafka() { 5 | return this.app.kafka; 6 | }, 7 | }; 8 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | environment: 2 | matrix: 3 | - nodejs_version: '8' 4 | - nodejs_version: '9' 5 | 6 | install: 7 | - ps: Install-Product node $env:nodejs_version 8 | - npm i npminstall && node_modules\.bin\npminstall 9 | 10 | test_script: 11 | - node --version 12 | - npm --version 13 | - npm run pkgfiles 14 | 15 | build: off 16 | -------------------------------------------------------------------------------- /config/config.default.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | /** 4 | * egg-kafka-node default config 5 | * @member Config#kafkaNode 6 | * @property {String} kafkaHost - Kafka broker host to connect to 7 | */ 8 | exports.kafkaNode = { 9 | kafkaHost: '127.0.0.1:9092', 10 | clientOption: {}, 11 | consumerOption: [], 12 | producerOption: { 13 | requireAcks: 1, 14 | ackTimeoutMs: 100, 15 | partitionerType: 2, 16 | autoCreateTopic: true, 17 | topics: [], 18 | }, 19 | messageOption: { 20 | partition: 0, 21 | attributes: 0, 22 | }, 23 | }; 24 | -------------------------------------------------------------------------------- /demo.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const path = require('path'); 4 | const Client = require('./lib/client'); 5 | const Consumer = require('./lib/consumer'); 6 | const Message = require('./lib/message'); 7 | const Producer = require('./lib/producer'); 8 | const Subscription = require('./lib/subscription'); 9 | const createEventsProxy = require('eventsproxy'); 10 | 11 | const { 12 | CLIENT_READY, 13 | PRODUCER_READY, 14 | CONSUMER_READY, 15 | SUBSCRIPTION_READY, 16 | TOPICS_READY, 17 | CONSUMER_CONNECT, 18 | PRINT_LOGGER, 19 | PRINT_ERROR_LOGGER, 20 | } = require('./lib/definition'); 21 | 22 | const ep = createEventsProxy(); 23 | 24 | const app = { 25 | config: {}, 26 | }; 27 | app.config.baseDir = __dirname; 28 | app.config.kafkaNode = { 29 | kafkaHost: '127.0.0.1:9092', 30 | clientOption: {}, 31 | consumerOption: [{ 32 | groupId: 'group1', 33 | topics: [ 'testTopic1' ], 34 | options: { 35 | fetchMaxWaitMs: 100, 36 | fetchMinBytes: 1, 37 | fetchMaxBytes: 1024 * 1024, 38 | }, // consumerGroup options for each consumer group 39 | }, { 40 | groupId: 'group2', 41 | topics: [ 'testTopic2' ], 42 | options: {}, 43 | }, { 44 | groupId: 'group3', 45 | topics: [ 'testTopic3' ], 46 | }], 47 | producerOption: { 48 | requireAcks: 1, 49 | ackTimeoutMs: 100, 50 | 
partitionerType: 2, 51 | autoCreateTopic: true, 52 | topics: [ 'testTopic1', 'testTopic2', 'testTopic3' ], 53 | }, 54 | messageOption: { 55 | partition: 0, 56 | attributes: 0, 57 | }, 58 | }; 59 | 60 | 61 | const ClientOp = (kafkaHost, clientOption = {}) => { 62 | const option = Object.assign({}, { 63 | kafkaHost, 64 | }, clientOption); 65 | const client = Client(option); 66 | ep.emit(CLIENT_READY, client); 67 | ep.emit(PRINT_LOGGER, '[egg-kafka-node] client ready!'); 68 | }; 69 | 70 | const ProducerOp = (client, producerOption) => { 71 | const producer = Producer(client); 72 | const { 73 | autoCreateTopic, 74 | topics, 75 | } = producerOption; 76 | producer.on('ready', () => { 77 | ep.emit(PRODUCER_READY, producer); 78 | ep.emit(PRINT_LOGGER, '[egg-kafka-node] producer ready!'); 79 | 80 | if (autoCreateTopic && topics) { 81 | producer.createTopics(topics, false, err => { 82 | if (err) { 83 | ep.emit(PRINT_ERROR_LOGGER, '[egg-kafka-node] create topics error!'); 84 | ep.emit(PRINT_ERROR_LOGGER, err); 85 | return; 86 | } 87 | ep.emit(TOPICS_READY, topics); 88 | ep.emit(PRINT_LOGGER, '[egg-kafka-node] create topics success!'); 89 | }); 90 | } 91 | }); 92 | 93 | producer.on('error', error => { 94 | ep.emit(PRINT_ERROR_LOGGER, '[egg-kafka-node] producer connect error!'); 95 | ep.emit(PRINT_ERROR_LOGGER, error); 96 | }); 97 | }; 98 | 99 | const SubscriptionOp = (baseReadDir, consumerOption) => { 100 | const topicSubscription = Subscription(baseReadDir, consumerOption); 101 | ep.emit(SUBSCRIPTION_READY, topicSubscription); 102 | ep.emit(PRINT_LOGGER, '[egg-kafka-node] load consumer subscription success'); 103 | }; 104 | 105 | const ConsumerOp = (config, topicSubscription) => { 106 | const consumerMemory = Consumer(config); 107 | 108 | // wait for all consumers to connect 109 | const unregister = ep.wait(CONSUMER_CONNECT, () => { 110 | ep.emit(PRINT_LOGGER, '[egg-kafka-node] all consumers connected successfully!'); 111 | unregister && unregister(); 112 | }, consumerMemory.length); 113 | 114 | consumerMemory.forEach(consumer => { 115 | consumer.on('connect', () => { 116 | ep.emit(CONSUMER_CONNECT); 117 | }); 118 | consumer.on('message', message => { 119 | const { 120 | topic, 121 | key, 122 | } = message; 123 | consumer.pause(); 124 | const Subscriber = topicSubscription.get(`${topic}:${key}`); 125 | if (!Subscriber) return consumer.resume(); 126 | const subscriber = new Subscriber(); 127 | subscriber 128 | .subscribe(message) 129 | .then(() => { 130 | consumer.commit(err => { 131 | consumer.resume(); 132 | if (err) { 133 | ep.emit(PRINT_ERROR_LOGGER, '[egg-kafka-node] commit offset error!'); 134 | ep.emit(PRINT_ERROR_LOGGER, err); 135 | } 136 | }); 137 | }) 138 | .catch(() => { 139 | consumer.resume(); 140 | ep.emit(PRINT_ERROR_LOGGER, '[egg-kafka-node] consume message fail!'); 141 | }); 142 | }); 143 | consumer.on('error', error => { 144 | ep.emit(PRINT_ERROR_LOGGER, '[egg-kafka-node] consumer connect error!'); 145 | ep.emit(PRINT_ERROR_LOGGER, error); 146 | }); 147 | }); 148 | ep.emit(CONSUMER_READY, consumerMemory); 149 | }; 150 | 151 | 152 | const config = app.config.kafkaNode; 153 | 154 | const { 155 | kafkaHost, 156 | producerOption = {}, 157 | clientOption = {}, 158 | consumerOption = [], 159 | } = config; 160 | 161 | const baseReadDir = path.join(app.config.baseDir, './app/kafka/'); 162 | 163 | 164 | ep.once(CLIENT_READY, client => { 165 | ProducerOp(client, producerOption); 166 | SubscriptionOp(baseReadDir, consumerOption); 167 | }); 168 | 169 | ep.once([ PRODUCER_READY, TOPICS_READY, SUBSCRIPTION_READY ], 
(producer, topics, topicSubscription) => { 170 | ConsumerOp(config, topicSubscription); 171 | }); 172 | 173 | ep.once(CONSUMER_READY, consumerMemory => { 174 | process.once('SIGINT', () => { 175 | consumerMemory.forEach(consumer => { 176 | ep.emit(PRINT_LOGGER, '[egg-kafka-node] consumer close connect'); 177 | consumer.close(true, function(error) { 178 | if (error) { 179 | ep.emit(PRINT_ERROR_LOGGER, '[egg-kafka-node] consumer close connect fail!'); 180 | ep.emit(PRINT_ERROR_LOGGER, error); 181 | } 182 | }); 183 | }); 184 | }); 185 | }); 186 | 187 | ep.on(PRINT_LOGGER, info => { 188 | console.info(info); 189 | }); 190 | 191 | ep.on(PRINT_ERROR_LOGGER, error => { 192 | console.error(error); 193 | }); 194 | 195 | ep.once(PRODUCER_READY, producer => { 196 | const createMessage = Message(config); 197 | const topic1Message = new Array(100); 198 | const topic2Message = new Array(100); 199 | const topic3Message = new Array(100); 200 | for (let i = 0; i < 100; i++) { 201 | topic1Message[i] = `this is a message ${new Date()} ${Math.random()}`; 202 | } 203 | for (let i = 0; i < 100; i++) { 204 | producer.send([ createMessage('testTopic1', 'Some', topic1Message) ], err => { 205 | if (err) { 206 | console.log(err); 207 | } 208 | }); 209 | } 210 | 211 | for (let i = 0; i < 100; i++) { 212 | topic2Message[i] = `this is a message ${new Date()} ${Math.random()}`; 213 | } 214 | for (let i = 0; i < 100; i++) { 215 | producer.send([ createMessage('testTopic2', 'Every', topic2Message) ], err => { 216 | if (err) { 217 | console.log(err); 218 | } 219 | }); 220 | } 221 | 222 | for (let i = 0; i < 100; i++) { 223 | topic3Message[i] = `this is a message ${new Date()} ${Math.random()}`; 224 | } 225 | for (let i = 0; i < 100; i++) { 226 | producer.send([ createMessage('testTopic3', 'New', topic3Message) ], err => { 227 | if (err) { 228 | console.log(err); 229 | } 230 | }); 231 | } 232 | 233 | }); 234 | 235 | 236 | ClientOp(kafkaHost, clientOption); 237 | -------------------------------------------------------------------------------- /index.d.ts: -------------------------------------------------------------------------------- 1 | import 'egg'; 2 | declare module 'egg' { 3 | 4 | type UnitMessage = string | Buffer; 5 | 6 | interface MessageType { 7 | topic: string, 8 | key?: string | Buffer, // Specify consumer for the corresponding key under topic 9 | messages: UnitMessage | UnitMessage[] 10 | } 11 | 12 | interface Application { 13 | kafka: { 14 | sendMessage(msg: MessageType): Promise<any> 15 | sendMessageSync(msg: MessageType, scb: (data: any) => void, ecb: (err: any) => void): void 16 | }; 17 | } 18 | 19 | interface Context { 20 | kafka: { 21 | sendMessage(msg: MessageType): Promise<any> 22 | sendMessageSync(msg: MessageType, scb: (data: any) => void, ecb: (err: any) => void): void 23 | }; 24 | } 25 | 26 | } -------------------------------------------------------------------------------- /lib/client.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const kafka = require('kafka-node'); 4 | const defaultOptions = { 5 | kafkaHost: 'localhost:9092', // A string of kafka broker/host combination delimited by comma 6 | connectTimeout: 10000, // in ms it takes to wait for a successful connection before moving to the next host 7 | requestTimeout: 30000, // in ms for a kafka request to timeout 8 | autoConnect: true, // automatically connect when KafkaClient is instantiated otherwise you need to manually call connect 9 | // idleConnection: 5 * 60 * 1000 // allows the broker 
to disconnect an idle connection from a client 10 | // connectRetryOptions: // object hash that applies to the initial connection 11 | maxAsyncRequests: 10, // maximum async operations at a time toward the kafka cluster 12 | // sslOptions: '', // Object, options to be passed to the tls broker sockets 13 | // sasl: '' // Object, SASL authentication configuration (only SASL/PLAIN is currently supported) 14 | }; 15 | module.exports = function(config) { 16 | // merge the caller-supplied config (kafkaHost plus any clientOption keys) over the defaults 17 | const options = Object.assign({}, defaultOptions, config); 18 | return new kafka.KafkaClient(options); 19 | }; 20 | -------------------------------------------------------------------------------- /lib/consumer.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { 4 | ConsumerGroup, 5 | } = require('kafka-node'); 6 | 7 | const defaultOptions = { 8 | // kafkaHost: 'broker:9092', // connect directly to kafka broker (instantiates a KafkaClient) 9 | // batch: undefined, // put client batch settings if you need them 10 | // ssl: true, // optional (defaults to false) or tls options hash 11 | // groupId: 'ExampleTestGroup', 12 | sessionTimeout: 15000, 13 | // An array of partition assignment protocols ordered by preference. 14 | // 'roundrobin' or 'range' string for built ins (see below to pass in custom assignment protocol) 15 | protocol: [ 'roundrobin' ], 16 | encoding: 'utf8', // default is utf8, use 'buffer' for binary data 17 | autoCommit: false, 18 | // autoCommitIntervalMs: 500, 19 | // Offsets to use for new groups other options could be 'earliest' or 'none' (none will emit an error if no offsets were saved) 20 | // equivalent to Java client's auto.offset.reset 21 | fromOffset: 'latest', // default 22 | commitOffsetsOnFirstJoin: true, // on the very first time this consumer group subscribes to a topic, record the offset returned in fromOffset (latest/earliest) 23 | // how to recover from OutOfRangeOffset error (where save offset is past server retention) accepts same value as fromOffset 24 | outOfRangeOffset: 'earliest', // default 25 | // The max wait time is the maximum amount of time in milliseconds to block waiting if insufficient data is available at the time the request is issued, default 100ms 26 | fetchMaxWaitMs: 100, 27 | // This is the minimum number of bytes of messages that must be available to give a response, default 1 byte 28 | fetchMinBytes: 1, 29 | // The maximum bytes to include in the message set for this partition. This helps bound the size of the response.
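// (Note: lib/sendMessage.js validates outgoing message byte size against this same fetchMaxBytes value.)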
30 | fetchMaxBytes: 1024 * 1024, 31 | 32 | migrateHLC: false, // for details please see Migration section below 33 | migrateRolling: true, 34 | // Callback to allow consumers with autoCommit false a chance to commit before a rebalance finishes 35 | // isAlreadyMember will be false on the first connection, and true on rebalances triggered after that 36 | // onRebalance: (isAlreadyMember, callback) => { callback(); } // or null 37 | }; 38 | 39 | const createConsumer = (topics, options = {}) => { 40 | const params = Object.assign({}, defaultOptions, options); 41 | if (!params.kafkaHost) throw new Error('kafkaHost cannot be undefined'); 42 | return new ConsumerGroup(params, topics); 43 | }; 44 | 45 | 46 | const createAllConsumer = config => { 47 | const { 48 | kafkaHost, 49 | consumerOption = [], 50 | } = config; 51 | const consumerMemory = []; 52 | consumerOption.forEach(group => { 53 | const topics = group.topics || []; 54 | const options = group.options || {}; 55 | const consumer = createConsumer(topics, Object.assign({}, options, { 56 | kafkaHost, 57 | groupId: group.groupId, 58 | })); 59 | consumerMemory.push(consumer); 60 | }); 61 | return consumerMemory; 62 | }; 63 | module.exports = createAllConsumer; 64 | -------------------------------------------------------------------------------- /lib/definition.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const CLIENT_READY = 'CLIENT_READY'; 4 | const PRODUCER_READY = 'PRODUCER_READY'; 5 | const SUBSCRIPTION_READY = 'SUBSCRIPTION_READY'; 6 | const CONSUMER_READY = 'CONSUMER_READY'; 7 | const PRODUCER_ERROR = 'PRODUCER_ERROR'; 8 | const TOPICS_READY = 'TOPICS_READY'; 9 | const SUBSCRIBE_CONSUMER_LOADED = 'SUBSCRIBE_CONSUMER_LOADED'; 10 | const CONSUMER_CONNECT = 'CONSUMER_CONNECT'; 11 | const ALL_CONSUMER_CONNECT = 'ALL_CONSUMER_CONNECT'; 12 | const PRINT_LOGGER = 'PRINT_LOGGER'; 13 | const PRINT_ERROR_LOGGER = 'PRINT_ERROR_LOGGER'; 14 | const START_READY = 'START_READY'; 15 | const CLOSE_ALL_CONSUMER = 'CLOSE_ALL_CONSUMER'; 16 | module.exports = { 17 | CLIENT_READY, 18 | PRODUCER_READY, 19 | PRODUCER_ERROR, 20 | CONSUMER_READY, 21 | SUBSCRIPTION_READY, 22 | TOPICS_READY, 23 | SUBSCRIBE_CONSUMER_LOADED, 24 | CONSUMER_CONNECT, 25 | ALL_CONSUMER_CONNECT, 26 | PRINT_LOGGER, 27 | PRINT_ERROR_LOGGER, 28 | START_READY, 29 | CLOSE_ALL_CONSUMER, 30 | }; 31 | -------------------------------------------------------------------------------- /lib/kafka-node.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const path = require('path'); 4 | const Client = require('./client'); 5 | const Consumer = require('./consumer'); 6 | const Producer = require('./producer'); 7 | const Subscription = require('./subscription'); 8 | const createEventsProxy = require('eventsproxy'); 9 | const kafkaLogging = require('kafka-node/logging'); 10 | const { SendMessage, SendMessageSync } = require('./sendMessage'); 11 | const { 12 | CLIENT_READY, 13 | PRODUCER_READY, 14 | CONSUMER_READY, 15 | SUBSCRIPTION_READY, 16 | TOPICS_READY, 17 | CONSUMER_CONNECT, 18 | CLOSE_ALL_CONSUMER, 19 | START_READY, 20 | } = require('./definition'); 21 | 22 | 23 | module.exports = app => { 24 | 25 | const ep = createEventsProxy(); 26 | 27 | const ClientOp = (kafkaHost, clientOption = {}, app) => { 28 | const option = Object.assign({}, { 29 | kafkaHost, 30 | }, clientOption); 31 | const client = Client(option); 32 | ep.emit(CLIENT_READY, client); 33 | 
app.coreLogger.info(`[egg-kafka-node] client kafkaHost:${kafkaHost} ready!`); 34 | }; 35 | 36 | const ProducerOp = (client, producerOption, app) => { 37 | const producer = Producer(client); 38 | const { 39 | autoCreateTopic, 40 | topics, 41 | } = producerOption; 42 | producer.on('ready', () => { 43 | ep.emit(PRODUCER_READY, producer); 44 | app.coreLogger.info('[egg-kafka-node] producer ready!'); 45 | if (autoCreateTopic && topics) { 46 | producer.createTopics(topics, false, err => { 47 | if (err) { 48 | app.coreLogger.error(`[egg-kafka-node] create topics: ${topics.join(' & ')} error!`); 49 | app.coreLogger.error(err); 50 | return; 51 | } 52 | ep.emit(TOPICS_READY, topics); 53 | app.coreLogger.info(`[egg-kafka-node] create topics: ${topics.join(' & ')} success!`); 54 | }); 55 | } 56 | }); 57 | 58 | producer.on('error', error => { 59 | app.coreLogger.error('[egg-kafka-node] producer connect error!'); 60 | app.coreLogger.error(error); 61 | }); 62 | }; 63 | 64 | const SubscriptionOp = (baseReadDir, consumerOption, app) => { 65 | const topicSubscription = Subscription(baseReadDir, consumerOption); 66 | ep.emit(SUBSCRIPTION_READY, topicSubscription); 67 | app.coreLogger.info('[egg-kafka-node] load consumer subscription success'); 68 | }; 69 | 70 | const ConsumerOp = (config, topicSubscription, app) => { 71 | const consumerMemory = Consumer(config); 72 | const start = Date.now(); 73 | const unregister = ep.wait(CONSUMER_CONNECT, () => { 74 | app.logger.info(`[egg-kafka-node] all consumers connected, took ${((Date.now() - start) / 1000).toFixed(3)} s!`); 75 | unregister && unregister(); 76 | }, consumerMemory.length); 77 | // don't wait for connections; run the subsequent tasks in parallel 78 | ep.emit(CONSUMER_READY, consumerMemory); 79 | 80 | consumerMemory.forEach(consumer => { 81 | 82 | consumer.on('connect', () => { 83 | const { options = {} } = consumer; 84 | const { kafkaHost, groupId } = options; 85 | app.logger.info(`[egg-kafka-node] kafkaHost: ${kafkaHost} groupId: ${groupId} consumer connect success!`); 86 | ep.emit(CONSUMER_CONNECT); 87 | }); 88 | 89 | consumer.on('message', message => { 90 | const { 91 | topic, 92 | key = '', 93 | value, 94 | } = message; 95 | consumer.pause(); 96 | let Subscriber = topicSubscription.get(`${topic}:${key || ''}`); 97 | if (!Subscriber) Subscriber = topicSubscription.get(`${topic}:`); 98 | if (!Subscriber) return consumer.resume(); 99 | const ctx = app.createAnonymousContext(); 100 | const subscriber = new Subscriber(ctx); 101 | subscriber 102 | .subscribe(message) 103 | .then(() => { 104 | consumer.commit(err => { 105 | consumer.resume(); 106 | if (err) { 107 | app.logger.error(`[egg-kafka-node] commit topic: ${topic} key: ${key} value: ${value} offset error!`); 108 | app.logger.error(err); 109 | return; 110 | } 111 | }); 112 | }) 113 | .catch(err => { 114 | consumer.resume(); 115 | app.logger.error(`[egg-kafka-node] consumer consume topic: ${topic} key: ${key} value: ${value} fail!`); 116 | app.logger.error(err); 117 | }); 118 | }); 119 | 120 | consumer.on('error', error => { 121 | const { options = {} } = consumer; 122 | const { kafkaHost, groupId } = options; 123 | app.logger.error(`[egg-kafka-node] kafkaHost: ${kafkaHost} groupId: ${groupId} consumer connect error!`); 124 | app.logger.error(error); 125 | }); 126 | }); 127 | }; 128 | 129 | const logger = app.getLogger('kafkaLogger'); 130 | kafkaLogging.setLoggerProvider(logger); 131 | 132 | const config = app.config.kafkaNode; 133 | 134 | const { 135 | kafkaHost, 136 | producerOption = {}, 137 | clientOption = {}, 138 | consumerOption = [], 139 | 
baseConsumersDir = './app/kafka', 140 | } = config; 141 | 142 | const baseReadDir = path.join(app.config.baseDir, baseConsumersDir); 143 | 144 | ep.once(CLIENT_READY, client => { 145 | ProducerOp(client, producerOption, app); 146 | SubscriptionOp(baseReadDir, consumerOption, app); 147 | }); 148 | 149 | ep.once([ PRODUCER_READY, TOPICS_READY, SUBSCRIPTION_READY, START_READY ], (producer, topics, topicSubscription) => { 150 | if (app.type === 'agent') return; // the agent process does not handle consumers 151 | ConsumerOp(config, topicSubscription, app); 152 | }); 153 | 154 | ep.once(CONSUMER_READY, consumerMemory => { 155 | ep.once(CLOSE_ALL_CONSUMER, () => { 156 | consumerMemory.forEach(consumer => { 157 | const { options: { kafkaHost, groupId } } = consumer; 158 | consumer.close(true, error => { 159 | if (error) { 160 | app.logger.error(`[egg-kafka-node] kafkaHost: ${kafkaHost} groupId: ${groupId} consumer close connect fail!`); 161 | app.logger.error(error); 162 | return; 163 | } 164 | app.logger.info(`[egg-kafka-node] kafkaHost: ${kafkaHost} groupId: ${groupId} consumer connect already closed!`); 165 | }); 166 | }); 167 | }); 168 | }); 169 | 170 | ep.once(PRODUCER_READY, producer => { 171 | app.kafka = { 172 | sendMessage(data = {}) { 173 | return SendMessage(data, producer, config); 174 | }, 175 | sendMessageSync(data = {}, scb, ecb) { 176 | SendMessageSync(data, producer, config, scb, ecb); 177 | }, 178 | }; 179 | }); 180 | 181 | ClientOp(kafkaHost, clientOption, app); 182 | 183 | app.beforeClose(() => { 184 | ep.emit(CLOSE_ALL_CONSUMER); 185 | }); 186 | 187 | app.beforeStart(async () => { 188 | app.coreLogger.info('[egg-kafka-node] started!'); 189 | }); 190 | 191 | app.ready(() => { 192 | app.coreLogger.info('[egg-kafka-node] ready!'); 193 | ep.emit(START_READY); 194 | }); 195 | 196 | }; 197 | -------------------------------------------------------------------------------- /lib/message.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = config => { 4 | const { attributes = 0 } = config.producerOption; 5 | return function(topic, key, messages, timestamp) { 6 | return { 7 | topic, 8 | messages, // multi messages should be an array, a single message can be just a string or a KeyedMessage instance 9 | key, // only needed when using keyed partitioner 10 | // partition: app.config.kafkaNode.pub.partition, // default 0 11 | attributes, // default: 0 12 | timestamp: timestamp || Date.now(), // <-- defaults to Date.now() (only available with kafka v0.10 and KafkaClient only) 13 | }; 14 | }; 15 | }; 16 | -------------------------------------------------------------------------------- /lib/producer.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { HighLevelProducer } = require('kafka-node'); 4 | 5 | const defaultOptions = { 6 | requireAcks: 1, 7 | ackTimeoutMs: 5000, 8 | partitionerType: 2, 9 | }; 10 | const createProducer = (client, options = {}) => { 11 | const params = Object.assign({}, defaultOptions, options); 12 | const producer = new HighLevelProducer(client, params); 13 | return producer; 14 | }; 15 | module.exports = createProducer; 16 | -------------------------------------------------------------------------------- /lib/sendMessage.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const Message = require('./message'); 4 | 5 | 6 | const isString = str => typeof str === 'string'; 7 | const isBuffer = buf => 
Buffer.isBuffer(buf); 8 | const isArray = arr => Array.isArray(arr); 9 | 10 | const checkMessage = (messages, fetchMaxBytes = 1024 * 1024) => { 11 | let len = 0; 12 | if (isArray(messages)) { 13 | for (let i = 0, ii = messages.length; i < ii; i++) { 14 | const m = messages[i]; 15 | if (isBuffer(m)) { 16 | len += m.length; 17 | } else if (isString(m)) { 18 | len += Buffer.byteLength(m); 19 | } else { 20 | return new Error('invalid message format'); 21 | } 22 | } 23 | } else if (isString(messages)) { 24 | len += Buffer.byteLength(messages); 25 | } else if (isBuffer(messages)) { 26 | len += messages.length; 27 | } else { 28 | return new Error('invalid message format'); 29 | } 30 | 31 | if (len > fetchMaxBytes) { 32 | return new Error(`messages max bytes is ${fetchMaxBytes}`); 33 | } 34 | return null; 35 | 36 | }; 37 | 38 | const searchFetchMaxBytes = (config, topic) => { 39 | const { consumerOption = [] } = config; 40 | let fetchMaxBytes = 1024 * 1024; 41 | for (let i = 0, ii = consumerOption.length; i < ii; i++) { 42 | const group = consumerOption[i]; 43 | const { topics = [], options = {} } = group; 44 | if (topics.some(v => v === topic) && options.fetchMaxBytes) { 45 | fetchMaxBytes = options.fetchMaxBytes; 46 | break; 47 | } 48 | } 49 | return fetchMaxBytes; 50 | }; 51 | 52 | 53 | exports.SendMessage = (data = {}, producer, config) => { 54 | const { topic, key, messages } = data; 55 | const createMessage = Message(config); 56 | const fetchMaxBytes = searchFetchMaxBytes(config, topic); 57 | return new Promise((resolve, reject) => { 58 | const error = checkMessage(messages, fetchMaxBytes); 59 | if (error) return reject(error); 60 | producer.send([ createMessage(topic, key, [].concat(messages)) ], (err, data) => { 61 | if (err) return reject(err); 62 | return resolve(data); 63 | }); 64 | }); 65 | }; 66 | 67 | exports.SendMessageSync = (data = {}, producer, config, scb, ecb) => { 68 | const { topic, key, messages } = data; 69 | const createMessage = Message(config); 70 | const fetchMaxBytes = searchFetchMaxBytes(config, topic); 71 | const error = checkMessage(messages, fetchMaxBytes); 72 | if (error) return ecb && ecb(error); 73 | producer.send([ createMessage(topic, key, [].concat(messages)) ], (err, data) => { 74 | if (err) return ecb && ecb(err); 75 | scb && scb(data); 76 | }); 77 | }; 78 | -------------------------------------------------------------------------------- /lib/subscription.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const createEventsProxy = require('eventsproxy'); 4 | const path = require('path'); 5 | const fs = require('fs'); 6 | const utils = require('./utils'); 7 | 8 | const { 9 | SUBSCRIBE_CONSUMER_LOADED, 10 | } = require('./definition'); 11 | 12 | const ep = createEventsProxy(); 13 | 14 | const readAllConsumers = (baseReadDir, consumerOption = []) => { 15 | 16 | consumerOption.forEach(group => { 17 | const { topics = [] } = group; 18 | topics.forEach(topic => { 19 | const targetTopic = path.join(baseReadDir, topic); 20 | const files = fs.readdirSync(targetTopic); 21 | files.forEach(file => { 22 | const targetConsumer = path.join(targetTopic, file); 23 | const stat = fs.statSync(targetConsumer); 24 | const m = file.match(/^(.*)Consumer\.[tj]s$/); 25 | if (!stat.isFile() || !m) return; 26 | const consumer = utils.requireFile(targetConsumer); 27 | if (!consumer) return; 28 | ep.emit(SUBSCRIBE_CONSUMER_LOADED, { 29 | key: `${topic}:${m[1] || ''}`, 30 | consumer: consumer.default || consumer, 31 | }); 32 | });
33 | 34 | // tip: fileDirent support Node 10.10.0 35 | // const fileDirents = fs.readdirSync(targetTopic, { 36 | // withFileTypes: true, 37 | // }); 38 | // fileDirents.forEach(dirent => { 39 | // if (dirent.isFile()) { 40 | // const fname = dirent.name; 41 | // const m = fname.match(/^(.+)Consumer.js$/); 42 | // const targetConsumer = path.join(targetTopic, fname); 43 | // if (m) { 44 | // ep.emit(SUBSCRIBE_CONSUMER_LOADED, { 45 | // key: `${topic}:${m[1]}`, 46 | // consumer: require(targetConsumer), 47 | // }); 48 | // } 49 | // } 50 | // }); 51 | 52 | }); 53 | }); 54 | }; 55 | 56 | 57 | const createSubscription = (baseReadDir, consumerOption = []) => { 58 | 59 | const topicSubscription = new Map(); 60 | 61 | ep.register(SUBSCRIBE_CONSUMER_LOADED, data => { 62 | topicSubscription.set(data.key, data.consumer); 63 | }); 64 | 65 | readAllConsumers(baseReadDir, consumerOption); 66 | return topicSubscription; 67 | }; 68 | 69 | module.exports = createSubscription; 70 | -------------------------------------------------------------------------------- /lib/utils.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const isString = data => typeof data === 'string'; 4 | 5 | const isTsFile = filepath => filepath.endsWith('.ts'); 6 | const isJsFile = filepath => filepath.endsWith('.js'); 7 | const requireTsFile = filepath => { 8 | if (!filepath || !isString(filepath)) return; 9 | require('ts-node/register'); 10 | return require(filepath); 11 | }; 12 | 13 | const requireFile = filepath => { 14 | if (!filepath || !isString(filepath)) return; 15 | if (isJsFile(filepath)) { 16 | return require(filepath); 17 | } 18 | if (isTsFile(filepath)) { 19 | return requireTsFile(filepath); 20 | } 21 | return; 22 | }; 23 | 24 | 25 | module.exports = { 26 | isString, 27 | requireFile, 28 | }; 29 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "egg-kafka-node", 3 | "version": "1.1.6", 4 | "description": "kafka plugin for egg.js", 5 | "eggPlugin": { 6 | "name": "kafkaNode" 7 | }, 8 | "keywords": [ 9 | "egg", 10 | "eggPlugin", 11 | "egg-plugin", 12 | "kafka-node", 13 | "kafka-plugin", 14 | "kafka-node-plugin", 15 | "egg-kafka-node" 16 | ], 17 | "dependencies": { 18 | "eventsproxy": "^1.3.4", 19 | "kafka-node": "^4.0.0" 20 | }, 21 | "devDependencies": { 22 | "autod": "^3.0.0", 23 | "autod-egg": "^1.0.0", 24 | "egg": "^2.0.0", 25 | "egg-bin": "^4.3.0", 26 | "egg-ci": "^1.8.0", 27 | "egg-mock": "^3.13.0", 28 | "eslint": "^4.11.0", 29 | "eslint-config-egg": "^5.1.0", 30 | "ts-node": "^8.9.1", 31 | "typescript": "^3.8.3", 32 | "webstorm-disable-index": "^1.2.0" 33 | }, 34 | "engines": { 35 | "node": ">=8.0.0" 36 | }, 37 | "scripts": { 38 | "test": "npm run lint -- --fix && egg-bin pkgfiles && npm run test-local", 39 | "test-local": "egg-bin test", 40 | "cov": "egg-bin cov", 41 | "lint": "eslint .", 42 | "ci": "egg-bin pkgfiles --check && npm run lint && npm run cov", 43 | "pkgfiles": "egg-bin pkgfiles", 44 | "autod": "autod" 45 | }, 46 | "files": [ 47 | "app", 48 | "config", 49 | "agent.js", 50 | "lib", 51 | "app.js", 52 | "demo.js", 53 | "index.d.ts" 54 | ], 55 | "ci": { 56 | "version": "8, 9" 57 | }, 58 | "repository": { 59 | "type": "git", 60 | "url": "git+https://github.com/JohnApache/egg-kafka-node.git" 61 | }, 62 | "bugs": { 63 | "url": "https://github.com/JohnApache/egg-kafka-node/issues" 64 | }, 65 | "homepage": 
"https://github.com/JohnApache/egg-kafka-node#readme", 66 | "author": "JohnApache", 67 | "license": "MIT" 68 | } 69 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/app/controller/home.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const Controller = require('egg').Controller; 4 | 5 | class HomeController extends Controller { 6 | async index() { 7 | this.ctx.body = 'hi, ' + this.app.plugins.kafkaNode.name; 8 | } 9 | } 10 | 11 | module.exports = HomeController; 12 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/app/controller/kafka.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const Controller = require('egg').Controller; 4 | class KafkaController extends Controller { 5 | async index() { 6 | const { ctx } = this; 7 | ctx.body = 'test kafka'; 8 | } 9 | } 10 | 11 | module.exports = KafkaController; 12 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/app/extend/context.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | get kafka() { 5 | return this.app.kafka; 6 | }, 7 | }; 8 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/app/kafka/testTopic1/Consumer.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // const Subscription = require('egg').Subscription; 4 | 5 | const asyncTask = (topic, key, value) => { 6 | return new Promise(resolve => { 7 | resolve(`[Consumer] test message: ${topic} : ${key} : ${value} : ${new Date()}`); 8 | }); 9 | }; 10 | 11 | class CommonSubscription { 12 | async subscribe(message = {}) { 13 | const { topic, key, value } = message; 14 | const msg = await asyncTask(topic, key, value); 15 | console.log(msg); 16 | } 17 | } 18 | 19 | module.exports = CommonSubscription; 20 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/app/kafka/testTopic1/Some2Consumer.ts: -------------------------------------------------------------------------------- 1 | import {Context} from 'egg' 2 | interface IMessage { 3 | topic: string; 4 | key: string; 5 | value: any; 6 | } 7 | 8 | const asyncTsTask = (topic: string, key: string, value: any) => { 9 | const aaaa:number = 111; 10 | return new Promise((resolve: any) => { 11 | resolve(`[Some2Consumer] test message:${aaaa} ${topic} : ${key} : ${value} : ${new Date()}`); 12 | }); 13 | }; 14 | 15 | class Some2Subscription { 16 | constructor(public ctx: Context) {} 17 | async subscribe(message: Partial = {}) { 18 | const { topic = '', key = '', value = '' } = message || {}; 19 | const msg = await asyncTsTask(topic, key, value); 20 | console.log(msg); 21 | const text = await this.ctx.service.home.find(); 22 | console.log(text); 23 | } 24 | } 25 | 26 | export default Some2Subscription; -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/app/kafka/testTopic1/SomeConsumer.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // const Subscription = require('egg').Subscription; 4 | 5 | const asyncTask = (topic, key, value) => { 6 
| return new Promise(resolve => { 7 | resolve(`[SomeConsumer] test message: ${topic} : ${key} : ${value} : ${new Date()}`); 8 | }); 9 | }; 10 | 11 | class SomeSubscription { 12 | async subscribe(message = {}) { 13 | const { topic, key, value } = message; 14 | const msg = await asyncTask(topic, key, value); 15 | console.log(msg); 16 | } 17 | } 18 | 19 | module.exports = SomeSubscription; 20 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/app/kafka/testTopic2/Consumer.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // const Subscription = require('egg').Subscription; 4 | 5 | const asyncTask = (topic, key, value) => { 6 | return new Promise(resolve => { 7 | resolve(`[Common Subscription] test message: ${topic} : ${key} : ${value} : ${new Date()}`); 8 | }); 9 | }; 10 | 11 | class CommonSubscription { 12 | async subscribe(message = {}) { 13 | const { topic, key, value } = message; 14 | const msg = await asyncTask(topic, key, value); 15 | console.log(msg); 16 | } 17 | } 18 | 19 | module.exports = CommonSubscription; 20 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/app/kafka/testTopic2/EveryConsumer.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // const Subscription = require('egg').Subscription; 4 | 5 | const asyncTask = (topic, key, value) => { 6 | return new Promise(resolve => { 7 | resolve(`[EverySubscription] test message: ${topic} : ${key} : ${value} : ${new Date()}`); 8 | }); 9 | }; 10 | 11 | class EverySubscription { 12 | async subscribe(message = {}) { 13 | const { topic, key, value } = message; 14 | const msg = await asyncTask(topic, key, value); 15 | console.log(msg); 16 | } 17 | } 18 | 19 | module.exports = EverySubscription; 20 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/app/kafka/testTopic3/NewConsumer.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // const Subscription = require('egg').Subscription; 4 | 5 | const asyncTask = (topic, key, value) => { 6 | return new Promise(resolve => { 7 | resolve(`[NewSubscription] test message: ${topic} : ${key} : ${value} : ${new Date()}`); 8 | }); 9 | }; 10 | 11 | class NewSubscription { 12 | async subscribe(message = {}) { 13 | const { topic, key, value } = message; 14 | const msg = await asyncTask(topic, key, value); 15 | console.log(msg); 16 | } 17 | } 18 | 19 | module.exports = NewSubscription; 20 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/app/router.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = app => { 4 | const { router, controller } = app; 5 | 6 | router.get('/', controller.home.index); 7 | }; 8 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/app/service/home.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const Service = require('egg').Service; 4 | 5 | class HomeService extends Service { 6 | async find() { 7 | const text = await new Promise(resolve => { 8 | setTimeout(() => { 9 | resolve('Hello World'); 10 | }); 11 | }); 12 | return text; 13 | } 
14 | } 15 | 16 | module.exports = HomeService; 17 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/config/config.default.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | exports.keys = '123456'; 4 | 5 | exports.kafkaNode = { 6 | kafkaHost: '127.0.0.1:9092', 7 | clientOption: {}, 8 | consumerOption: [{ 9 | groupId: 'group1', 10 | topics: [ 'testTopic1' ], 11 | options: { 12 | fetchMaxWaitMs: 100, 13 | fetchMinBytes: 1, 14 | fetchMaxBytes: 1024 * 1024, 15 | }, // consumerGroup options for each consumer group 16 | }, { 17 | groupId: 'group2', 18 | topics: [ 'testTopic2', 'testTopic3' ], 19 | options: {}, 20 | }, { 21 | groupId: 'group3', 22 | topics: [ 'testTopic3' ], 23 | }], 24 | producerOption: { 25 | requireAcks: 1, 26 | ackTimeoutMs: 100, 27 | partitionerType: 2, 28 | autoCreateTopic: true, 29 | topics: [ 'testTopic1', 'testTopic2', 'testTopic3' ], 30 | }, 31 | messageOption: { 32 | partition: 0, 33 | attributes: 0, 34 | }, 35 | baseConsumersDir: './app/kafka', // base directory from which consumer files are read 36 | }; 37 | -------------------------------------------------------------------------------- /test/fixtures/apps/kafka-node-test/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kafka-node-test", 3 | "version": "0.0.1" 4 | } -------------------------------------------------------------------------------- /test/kafka-node.test.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const mock = require('egg-mock'); 4 | const assert = require('assert'); 5 | 6 | describe('test/kafka-node.test.js', () => { 7 | let app; 8 | before(() => { 9 | app = mock.app({ 10 | baseDir: 'apps/kafka-node-test', 11 | }); 12 | return app.ready(); 13 | }); 14 | after(() => { 15 | app.close(); 16 | }); 17 | afterEach(mock.restore); 18 | 19 | it('app.kafka exists and app.kafka.sendMessage is a method', () => { 20 | const kafka = app.kafka; 21 | assert(Object.prototype.toString.call(kafka) === '[object Object]'); 22 | assert(Object.prototype.toString.call(kafka.sendMessage) === '[object Function]'); 23 | }); 24 | 25 | it('ctx.kafka exists and is the same as app.kafka', () => { 26 | const ctx = app.mockContext(); 27 | const kafka = ctx.kafka; 28 | assert(Object.prototype.toString.call(kafka) === '[object Object]'); 29 | assert(Object.prototype.toString.call(kafka.sendMessage) === '[object Function]'); 30 | }); 31 | 32 | it('app.kafka.sendMessage can publish a message to kafka', async () => { 33 | const kafka = app.kafka; 34 | try { 35 | await kafka.sendMessage({ 36 | topic: 'testTopic1', 37 | key: 'Some', 38 | messages: `this is a message ${new Date()} ${Math.random()}`, 39 | }); 40 | assert(true); 41 | } catch (error) { 42 | assert(false); 43 | } 44 | }); 45 | 46 | it('app.kafka.sendMessage can publish a buffer message', async () => { 47 | const kafka = app.kafka; 48 | try { 49 | await kafka.sendMessage({ 50 | topic: 'testTopic1', 51 | key: Buffer.from('Some'), 52 | messages: Buffer.from(`this is a message ${new Date()} ${Math.random()}`), 53 | }); 54 | assert(true); 55 | } catch (error) { 56 | assert(false); 57 | } 58 | }); 59 | 60 | it('app.kafka.sendMessageSync can also publish a message to kafka in a sync way', () => { 61 | const kafka = app.kafka; 62 | kafka.sendMessageSync({ 63 | topic: 'testTopic1', 64 | key: 'Some', 65 | messages: `this is a message ${new Date()} ${Math.random()}`, 66 | }, () => { 67 | 
/test/fixtures/apps/kafka-node-test/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "kafka-node-test",
3 |   "version": "0.0.1"
4 | }
--------------------------------------------------------------------------------
/test/kafka-node.test.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 | 
3 | const mock = require('egg-mock');
4 | const assert = require('assert');
5 | 
6 | describe('test/kafka-node.test.js', () => {
7 |   let app;
8 |   before(() => {
9 |     app = mock.app({
10 |       baseDir: 'apps/kafka-node-test',
11 |     });
12 |     return app.ready();
13 |   });
14 |   after(() => {
15 |     return app.close();
16 |   });
17 |   afterEach(mock.restore);
18 | 
19 |   it('app.kafka exists and app.kafka.sendMessage is a method', () => {
20 |     const kafka = app.kafka;
21 |     assert(Object.prototype.toString.call(kafka) === '[object Object]');
22 |     assert(Object.prototype.toString.call(kafka.sendMessage) === '[object Function]');
23 |   });
24 | 
25 |   it('ctx.kafka exists and is the same as app.kafka', () => {
26 |     const ctx = app.mockContext();
27 |     const kafka = ctx.kafka;
28 |     assert(Object.prototype.toString.call(kafka) === '[object Object]');
29 |     assert(Object.prototype.toString.call(kafka.sendMessage) === '[object Function]');
30 |   });
31 | 
32 |   it('app.kafka.sendMessage can publish a message to kafka', async () => {
33 |     const kafka = app.kafka;
34 |     try {
35 |       await kafka.sendMessage({
36 |         topic: 'testTopic1',
37 |         key: 'Some',
38 |         messages: `this is a message ${new Date()} ${Math.random()}`,
39 |       });
40 |       assert(true);
41 |     } catch (error) {
42 |       assert.fail(error);
43 |     }
44 |   });
45 | 
46 |   it('app.kafka.sendMessage can publish a buffer message', async () => {
47 |     const kafka = app.kafka;
48 |     try {
49 |       await kafka.sendMessage({
50 |         topic: 'testTopic1',
51 |         key: Buffer.from('Some'),
52 |         messages: Buffer.from(`this is a message ${new Date()} ${Math.random()}`),
53 |       });
54 |       assert(true);
55 |     } catch (error) {
56 |       assert.fail(error);
57 |     }
58 |   });
59 | 
60 |   it('app.kafka.sendMessageSync can also publish a message to kafka in a sync way', () => {
61 |     const kafka = app.kafka;
62 |     kafka.sendMessageSync({
63 |       topic: 'testTopic1',
64 |       key: 'Some',
65 |       messages: `this is a message ${new Date()} ${Math.random()}`,
66 |     }, () => {
67 |       assert(true);
68 |     }, () => {
69 |       assert(false);
70 |     });
71 | 
72 |   });
73 | 
74 |   it('messages published by app.kafka.sendMessage can be consumed by the corresponding topics', async () => {
75 |     const kafka = app.kafka;
76 |     for (let i = 0; i < 10; i++) {
77 |       await kafka.sendMessage({
78 |         topic: 'testTopic1',
79 |         key: 'Some',
80 |         messages: `this is a message ${new Date()} ${Math.random()}`,
81 |       });
82 |     }
83 | 
84 |     for (let i = 0; i < 10; i++) {
85 |       await kafka.sendMessage({
86 |         topic: 'testTopic2',
87 |         key: 'Every',
88 |         messages: `this is a message ${new Date()} ${Math.random()}`,
89 |       });
90 |     }
91 | 
92 | 
93 |     for (let i = 0; i < 10; i++) {
94 |       await kafka.sendMessageSync({
95 |         topic: 'testTopic3',
96 |         key: 'New',
97 |         messages: `this is a message ${new Date()} ${Math.random()}`,
98 |       });
99 |     }
100 | 
101 |     assert(true);
102 |   });
103 | 
104 |   it('supports consumers defined in ts files', done => {
105 |     const kafka = app.kafka;
106 |     kafka.sendMessageSync({
107 |       topic: 'testTopic1',
108 |       key: 'Some2',
109 |       messages: `this is a message ${new Date()} ${Math.random()}`,
110 |     }, () => {
111 |       setTimeout(() => {
112 |         assert(true);
113 |         done();
114 |       }, 5000);
115 |     }, () => {
116 |       assert(false);
117 |     });
118 | 
119 |   });
120 | 
121 |   it('supports consuming a message without the key param', done => {
122 |     const kafka = app.kafka;
123 |     kafka.sendMessageSync({
124 |       topic: 'testTopic1',
125 |       messages: `this is a message ${new Date()} ${Math.random()}`,
126 |     }, () => {
127 |       setTimeout(() => {
128 |         assert(true);
129 |         done();
130 |       }, 5000);
131 |     }, () => {
132 |       assert(false);
133 |     });
134 |   });
135 | 
136 |   it('supports consuming a message with a random key', done => {
137 |     const kafka = app.kafka;
138 |     kafka.sendMessageSync({
139 |       key: `${Math.random()}`,
140 |       topic: 'testTopic2',
141 |       messages: `this is a message ${new Date()} ${Math.random()}`,
142 |     }, () => {
143 |       setTimeout(() => {
144 |         assert(true);
145 |         done();
146 |       }, 5000);
147 |     }, () => {
148 |       assert(false);
149 |     });
150 |   });
151 | });
152 | 
153 | 
--------------------------------------------------------------------------------
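Two details of the test file above are worth calling out. First, the fixture layout suggests that a message key is dispatched to the consumer file named after it (key 'Some' to app/kafka/testTopic1/SomeConsumer.js, key 'Some2' to the .ts variant); that is an inference from the fixtures, not a statement about lib/subscription.js. Second, sendMessageSync(payload, onSuccess, onError) takes separate success and error callbacks rather than a Node-style (err, result) callback, so util.promisify does not fit; a small wrapper (the name sendMessageAsync is hypothetical, not part of the plugin) would let tests await it:

'use strict';

// Wrap the callback-style sendMessageSync in a Promise.
function sendMessageAsync(kafka, payload) {
  return new Promise((resolve, reject) => {
    kafka.sendMessageSync(payload, resolve, reject);
  });
}

// Usage inside an async test:
//   await sendMessageAsync(app.kafka, { topic: 'testTopic1', messages: 'hi' });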
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 |   "compilerOptions": {
3 |     /* Basic Options */
4 |     // "incremental": true,                   /* Enable incremental compilation */
5 |     "target": "ES2015",                       /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */
6 |     "module": "commonjs",                     /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
7 |     // "lib": [],                             /* Specify library files to be included in the compilation. */
8 |     // "allowJs": true,                       /* Allow javascript files to be compiled. */
9 |     // "checkJs": true,                       /* Report errors in .js files. */
10 |     // "jsx": "preserve",                    /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
11 |     // "declaration": true,                  /* Generates corresponding '.d.ts' file. */
12 |     // "declarationMap": true,               /* Generates a sourcemap for each corresponding '.d.ts' file. */
13 |     // "sourceMap": true,                    /* Generates corresponding '.map' file. */
14 |     // "outFile": "./",                      /* Concatenate and emit output to single file. */
15 |     // "outDir": "./",                       /* Redirect output structure to the directory. */
16 |     // "rootDir": "./",                      /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
17 |     // "composite": true,                    /* Enable project compilation */
18 |     // "tsBuildInfoFile": "./",              /* Specify file to store incremental compilation information */
19 |     // "removeComments": true,               /* Do not emit comments to output. */
20 |     // "noEmit": true,                       /* Do not emit outputs. */
21 |     // "importHelpers": true,                /* Import emit helpers from 'tslib'. */
22 |     // "downlevelIteration": true,           /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
23 |     // "isolatedModules": true,              /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
24 | 
25 |     /* Strict Type-Checking Options */
26 |     "strict": true,                           /* Enable all strict type-checking options. */
27 |     // "noImplicitAny": true,                /* Raise error on expressions and declarations with an implied 'any' type. */
28 |     // "strictNullChecks": true,             /* Enable strict null checks. */
29 |     // "strictFunctionTypes": true,          /* Enable strict checking of function types. */
30 |     // "strictBindCallApply": true,          /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
31 |     // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
32 |     // "noImplicitThis": true,               /* Raise error on 'this' expressions with an implied 'any' type. */
33 |     // "alwaysStrict": true,                 /* Parse in strict mode and emit "use strict" for each source file. */
34 | 
35 |     /* Additional Checks */
36 |     // "noUnusedLocals": true,               /* Report errors on unused locals. */
37 |     // "noUnusedParameters": true,           /* Report errors on unused parameters. */
38 |     // "noImplicitReturns": true,            /* Report error when not all code paths in function return a value. */
39 |     // "noFallthroughCasesInSwitch": true,   /* Report errors for fallthrough cases in switch statement. */
40 | 
41 |     /* Module Resolution Options */
42 |     // "moduleResolution": "node",           /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
43 |     // "baseUrl": "./",                      /* Base directory to resolve non-absolute module names. */
44 |     // "paths": {},                          /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
45 |     // "rootDirs": [],                       /* List of root folders whose combined content represents the structure of the project at runtime. */
46 |     // "typeRoots": [],                      /* List of folders to include type definitions from. */
47 |     // "types": [],                          /* Type declaration files to be included in compilation. */
48 |     // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
49 |     "esModuleInterop": true,                  /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
50 |     // "preserveSymlinks": true,             /* Do not resolve the real path of symlinks. */
51 |     // "allowUmdGlobalAccess": true,         /* Allow accessing UMD globals from modules. */
52 | 
53 |     /* Source Map Options */
54 |     // "sourceRoot": "",                     /* Specify the location where debugger should locate TypeScript files instead of source locations. */
55 |     // "mapRoot": "",                        /* Specify the location where debugger should locate map files instead of generated locations. */
56 |     // "inlineSourceMap": true,              /* Emit a single file with source maps instead of having a separate file. */
57 |     // "inlineSources": true,                /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
58 | 
59 |     /* Experimental Options */
60 |     // "experimentalDecorators": true,       /* Enables experimental support for ES7 decorators. */
61 |     // "emitDecoratorMetadata": true,        /* Enables experimental support for emitting type metadata for decorators. */
62 | 
63 |     /* Advanced Options */
64 |     "forceConsistentCasingInFileNames": true  /* Disallow inconsistently-cased references to the same file. */
65 |   },
66 |   "include": [
67 |     "test/**/*.ts"
68 |   ]
69 | }
70 | 
--------------------------------------------------------------------------------