├── .babelrc
├── .eslintrc.js
├── .github
│   └── workflows
│       ├── codeql-analysis.yml
│       ├── deploy.yml
│       ├── lint.yml
│       └── test.yml
├── .gitignore
├── .npmignore
├── .prettierrc
├── LICENSE.md
├── README.md
├── kafka
│   ├── README.md
│   └── docker-compose.yml
├── package-lock.json
├── package.json
├── src
│   ├── constants.ts
│   ├── deserializer
│   │   ├── avro-response.deserializer.ts
│   │   └── kafka-response.deserializer.ts
│   ├── index.ts
│   ├── interfaces.ts
│   ├── kafka-module-options.provider.ts
│   ├── kafka.decorator.ts
│   ├── kafka.module.ts
│   ├── kafka.service.ts
│   └── serializer
│       ├── avro-request.serializer.ts
│       └── kafka-request.serializer.ts
├── test
│   ├── app-async.e2e-spec.ts
│   ├── app-sync.e2e-spec.ts
│   ├── constants.ts
│   ├── e2e
│   │   └── app
│   │       ├── config.app.async.ts
│   │       ├── config.app.sync.ts
│   │       ├── config.app.ts
│   │       ├── key-schema.avsc
│   │       ├── test-config.module.ts
│   │       ├── test-config.service.ts
│   │       ├── test.controller.ts
│   │       └── value-schema.avsc
│   ├── jest-e2e.json
│   └── utils.ts
├── tsconfig.build.json
└── tsconfig.json

--------------------------------------------------------------------------------
/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 |   "presets": [
3 |     [
4 |       "@babel/preset-env", {
5 |         "targets": {
6 |           "node": "current"
7 |         },
8 |         "debug": true
9 |       }
10 |     ],
11 |     "@babel/preset-typescript"
12 |   ],
13 |   "plugins": [
14 |     ["@babel/plugin-proposal-decorators", { "legacy": true }]
15 |   ]
16 | }
17 | 
--------------------------------------------------------------------------------
/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 |   parser: '@typescript-eslint/parser',
3 |   parserOptions: {
4 |     project: 'tsconfig.json',
5 |     sourceType: 'module',
6 |   },
7 |   plugins: ['@typescript-eslint/eslint-plugin'],
8 |   extends: [
9 |     'plugin:@typescript-eslint/eslint-recommended',
10 |     'plugin:@typescript-eslint/recommended',
11 |     'prettier',
12 |     'prettier/@typescript-eslint',
13 |   ],
14 |   root: true,
15 |   env: {
16 |     node: true,
17 |     jest: true,
18 |   },
19 |   rules: {
20 |     '@typescript-eslint/interface-name-prefix': 'off',
21 |     '@typescript-eslint/explicit-function-return-type': 'off',
22 |     '@typescript-eslint/explicit-module-boundary-types': 'off',
23 |     '@typescript-eslint/no-explicit-any': 'off',
24 |   },
25 | };
26 | 
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | name: "CodeQL"
7 | 
8 | on:
9 |   push:
10 |     branches: [master]
11 |   pull_request:
12 |     # The branches below must be a subset of the branches above
13 |     branches: [master]
14 |   schedule:
15 |     - cron: '0 8 * * 1'
16 | 
17 | jobs:
18 |   analyze:
19 |     name: Analyze
20 |     runs-on: ubuntu-latest
21 | 
22 |     strategy:
23 |       fail-fast: false
24 |       matrix:
25 |         # Override automatic language detection by changing the below list
26 |         # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
27 |         language: ['javascript']
28 |         # Learn more...
29 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection 30 | 31 | steps: 32 | - name: Checkout repository 33 | uses: actions/checkout@v2 34 | with: 35 | # We must fetch at least the immediate parents so that if this is 36 | # a pull request then we can checkout the head. 37 | fetch-depth: 2 38 | 39 | # If this run was triggered by a pull request event, then checkout 40 | # the head of the pull request instead of the merge commit. 41 | - run: git checkout HEAD^2 42 | if: ${{ github.event_name == 'pull_request' }} 43 | 44 | # Initializes the CodeQL tools for scanning. 45 | - name: Initialize CodeQL 46 | uses: github/codeql-action/init@v1 47 | with: 48 | languages: ${{ matrix.language }} 49 | # If you wish to specify custom queries, you can do so here or in a config file. 50 | # By default, queries listed here will override any specified in a config file. 51 | # Prefix the list here with "+" to use these queries and those in the config file. 52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 53 | 54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 55 | # If this step fails, then you should remove it and run the build manually (see below) 56 | - name: Autobuild 57 | uses: github/codeql-action/autobuild@v1 58 | 59 | # ℹ️ Command-line programs to run using the OS shell. 60 | # 📚 https://git.io/JvXDl 61 | 62 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 63 | # and modify them (or add more) to build your code if your project 64 | # uses a compiled language 65 | 66 | #- run: | 67 | # make bootstrap 68 | # make release 69 | 70 | - name: Perform CodeQL Analysis 71 | uses: github/codeql-action/analyze@v1 72 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: Create npm package 2 | on: 3 | release: 4 | types: [created] 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v2 10 | # Setup .npmrc file to publish to npm 11 | - uses: actions/setup-node@v1 12 | with: 13 | node-version: '12.x' 14 | registry-url: 'https://registry.npmjs.org' 15 | - run: npm ci 16 | - run: npm publish --access public 17 | env: 18 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 19 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | branches: [master] 8 | 9 | jobs: 10 | lint: 11 | runs-on: ubuntu-latest 12 | 13 | strategy: 14 | matrix: 15 | node-version: [12.x] 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | - name: Use Node.js ${{ matrix.node-version }} 20 | uses: actions/setup-node@v1 21 | with: 22 | node-version: ${{ matrix.node-version }} 23 | - run: npm ci 24 | - run: npm run lint 25 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Node.js CI 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | 9 | strategy: 10 | matrix: 11 | node-version: [12.x] 12 | 13 | steps: 14 | - uses: actions/checkout@v2 15 | - name: Use Node.js ${{ matrix.node-version }} 16 | 
uses: actions/setup-node@v1 17 | with: 18 | node-version: ${{ matrix.node-version }} 19 | - run: npm ci 20 | - name: Start Docker containers for Zookeeper, Kafka and Schema Registry 21 | run: npm run kafka:start 22 | - run: sleep 120 23 | - run: npm run test:e2e 24 | env: 25 | CI: true 26 | - run: npm run kafka:stop 27 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | dist/ 3 | .vscode -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | src/ 2 | tsconfig.json 3 | .github/ 4 | .gitignore 5 | .prettierrc 6 | kafka/ 7 | .babelrc 8 | test/ 9 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "trailingComma": "all" 4 | } 5 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | This is free and unencumbered software released into the public domain. 2 | 3 | Anyone is free to copy, modify, publish, use, compile, sell, or 4 | distribute this software, either in source code form or as a compiled 5 | binary, for any purpose, commercial or non-commercial, and by any 6 | means. 7 | 8 | In jurisdictions that recognize copyright laws, the author or authors 9 | of this software dedicate any and all copyright interest in the 10 | software to the public domain. We make this dedication for the benefit 11 | of the public at large and to the detriment of our heirs and 12 | successors. We intend this dedication to be an overt act of 13 | relinquishment in perpetuity of all present and future rights to this 14 | software under copyright law. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 19 | IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR 20 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 21 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 23 | 24 | For more information, please refer to 25 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 | Nest Logo 4 | 5 | 6 | KafkaJS Logo 7 | 8 |

9 | 10 | # NestJS + KafkaJS 11 | 12 | Integration of KafkaJS with NestJS to build event driven microservices. 13 | 14 | 15 | ## Setup 16 | 17 | Import and add the `KafkaModule` to the imports array of the module for which you would like to use Kafka. 18 | 19 | ### Synchronous Module Initialization 20 | 21 | Register the `KafkaModule` synchronous with the `register()` method: 22 | 23 | ```javascript 24 | @Module({ 25 | imports: [ 26 | KafkaModule.register([ 27 | { 28 | name: 'HERO_SERVICE', 29 | options: { 30 | client: { 31 | clientId: 'hero', 32 | brokers: ['localhost:9092'], 33 | }, 34 | consumer: { 35 | groupId: 'hero-consumer' 36 | } 37 | } 38 | }, 39 | ]), 40 | ] 41 | ... 42 | }) 43 | 44 | ``` 45 | 46 | ### Asynchronous Module Initialization 47 | 48 | Register the `KafkaModule` asynchronous with the `registerAsync()` method: 49 | 50 | ```javascript 51 | import { ConfigModule, ConfigService } from '@nestjs/config'; 52 | 53 | @Module({ 54 | imports: [ 55 | ConfigModule.forRoot(), 56 | KafkaModule.registerAsync(['HERO_SERVICE'], { 57 | useFactory: async (configService: ConfigService) => { 58 | const broker = this.configService.get('broker'); 59 | return [ 60 | { 61 | name: 'HERO_SERVICE', 62 | options: { 63 | clientId: 'hero', 64 | brokers: [broker], 65 | }, 66 | consumer: { 67 | groupId: 'hero-consumer' 68 | } 69 | } 70 | } 71 | ]; 72 | }, 73 | inject: [ConfigService] 74 | }) 75 | ] 76 | ... 77 | }) 78 | 79 | ``` 80 | 81 | Full settings can be found: 82 | 83 | | Config | Options | 84 | | ------ | ------- | 85 | | client | https://kafka.js.org/docs/configuration | 86 | | consumer | https://kafka.js.org/docs/consuming#options | 87 | | producer | https://kafka.js.org/docs/producing#options | 88 | | serializer | | 89 | | deserializer | | 90 | | consumeFromBeginning | true/false | 91 | | | | 92 | 93 | 94 | 95 | ### Subscribing 96 | 97 | Subscribing to a topic to accept messages. 98 | 99 | ```javascript 100 | export class Consumer { 101 | constructor( 102 | @Inject('HERO_SERVICE') private client: KafkaService 103 | ) {} 104 | 105 | onModuleInit(): void { 106 | this.client.subscribeToResponseOf('hero.kill.dragon', this) 107 | } 108 | 109 | @SubscribeTo('hero.kill.dragon') 110 | async getWorld(data: any, key: any, offset: number, timestamp: number, partition: number, headers: IHeaders): Promise { 111 | ... 112 | } 113 | 114 | } 115 | 116 | ``` 117 | 118 | ### Producing 119 | 120 | Send messages back to kafka. 121 | 122 | ```javascript 123 | const TOPIC_NAME = 'hero.kill.dragon'; 124 | 125 | export class Producer { 126 | constructor( 127 | @Inject('HERO_SERVICE') private client: KafkaService 128 | ) {} 129 | 130 | async post(message: string = 'Hello world'): Promise { 131 | const result = await this.client.send({ 132 | topic: TOPIC_NAME, 133 | messages: [ 134 | { 135 | key: '1', 136 | value: message 137 | } 138 | ] 139 | }); 140 | 141 | return result; 142 | } 143 | 144 | } 145 | 146 | ``` 147 | 148 | ### Schema Registry support. 149 | 150 | By default messages are converted to JSON objects were possible. If you're using 151 | AVRO you can add the `SchemaRegistry` deserializer to convert the messages. 
This uses the [KafkaJS Schema-registry module](https://github.com/kafkajs/confluent-schema-registry).
150 | 
151 | In your `module.ts` (note that the serializer and deserializer sit inside `options`, alongside `client` and `consumer`):
152 | 
153 | ```javascript
154 | import { join } from 'path';
155 | 
156 | @Module({
157 |   imports: [
158 |     KafkaModule.register([
159 |       {
160 |         name: 'HERO_SERVICE',
161 |         options: {
162 |           client: {
163 |             clientId: 'hero',
164 |             brokers: ['localhost:9092'],
165 |           },
166 |           consumer: {
167 |             groupId: 'hero-consumer'
168 |           },
169 |           deserializer: new KafkaAvroResponseDeserializer({
170 |             host: 'http://localhost:8081'
171 |           }),
172 |           serializer: new KafkaAvroRequestSerializer({
173 |             config: {
174 |               host: 'http://localhost:8081/'
175 |             },
176 |             schemas: [
177 |               {
178 |                 topic: 'test.topic',
179 |                 key: join(__dirname, 'key-schema.avsc'),
180 |                 value: join(__dirname, 'value-schema.avsc')
181 |               }
182 |             ],
183 |           }),
184 |         }
185 |       },
186 |     ]),
187 |   ]
188 |   ...
189 | })
190 | ```
191 | 
192 | See the [e2e test](https://github.com/rob3000/nestjs-kafka/tree/master/test/e2e/app) for an example.
193 | 
194 | ## TODO
195 | 
196 | * Tests
197 | 
198 | 
199 | PRs Welcome :heart:
200 | 
--------------------------------------------------------------------------------
/kafka/README.md:
--------------------------------------------------------------------------------
1 | # KAFKA
2 | 
3 | A Kafka environment to run E2E tests on. Based on [cp-all-in-one](https://github.com/confluentinc/cp-all-in-one/blob/5.5.1-post/cp-all-in-one-community/docker-compose.yml)
4 | 
--------------------------------------------------------------------------------
/kafka/docker-compose.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: '2'
3 | services:
4 |   zookeeper:
5 |     image: confluentinc/cp-zookeeper:latest
6 |     hostname: zookeeper
7 |     ports:
8 |       - 2181:2181
9 |     environment:
10 |       ZOOKEEPER_CLIENT_PORT: '2181'
11 |       ZOOKEEPER_TICK_TIME: '2000'
12 | 
13 |   broker:
14 |     image: confluentinc/cp-kafka:5.4.2
15 |     hostname: broker
16 |     depends_on:
17 |       - zookeeper
18 |     ports:
19 |       - 29092:29092
20 |       - 9092:9092
21 |       - 9101:9101
22 |     environment:
23 |       KAFKA_BROKER_ID: 1
24 |       KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
25 |       # KAFKA_ADVERTISED_HOST_NAME: 'broker'
26 |       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
27 |       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
28 |       KAFKA_LISTENERS: 'PLAINTEXT://:29092,PLAINTEXT_HOST://:9092'
29 |       KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
30 |       KAFKA_DELETE_TOPIC_ENABLE: 'true'
31 |       KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
32 |       KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
33 |       KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
34 |       KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
35 | 
36 |   schema-registry:
37 |     image: confluentinc/cp-schema-registry:5.5.1
38 |     depends_on:
39 |       - zookeeper
40 |       - broker
41 |     ports:
42 |       - "8081:8081"
43 |     environment:
44 |       SCHEMA_REGISTRY_HOST_NAME: schema-registry
45 |       SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
46 | 
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "@rob3000/nestjs-kafka",
3 |   "version": "1.4.1",
4 |   "description": "KafkaJS integration with NestJS",
5 |   "main": "dist/index.js",
6 |   "types": "dist/index.d.ts",
7 |   "scripts": {
8 |     "prepare": "tsc --project tsconfig.build.json",
9 |     "test": "jest",
10 |     "lint": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix",
11 |     "kafka:start": "docker-compose -f 
./kafka/docker-compose.yml up -d", 12 | "kafka:stop": "docker-compose -f ./kafka/docker-compose.yml down -v", 13 | "test:watch": "jest --watch", 14 | "test:cov": "jest --coverage", 15 | "test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand", 16 | "test:e2e": "jest --config ./test/jest-e2e.json --detectOpenHandles --runInBand" 17 | }, 18 | "repository": { 19 | "type": "git", 20 | "url": "git+https://github.com/rob3000/nestjs-kafka.git" 21 | }, 22 | "keywords": [ 23 | "nestjs", 24 | "kafkajs" 25 | ], 26 | "author": "Rob Shepherd", 27 | "license": "MIT", 28 | "bugs": { 29 | "url": "https://github.com/rob3000/nestjs-kafka/issues" 30 | }, 31 | "homepage": "https://github.com/rob3000/nestjs-kafka#readme", 32 | "dependencies": { 33 | "@kafkajs/confluent-schema-registry": "^2.0.1", 34 | "@nestjs/common": "^7.6.17", 35 | "@nestjs/core": "^7.6.17", 36 | "@nestjs/microservices": "^7.6.17", 37 | "kafkajs": "^1.15.0", 38 | "reflect-metadata": "^0.1.13", 39 | "rxjs": "^6.6.7" 40 | }, 41 | "devDependencies": { 42 | "@babel/core": "^7.14.3", 43 | "@babel/plugin-proposal-decorators": "^7.10.5", 44 | "@babel/preset-env": "^7.14.4", 45 | "@babel/preset-typescript": "^7.10.4", 46 | "@nestjs/testing": "^7.6.17", 47 | "@types/jest": "^26.0.23", 48 | "@types/node": "^14.17.2", 49 | "@typescript-eslint/eslint-plugin": "^4.26.0", 50 | "@typescript-eslint/parser": "^4.26.0", 51 | "babel-jest": "^26.6.3", 52 | "eslint": "^7.27.0", 53 | "eslint-config-prettier": "^6.15.0", 54 | "eslint-plugin-import": "^2.23.4", 55 | "jest": "^26.6.3", 56 | "prettier": "^2.3.0", 57 | "ts-jest": "^26.5.6", 58 | "typescript": "^4.3.2" 59 | }, 60 | "jest": { 61 | "moduleFileExtensions": [ 62 | "js", 63 | "json", 64 | "ts" 65 | ], 66 | "rootDir": "src", 67 | "testRegex": ".spec.ts$", 68 | "transform": { 69 | "^.+\\.(t|j)s$": "ts-jest" 70 | }, 71 | "coverageDirectory": "../coverage", 72 | "testEnvironment": "node" 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /src/constants.ts: -------------------------------------------------------------------------------- 1 | export const KAFKA_MODULE_OPTIONS = 'KAFKA_MODULE_OPTIONS'; 2 | -------------------------------------------------------------------------------- /src/deserializer/avro-response.deserializer.ts: -------------------------------------------------------------------------------- 1 | import { Deserializer } from "@nestjs/microservices"; 2 | import { Logger } from '@nestjs/common/services/logger.service'; 3 | import { KafkaResponse } from "../interfaces"; 4 | import { SchemaRegistry } from "@kafkajs/confluent-schema-registry"; 5 | import { SchemaRegistryAPIClientArgs } from "@kafkajs/confluent-schema-registry/dist/api" 6 | import { SchemaRegistryAPIClientOptions } from "@kafkajs/confluent-schema-registry/dist/@types"; 7 | import { KafkaResponseDeserializer } from "./kafka-response.deserializer"; 8 | 9 | export class KafkaAvroResponseDeserializer 10 | implements Deserializer> { 11 | 12 | protected registry: SchemaRegistry; 13 | protected logger = new Logger(KafkaAvroResponseDeserializer.name); 14 | protected fallback: KafkaResponseDeserializer; 15 | 16 | constructor(config: SchemaRegistryAPIClientArgs, options?: SchemaRegistryAPIClientOptions) { 17 | this.registry = new SchemaRegistry(config, options); 18 | this.fallback = new KafkaResponseDeserializer() 19 | } 20 | 21 | async deserialize(message: any, options?: Record): Promise { 22 | const { value, key, timestamp, offset } 
= message;
23 |     const decodeResponse = {
24 |       response: value,
25 |       key,
26 |       timestamp,
27 |       offset,
28 |     }
29 | 
30 |     try {
31 |       decodeResponse.key = (message.key?.length > 0) ? await this.registry.decode(message.key) : null;
32 |       decodeResponse.response = (message.value) ? await this.registry.decode(message.value) : message.value;
33 |     } catch (e) {
34 |       this.logger.error(e);
35 |       // Fall back to the normal kafka deserialize.
36 |       const msg = this.fallback.deserialize(message);
37 |       Object.assign(decodeResponse, msg);
38 |     }
39 | 
40 |     return decodeResponse;
41 |   }
42 | 
43 | }
44 | 
--------------------------------------------------------------------------------
/src/deserializer/kafka-response.deserializer.ts:
--------------------------------------------------------------------------------
1 | import { Deserializer } from '@nestjs/microservices';
2 | import { KafkaResponse } from '../interfaces';
3 | 
4 | export class KafkaResponseDeserializer
5 |   implements Deserializer<any, KafkaResponse>
6 | {
7 |   deserialize(message: any, options?: Record<string, any>): KafkaResponse {
8 |     const { key, value, timestamp, offset, headers } = message;
9 |     let id = key;
10 |     let response = value;
11 | 
12 |     if (Buffer.isBuffer(key)) {
13 |       id = Buffer.from(key).toString();
14 |     }
15 | 
16 |     if (Buffer.isBuffer(value)) {
17 |       response = Buffer.from(value).toString();
18 |     }
19 | 
20 |     Object.keys(headers).forEach((key) => {
21 |       if (Buffer.isBuffer(headers[key])) {
22 |         headers[key] = Buffer.from(headers[key]).toString();
23 |       }
24 |     });
25 | 
26 |     return {
27 |       key: id,
28 |       response,
29 |       timestamp,
30 |       offset,
31 |       headers,
32 |     };
33 |   }
34 | }
35 | 
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
1 | export * from "./kafka.module";
2 | export * from "./interfaces";
3 | export * from "./kafka.service";
4 | export * from "./kafka.decorator";
5 | export * from "./deserializer/kafka-response.deserializer";
6 | export * from "./serializer/kafka-request.serializer";
7 | 
8 | export * from "./deserializer/avro-response.deserializer";
9 | export * from "./serializer/avro-request.serializer";
10 | 
--------------------------------------------------------------------------------
/src/interfaces.ts:
--------------------------------------------------------------------------------
1 | import { Deserializer, Serializer } from '@nestjs/microservices';
2 | import {
3 |   ConsumerConfig,
4 |   KafkaConfig,
5 |   ProducerConfig,
6 |   ProducerRecord,
7 |   Message,
8 |   ConsumerRunConfig,
9 |   Transaction,
10 |   RecordMetadata,
11 | } from 'kafkajs';
12 | import { ModuleMetadata, Type } from '@nestjs/common';
13 | 
14 | export interface IHeaders {
15 |   [key: string]: any;
16 | }
17 | export interface KafkaResponse<T = any> {
18 |   response: T;
19 |   key: string;
20 |   timestamp: string;
21 |   offset: number;
22 |   headers?: IHeaders;
23 | }
24 | export interface KafkaModuleOption {
25 |   name: string;
26 |   options: {
27 |     client: KafkaConfig;
28 |     consumer: ConsumerConfig;
29 |     consumerRunConfig?: ConsumerRunConfig;
30 |     producer?: ProducerConfig;
31 |     deserializer?: Deserializer;
32 |     serializer?: Serializer;
33 |     consumeFromBeginning?: boolean;
34 |     seek?: Record<string, number | 'earliest' | Date>;
35 |     autoConnect?: boolean;
36 |   };
37 | }
38 | export interface KafkaMessageObject extends Message {
39 |   value: any | Buffer | string | null;
40 |   key: any;
41 | }
42 | export interface KafkaMessageSend extends Omit<ProducerRecord, 'topic'> {
43 |   messages: KafkaMessageObject[];
44 |   topic?: string;
45 | }
46 | export 
interface KafkaModuleOptionsAsync
47 |   extends Pick<ModuleMetadata, 'imports'> {
48 |   inject?: any[];
49 |   useExisting?: Type<KafkaOptionsFactory>;
50 |   useClass?: Type<KafkaOptionsFactory>;
51 |   useFactory?: (
52 |     ...args: any[]
53 |   ) => Promise<KafkaModuleOption[]> | KafkaModuleOption[];
54 | }
55 | export interface KafkaOptionsFactory {
56 |   creatKafkaModuleOptions(): Promise<KafkaModuleOption[]> | KafkaModuleOption[];
57 | }
58 | export interface KafkaTransaction
59 |   extends Omit<Transaction, 'send' | 'sendBatch'> {
60 |   send(message: KafkaMessageSend): Promise<RecordMetadata[]>;
61 | }
62 | 
--------------------------------------------------------------------------------
/src/kafka-module-options.provider.ts:
--------------------------------------------------------------------------------
1 | import { Inject, Injectable } from '@nestjs/common';
2 | import { KafkaModuleOption } from '.';
3 | import { KAFKA_MODULE_OPTIONS } from './constants';
4 | 
5 | @Injectable()
6 | export class KafkaModuleOptionsProvider {
7 |   constructor(
8 |     @Inject(KAFKA_MODULE_OPTIONS)
9 |     private readonly kafkaModuleOptions: KafkaModuleOption[],
10 |   ) {}
11 | 
12 |   getOptionsByName(name: string) {
13 |     return this.kafkaModuleOptions.find((x) => x.name === name).options;
14 |   }
15 | }
16 | 
--------------------------------------------------------------------------------
/src/kafka.decorator.ts:
--------------------------------------------------------------------------------
1 | export const SUBSCRIBER_MAP = new Map();
2 | export const SUBSCRIBER_OBJECT_MAP = new Map();
3 | export const SCHEMAS = new Map();
4 | 
5 | export function SubscribeTo(topic: string) {
6 |   return (target, propertyKey, descriptor) => {
7 |     const originalMethod = target[propertyKey];
8 |     SUBSCRIBER_MAP.set(topic, originalMethod);
9 |     return descriptor;
10 |   };
11 | }
12 | 
--------------------------------------------------------------------------------
/src/kafka.module.ts:
--------------------------------------------------------------------------------
1 | import { Module, DynamicModule, Global, Provider } from '@nestjs/common';
2 | import { KafkaService } from './kafka.service';
3 | import { KafkaModuleOption, KafkaModuleOptionsAsync, KafkaOptionsFactory } from './interfaces';
4 | import { KafkaModuleOptionsProvider } from './kafka-module-options.provider';
5 | import { KAFKA_MODULE_OPTIONS } from './constants';
6 | 
7 | @Global()
8 | @Module({})
9 | export class KafkaModule {
10 |   static register(options: KafkaModuleOption[]): DynamicModule {
11 |     const clients = (options || []).map((item) => ({
12 |       provide: item.name,
13 |       useValue: new KafkaService(item.options),
14 |     }));
15 | 
16 |     return {
17 |       module: KafkaModule,
18 |       providers: clients,
19 |       exports: clients,
20 |     };
21 |   }
22 | 
23 |   public static registerAsync(
24 |     consumers: string[],
25 |     connectOptions: KafkaModuleOptionsAsync,
26 |   ): DynamicModule {
27 |     const clients = [];
28 |     for (const consumer of consumers) {
29 |       clients.push({
30 |         provide: consumer,
31 |         useFactory: async (
32 |           kafkaModuleOptionsProvider: KafkaModuleOptionsProvider,
33 |         ) => {
34 |           return new KafkaService(
35 |             kafkaModuleOptionsProvider.getOptionsByName(consumer),
36 |           );
37 |         },
38 |         inject: [KafkaModuleOptionsProvider],
39 |       });
40 |     }
41 | 
42 |     const createKafkaModuleOptionsProvider = this.createKafkaModuleOptionsProvider(connectOptions);
43 | 
44 |     return {
45 |       module: KafkaModule,
46 |       imports: connectOptions.imports || [],
47 |       providers: [
48 |         createKafkaModuleOptionsProvider,
49 |         KafkaModuleOptionsProvider,
50 |         ...clients,
51 |       ],
52 |       exports: [
53 |         createKafkaModuleOptionsProvider,
54 |         ...clients,
55 |       ],
56 |     };
57 |   }
58 | 
59 |   private 
static createKafkaModuleOptionsProvider(
60 |     options: KafkaModuleOptionsAsync,
61 |   ): Provider {
62 |     if (options.useFactory) {
63 |       return {
64 |         provide: KAFKA_MODULE_OPTIONS,
65 |         useFactory: options.useFactory,
66 |         inject: options.inject || [],
67 |       };
68 |     }
69 |     return {
70 |       provide: KAFKA_MODULE_OPTIONS,
71 |       useFactory: async (optionsFactory: KafkaOptionsFactory) =>
72 |         await optionsFactory.creatKafkaModuleOptions(),
73 |       inject: [options.useExisting || options.useClass],
74 |     };
75 |   }
76 | }
77 | 
--------------------------------------------------------------------------------
/src/kafka.service.ts:
--------------------------------------------------------------------------------
1 | import { Injectable, OnModuleDestroy, OnModuleInit } from "@nestjs/common";
2 | import {
3 |   Consumer,
4 |   Kafka,
5 |   Producer,
6 |   RecordMetadata,
7 |   Admin,
8 |   SeekEntry,
9 |   TopicPartitionOffsetAndMetadata,
10 |   Offsets,
11 | } from 'kafkajs';
12 | import { Deserializer, Serializer } from "@nestjs/microservices";
13 | import { Logger } from "@nestjs/common/services/logger.service";
14 | import { KafkaLogger } from "@nestjs/microservices/helpers/kafka-logger";
15 | import { KafkaResponseDeserializer } from "./deserializer/kafka-response.deserializer";
16 | import { KafkaRequestSerializer } from "./serializer/kafka-request.serializer";
17 | import { KafkaModuleOption, KafkaMessageSend, KafkaTransaction } from './interfaces';
18 | 
19 | import {
20 |   SUBSCRIBER_MAP,
21 |   SUBSCRIBER_OBJECT_MAP
22 | } from './kafka.decorator';
23 | 
24 | @Injectable()
25 | export class KafkaService implements OnModuleInit, OnModuleDestroy {
26 | 
27 |   private kafka: Kafka;
28 |   private producer: Producer;
29 |   private consumer: Consumer;
30 |   private admin: Admin;
31 |   private deserializer: Deserializer;
32 |   private serializer: Serializer;
33 |   private autoConnect: boolean;
34 |   private options: KafkaModuleOption['options'];
35 | 
36 |   protected topicOffsets: Map<string, (SeekEntry & { high: string; low: string })[]> = new Map();
37 | 
38 |   protected logger = new Logger(KafkaService.name);
39 | 
40 |   constructor(
41 |     options: KafkaModuleOption['options']
42 |   ) {
43 |     const {
44 |       client,
45 |       consumer: consumerConfig,
46 |       producer: producerConfig,
47 |     } = options;
48 | 
49 |     this.kafka = new Kafka({
50 |       ...client,
51 |       logCreator: KafkaLogger.bind(null, this.logger)
52 |     });
53 | 
54 |     const { groupId } = consumerConfig;
55 |     const consumerOptions = Object.assign(
56 |       {
57 |         groupId: this.getGroupIdSuffix(groupId),
58 |       },
59 |       consumerConfig
60 |     );
61 | 
62 |     this.autoConnect = options.autoConnect ?? true;
63 |     this.consumer = this.kafka.consumer(consumerOptions);
64 |     this.producer = this.kafka.producer(producerConfig);
65 |     this.admin = this.kafka.admin();
66 | 
67 |     this.initializeDeserializer(options);
68 |     this.initializeSerializer(options);
69 |     this.options = options;
70 |   }
71 | 
72 |   async onModuleInit(): Promise<void> {
73 |     await this.connect();
74 |     await this.getTopicOffsets();
75 |     SUBSCRIBER_MAP.forEach((functionRef, topic) => {
76 |       this.subscribe(topic);
77 |     });
78 |     this.bindAllTopicToConsumer();
79 |   }
80 | 
81 |   async onModuleDestroy(): Promise<void> {
82 |     await this.disconnect();
83 |   }
84 | 
85 |   /**
86 |    * Connect the kafka service.
87 |    */
88 |   async connect(): Promise<void> {
89 |     if (!this.autoConnect) {
90 |       return;
91 |     }
92 | 
93 |     await this.producer.connect();
94 |     await this.consumer.connect();
95 |     await this.admin.connect();
96 |   }
97 | 
98 |   /**
99 |    * Disconnects the kafka service.
100 |    */
101 |   async disconnect(): Promise<void> {
102 |     await this.producer.disconnect();
103 |     await this.consumer.disconnect();
104 |     await this.admin.disconnect();
105 |   }
106 | 
107 |   /**
108 |    * Gets the high, low and partitions of a topic.
109 |    */
110 |   private async getTopicOffsets(): Promise<void> {
111 |     const topics = SUBSCRIBER_MAP.keys();
112 | 
113 |     for await (const topic of topics) {
114 |       try {
115 |         const topicOffsets = await this.admin.fetchTopicOffsets(topic);
116 |         this.topicOffsets.set(topic, topicOffsets);
117 |       } catch (e) {
118 |         this.logger.error('Error fetching topic offset: ', topic);
119 |       }
120 |     }
121 |   }
122 | 
123 |   /**
124 |    * Subscribes to the topics.
125 |    *
126 |    * @param topic
127 |    */
128 |   private async subscribe(topic: string): Promise<void> {
129 |     await this.consumer.subscribe({
130 |       topic,
131 |       fromBeginning: this.options.consumeFromBeginning || false
132 |     });
133 |   }
134 | 
135 |   /**
136 |    * Send/produce a message to a topic.
137 |    *
138 |    * @param message
139 |    */
140 |   async send(message: KafkaMessageSend): Promise<RecordMetadata[]> {
141 |     if (!this.producer) {
142 |       this.logger.error('There is no producer, unable to send message.');
143 |       return;
144 |     }
145 | 
146 |     const serializedPacket = await this.serializer.serialize(message);
147 | 
148 |     // @todo - rather than have a producerRecord,
149 |     // most of this can be done when we create the controller.
150 |     return await this.producer.send(serializedPacket);
151 |   }
152 | 
153 |   /**
154 |    * Gets the groupId suffix for the consumer.
155 |    *
156 |    * @param groupId
157 |    */
158 |   public getGroupIdSuffix(groupId: string): string {
159 |     return groupId + '-client';
160 |   }
161 | 
162 |   /**
163 |    * Calls the method you are subscribed to.
164 |    *
165 |    * @param topic
166 |    *   The topic to subscribe to.
167 |    * @param instance
168 |    *   The class instance.
169 |    */
170 |   subscribeToResponseOf<T>(topic: string, instance: T): void {
171 |     SUBSCRIBER_OBJECT_MAP.set(topic, instance);
172 |   }
173 | 
174 |   /**
175 |    * Returns a new producer transaction in order to produce messages and commit offsets together
176 |    */
177 |   async transaction(): Promise<KafkaTransaction> {
178 |     const producer = this.producer;
179 |     if (!producer) {
180 |       const msg = 'There is no producer, unable to start transactions.';
181 |       this.logger.error(msg);
182 |       throw new Error(msg);
183 |     }
184 | 
185 |     // Capture the serializer: inside the object literal below, `this` refers
186 |     // to the transaction object, not to this KafkaService instance.
187 |     const serializer = this.serializer;
188 |     const tx = await producer.transaction();
189 |     const retval: KafkaTransaction = {
190 |       abort(): Promise<void> {
191 |         return tx.abort();
192 |       },
193 |       commit(): Promise<void> {
194 |         return tx.commit();
195 |       },
196 |       isActive(): boolean {
197 |         return tx.isActive();
198 |       },
199 |       async send(message: KafkaMessageSend): Promise<RecordMetadata[]> {
200 |         const serializedPacket = await serializer.serialize(message);
201 |         return await tx.send(serializedPacket);
202 |       },
203 |       sendOffsets(offsets: Offsets & { consumerGroupId: string }): Promise<void> {
204 |         return tx.sendOffsets(offsets);
205 |       },
206 |     };
207 |     return retval;
208 |   }
209 | 
210 |   /**
211 |    * Commit consumer offsets manually.
212 |    * Please note that in most cases you will want to use the given __autoCommitThreshold__
213 |    * or use a transaction to atomically set offsets and outgoing messages.
214 |    *
215 |    * @param topicPartitions
216 |    */
217 |   async commitOffsets(topicPartitions: Array<TopicPartitionOffsetAndMetadata>): Promise<void> {
218 |     return this.consumer.commitOffsets(topicPartitions);
219 |   }
220 | 
221 |   /**
222 |    * Sets up the serializer to encode outgoing messages.
223 |    *
224 |    * @param options
225 |    */
226 |   protected initializeSerializer(options: KafkaModuleOption['options']): void {
227 |     this.serializer = (options && options.serializer) || new KafkaRequestSerializer();
228 |   }
229 | 
230 |   /**
231 |    * Sets up the deserializer to decode incoming messages.
232 |    *
233 |    * @param options
234 |    */
235 |   protected initializeDeserializer(options: KafkaModuleOption['options']): void {
236 |     this.deserializer = (options && options.deserializer) || new KafkaResponseDeserializer();
237 |   }
238 | 
239 |   /**
240 |    * Runs the consumer and calls the consumers when a message arrives.
241 |    */
242 |   private bindAllTopicToConsumer(): void {
243 |     const runConfig = (this.options.consumerRunConfig) ? this.options.consumerRunConfig : {};
244 |     this.consumer.run({
245 |       ...runConfig,
246 |       eachMessage: async ({ topic, partition, message }) => {
247 |         const objectRef = SUBSCRIBER_OBJECT_MAP.get(topic);
248 |         const callback = SUBSCRIBER_MAP.get(topic);
249 | 
250 |         try {
251 |           const { timestamp, response, offset, key, headers } = await this.deserializer.deserialize(message, { topic });
252 |           await callback.apply(objectRef, [response, key, offset, timestamp, partition, headers]);
253 |         } catch(e) {
254 |           this.logger.error(`Error for message ${topic}: ${e}`);
255 | 
256 |           // Log and throw to ensure we don't keep processing the messages when there is an error.
257 |           throw e;
258 |         }
259 |       },
260 |     });
261 | 
262 |     if (this.options.seek !== undefined) {
263 |       this.seekTopics();
264 |     }
265 |   }
266 | 
267 |   /**
268 |    * Seeks to a specific offset defined in the config
269 |    * or to the lowest value and across all partitions.
270 |    */
271 |   private seekTopics(): void {
272 |     Object.keys(this.options.seek).forEach((topic) => {
273 |       const topicOffsets = this.topicOffsets.get(topic);
274 |       const seekPoint = this.options.seek[topic];
275 | 
276 |       topicOffsets.forEach((topicOffset) => {
277 |         let seek = String(seekPoint);
278 | 
279 |         // Seek by timestamp
280 |         if (typeof seekPoint == 'object') {
281 |           const time = seekPoint as Date;
282 |           seek = time.getTime().toString();
283 |         }
284 | 
285 |         // Seek to the earliest timestamp.
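286 |         // ('earliest' resolves to the partition's low-water mark captured by
287 |         // getTopicOffsets() at startup, so the consumer replays the full log.)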
288 |         if (seekPoint === 'earliest') {
289 |           seek = topicOffset.low;
290 |         }
291 | 
292 |         this.consumer.seek({
293 |           topic,
294 |           partition: topicOffset.partition,
295 |           offset: seek
296 |         });
297 |       })
298 |     })
299 |   }
300 | }
301 | 
--------------------------------------------------------------------------------
/src/serializer/avro-request.serializer.ts:
--------------------------------------------------------------------------------
1 | import { Serializer } from "@nestjs/microservices";
2 | import { Logger } from '@nestjs/common/services/logger.service';
3 | import { SchemaRegistry } from "@kafkajs/confluent-schema-registry";
4 | import { SchemaRegistryAPIClientArgs } from "@kafkajs/confluent-schema-registry/dist/api"
5 | import { SchemaRegistryAPIClientOptions } from "@kafkajs/confluent-schema-registry/dist/@types";
6 | 
7 | import { KafkaMessageSend, KafkaMessageObject } from "../interfaces";
8 | 
9 | type KafkaAvroRequestSerializerSchema = {
10 |   topic: string;
11 |   key?: string;
12 |   value?: string;
13 |   keySuffix?: string,
14 |   valueSuffix?: string,
15 | }
16 | 
17 | export type KafkaAvroRequestSerializerConfig = {
18 |   schemas: KafkaAvroRequestSerializerSchema[],
19 |   config: SchemaRegistryAPIClientArgs;
20 |   options?: SchemaRegistryAPIClientOptions;
21 |   schemaSeparator?: string;
22 |   schemaFetchIntervalSeconds?: number;
23 | }
24 | 
25 | interface KafkaSchemaMap {
26 |   keyId: number | null;
27 |   valueId: number;
28 |   keySuffix: string;
29 |   valueSuffix: string;
30 | }
31 | 
32 | export class KafkaAvroRequestSerializer
33 |   implements Serializer<KafkaMessageSend, Promise<KafkaMessageSend>> {
34 | 
35 |   protected registry: SchemaRegistry;
36 |   protected logger = new Logger(KafkaAvroRequestSerializer.name);
37 |   protected schemas: Map<string, KafkaSchemaMap> = new Map();
38 |   protected separator: string;
39 |   protected config: KafkaAvroRequestSerializerConfig;
40 |   private lastSchemaFetchInterval: Map<string, number> = new Map();
41 | 
42 |   constructor(options: KafkaAvroRequestSerializerConfig) {
43 |     this.registry = new SchemaRegistry(options.config, options.options);
44 |     this.config = {
45 |       schemaFetchIntervalSeconds: 3600,
46 |       ...options
47 |     };
48 | 
49 |     this.getSchemaIds();
50 |   }
51 | 
52 |   /**
53 |    * Grab the schemaIds for the registry to cache for serialization.
54 |    */
55 |   private async getSchemaIds(): Promise<void> {
56 |     for await (const schema of this.config.schemas.values()) {
57 |       await this.getSchemaId(schema);
58 |     }
59 |   }
60 | 
61 |   /**
62 |    * Gets a single schema from schema registry.
63 |    *
64 |    * @param schema
65 |    */
66 |   private async getSchemaId(schema: KafkaAvroRequestSerializerSchema): Promise<void> {
67 |     const keySuffix = schema.keySuffix ?? 'key';
68 |     const valueSuffix = schema.valueSuffix ?? 'value';
69 | 
70 |     try {
71 |       const keyId = await this.registry.getLatestSchemaId(`${schema.topic}-${keySuffix}`) || null;
72 |       const valueId = await this.registry.getLatestSchemaId(`${schema.topic}-${valueSuffix}`)
73 | 
74 |       this.schemas.set(schema.topic, {
75 |         keyId,
76 |         valueId,
77 |         keySuffix,
78 |         valueSuffix,
79 |       });
80 | 
81 |       this.lastSchemaFetchInterval.set(schema.topic, Date.now())
82 |     } catch (e) {
83 |       this.logger.error('Unable to get schema ID: ', e);
84 |       throw e;
85 |     }
86 |   }
87 | 
88 |   /**
89 |    * Check the last time we updated the schemas and attempt to update.
90 |    *
91 |    * @param topic
92 |    */
93 |   private async updateSchemas(topic: string): Promise<void> {
94 |     const lastCheck = this.lastSchemaFetchInterval.get(topic);
95 |     // schemaFetchIntervalSeconds is configured in seconds, so convert to ms.
96 |     const configCheckMs = this.config.schemaFetchIntervalSeconds * 1000;
97 |     const now = Date.now();
98 | 
99 |     // Re-fetch the schema ids once the fetch interval has elapsed.
100 |     if ((lastCheck + configCheckMs) < now) {
101 |       const config = this.config.schemas.find((schema) => schema.topic === topic);
102 |       await this.getSchemaId(config);
103 |     }
104 |   }
105 | 
106 |   async serialize(value: KafkaMessageSend): Promise<KafkaMessageSend> {
107 |     const outgoingMessage = value;
108 | 
109 |     try {
110 |       await this.updateSchemas(value.topic);
111 | 
112 |       const schema = this.schemas.get(value.topic);
113 |       const { keyId, valueId } = schema;
114 | 
115 |       const messages: Promise<KafkaMessageObject>[] = value.messages.map(async (origMessage) => {
116 | 
117 |         let encodedKey = origMessage.key;
118 |         const encodedValue = await this.registry.encode(valueId, origMessage.value);
119 | 
120 |         if (keyId) {
121 |           encodedKey = await this.registry.encode(keyId, origMessage.key);
122 |         }
123 | 
124 |         return {
125 |           ...origMessage,
126 |           value: encodedValue,
127 |           key: encodedKey
128 |         };
129 |       });
130 | 
131 |       const results = await Promise.all(messages);
132 |       outgoingMessage.messages = results;
133 |     } catch (e) {
134 |       this.logger.error('Error serializing', e);
135 |       throw e;
136 |     }
137 | 
138 |     return outgoingMessage;
139 |   }
140 | 
141 | }
142 | 
--------------------------------------------------------------------------------
/src/serializer/kafka-request.serializer.ts:
--------------------------------------------------------------------------------
1 | import {
2 |   isNil,
3 |   isObject,
4 |   isPlainObject,
5 |   isString,
6 |   isUndefined,
7 | } from '@nestjs/common/utils/shared.utils';
8 | import { Serializer } from '@nestjs/microservices';
9 | 
10 | export interface KafkaRequest<T = any> {
11 |   key: Buffer | string | null;
12 |   value: T;
13 |   topic: string;
14 |   headers: Record<string, any>;
15 | }
16 | 
17 | export class KafkaRequestSerializer implements Serializer<any, KafkaRequest> {
18 |   serialize(value: any): KafkaRequest {
19 |     return value;
20 |   }
21 | 
22 |   public encode(value: any): Buffer | string | null {
23 |     const isObjectOrArray =
24 |       !isNil(value) && !isString(value) && !Buffer.isBuffer(value);
25 | 
26 |     if (isObjectOrArray) {
27 |       return isPlainObject(value) || Array.isArray(value)
28 |         ? 
JSON.stringify(value) 29 | : value.toString(); 30 | } else if (isUndefined(value)) { 31 | return null; 32 | } 33 | return value; 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /test/app-async.e2e-spec.ts: -------------------------------------------------------------------------------- 1 | import { Test, TestingModule } from '@nestjs/testing'; 2 | import { INestMicroservice } from '@nestjs/common'; 3 | import AppModule from './e2e/app/config.app.async'; 4 | import { TestConsumer } from './e2e/app/test.controller'; 5 | import { messages } from './constants'; 6 | import { Utils } from './utils'; 7 | 8 | describe('AppModule Async (e2e)', () => { 9 | let app: INestMicroservice; 10 | let controller: TestConsumer; 11 | 12 | beforeAll(async () => { 13 | jest.setTimeout(10000); 14 | 15 | await Utils.schemaRegistrySetup(); 16 | 17 | const moduleFixture: TestingModule = await Test.createTestingModule({ 18 | imports: [AppModule] 19 | }).compile(); 20 | 21 | app = moduleFixture.createNestMicroservice({}); 22 | app.enableShutdownHooks(); 23 | await app.listenAsync(); 24 | 25 | controller = await app.resolve(TestConsumer); 26 | controller.messages = []; 27 | }); 28 | 29 | afterAll(async () => { 30 | await app.close(); 31 | }); 32 | 33 | beforeEach(async () => { 34 | await Utils.sleep(2000); 35 | 36 | controller = await app.resolve(TestConsumer); 37 | controller.messages = []; 38 | }); 39 | 40 | it('We can SEND and ACCEPT AVRO messages', async () => { 41 | await Utils.sleep(2000); 42 | 43 | await controller.sendMessage({ messages }); 44 | 45 | let count = 0; 46 | while (controller.messages.length < messages.length && count < 4) { 47 | await Utils.sleep(1000); 48 | count++; 49 | } 50 | 51 | expect(controller.messages.length).toBe(messages.length); 52 | expect(controller.messages.find((x) => x.id == messages[0].value.id)).toBeDefined(); 53 | expect(controller.messages.find((x) => x.id == messages[1].value.id)).toBeDefined(); 54 | }); 55 | }); 56 | -------------------------------------------------------------------------------- /test/app-sync.e2e-spec.ts: -------------------------------------------------------------------------------- 1 | import { Test, TestingModule } from '@nestjs/testing'; 2 | import { INestMicroservice } from '@nestjs/common'; 3 | import AppModule from './e2e/app/config.app.sync'; 4 | import { TestConsumer } from './e2e/app/test.controller'; 5 | import { messages } from './constants'; 6 | import { Utils } from './utils'; 7 | 8 | describe('AppModule Sync (e2e)', () => { 9 | let app: INestMicroservice; 10 | let controller: TestConsumer; 11 | 12 | beforeAll(async () => { 13 | jest.setTimeout(10000); 14 | 15 | await Utils.schemaRegistrySetup(); 16 | 17 | const moduleFixture: TestingModule = await Test.createTestingModule({ 18 | imports: [AppModule] 19 | }).compile(); 20 | 21 | app = moduleFixture.createNestMicroservice({}); 22 | app.enableShutdownHooks(); 23 | await app.listenAsync(); 24 | }); 25 | 26 | afterAll(async () => { 27 | await app.close(); 28 | }); 29 | 30 | beforeEach(async () => { 31 | await Utils.sleep(2000); 32 | 33 | controller = await app.resolve(TestConsumer); 34 | controller.messages = []; 35 | }); 36 | 37 | it('We can SEND and ACCEPT AVRO messages', async () => { 38 | await Utils.sleep(2000); 39 | 40 | await controller.sendMessage({ messages }); 41 | 42 | let count = 0; 43 | while (controller.messages.length < messages.length && count < 4) { 44 | await Utils.sleep(1000); 45 | count++; 46 | } 47 | 48 | 
expect(controller.messages.length).toBe(messages.length); 49 | expect(controller.messages.find((x) => x.id == messages[0].value.id)).toBeDefined(); 50 | expect(controller.messages.find((x) => x.id == messages[1].value.id)).toBeDefined(); 51 | }); 52 | }); 53 | -------------------------------------------------------------------------------- /test/constants.ts: -------------------------------------------------------------------------------- 1 | export const messages = [ 2 | { 3 | key: { 4 | id: 1, 5 | }, 6 | value: { 7 | id: 1, 8 | metadataId: 2, 9 | objectId: 3, 10 | firstName: 'Hello', 11 | lastName: 'World!', 12 | __table: 'test-table', 13 | __deleted: null, 14 | }, 15 | }, 16 | { 17 | key: { 18 | id: 2, 19 | }, 20 | value: { 21 | id: 2, 22 | metadataId: 3, 23 | objectId: 4, 24 | firstName: 'Foo', 25 | lastName: 'Bar', 26 | __table: 'test-table', 27 | __deleted: null, 28 | }, 29 | }, 30 | ]; 31 | -------------------------------------------------------------------------------- /test/e2e/app/config.app.async.ts: -------------------------------------------------------------------------------- 1 | import { Module } from '@nestjs/common'; 2 | import { KafkaModule, KafkaAvroResponseDeserializer, KafkaAvroRequestSerializer } from '../../../src'; 3 | import TestConfigModule from './test-config.module'; 4 | import { TestConfigService } from './test-config.service'; 5 | import { TOPIC_NAME, TestConsumer } from './test.controller'; 6 | 7 | @Module({ 8 | imports: [ 9 | KafkaModule.registerAsync(['KAFKA_SERVICE'], { 10 | imports: [TestConfigModule], 11 | useFactory: (testConfigService: TestConfigService) => { 12 | return [ 13 | { 14 | name: 'KAFKA_SERVICE', 15 | options: { 16 | client: { 17 | clientId: 'test-e2e', 18 | brokers: [testConfigService.getBroker()], 19 | retry: { 20 | retries: 2, 21 | initialRetryTime: 30, 22 | }, 23 | }, 24 | consumer: { 25 | groupId: 'test-e2e-consumer', 26 | allowAutoTopicCreation: true, 27 | }, 28 | deserializer: { 29 | type: KafkaAvroResponseDeserializer, 30 | config: { 31 | host: testConfigService.getAvroHost(), 32 | }, 33 | options: { 34 | 35 | } 36 | }, 37 | serializer: { 38 | type: KafkaAvroRequestSerializer, 39 | config: { 40 | host: testConfigService.getAvroHost(), 41 | }, 42 | options: { 43 | 44 | }, 45 | schemas: [ 46 | { 47 | topic: TOPIC_NAME, 48 | key: TOPIC_NAME, 49 | value: TOPIC_NAME, 50 | }, 51 | ], 52 | } 53 | }, 54 | }, 55 | ]; 56 | }, 57 | inject: [TestConfigService], 58 | }), 59 | TestConsumer, 60 | ], 61 | }) 62 | export default class AppModule {} 63 | -------------------------------------------------------------------------------- /test/e2e/app/config.app.sync.ts: -------------------------------------------------------------------------------- 1 | import { Module } from '@nestjs/common'; 2 | import { KafkaModule, KafkaAvroResponseDeserializer, KafkaAvroRequestSerializer } from "../../../src"; 3 | import { TOPIC_NAME, TestConsumer } from "./test.controller"; 4 | 5 | @Module({ 6 | imports: [ 7 | KafkaModule.register([ 8 | { 9 | name: 'KAFKA_SERVICE', 10 | options: { 11 | client: { 12 | clientId: 'test-e2e', 13 | brokers: ['localhost:9092'], 14 | retry: { 15 | retries: 2, 16 | initialRetryTime: 30, 17 | }, 18 | }, 19 | consumer: { 20 | groupId: 'test-e2e-consumer', 21 | allowAutoTopicCreation: true, 22 | }, 23 | deserializer: new KafkaAvroResponseDeserializer({ 24 | host: 'http://localhost:8081/' 25 | }), 26 | serializer: new KafkaAvroRequestSerializer({ 27 | config: { 28 | host: 'http://localhost:8081/' 29 | }, 30 | schemas: [ 31 | { 32 | topic: 
TOPIC_NAME, 33 | key: TOPIC_NAME, 34 | value: TOPIC_NAME, 35 | } 36 | ], 37 | }), 38 | seek: { 39 | [TOPIC_NAME]: new Date('2020-05-21') 40 | } 41 | } 42 | }, 43 | ]), 44 | TestConsumer 45 | ], 46 | }) 47 | export default class AppModule {} 48 | -------------------------------------------------------------------------------- /test/e2e/app/config.app.ts: -------------------------------------------------------------------------------- 1 | import { Module } from '@nestjs/common'; 2 | import { KafkaModule, KafkaAvroResponseDeserializer, KafkaAvroRequestSerializer } from "../../../src"; 3 | import { TOPIC_NAME, TestConsumer } from "./test.controller"; 4 | 5 | @Module({ 6 | imports: [ 7 | KafkaModule.register([ 8 | { 9 | name: 'KAFKA_SERVICE', 10 | options: { 11 | client: { 12 | clientId: 'test-e2e', 13 | brokers: ['localhost:9092'], 14 | retry: { 15 | retries: 2, 16 | initialRetryTime: 30, 17 | }, 18 | }, 19 | consumer: { 20 | groupId: 'test-e2e-consumer', 21 | allowAutoTopicCreation: true, 22 | }, 23 | deserializer: new KafkaAvroResponseDeserializer({ 24 | host: 'http://localhost:8081/' 25 | }), 26 | serializer: new KafkaAvroRequestSerializer({ 27 | config: { 28 | host: 'http://localhost:8081/' 29 | }, 30 | schemas: [ 31 | { 32 | topic: TOPIC_NAME, 33 | key: TOPIC_NAME, 34 | value: TOPIC_NAME, 35 | } 36 | ], 37 | }), 38 | } 39 | }, 40 | ]), 41 | TestConsumer 42 | ], 43 | }) 44 | export default class AppModule {} 45 | -------------------------------------------------------------------------------- /test/e2e/app/key-schema.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "name": "key", 3 | "namespace": "test.topic", 4 | "type": "record", 5 | "fields": [ 6 | { 7 | "name": "id", 8 | "type": "int" 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /test/e2e/app/test-config.module.ts: -------------------------------------------------------------------------------- 1 | import { Module } from '@nestjs/common'; 2 | import { TestConfigService } from './test-config.service'; 3 | 4 | @Module({ 5 | providers: [TestConfigService], 6 | exports: [TestConfigService], 7 | }) 8 | export default class TestConfigModule {} 9 | -------------------------------------------------------------------------------- /test/e2e/app/test-config.service.ts: -------------------------------------------------------------------------------- 1 | import { Injectable } from '@nestjs/common'; 2 | 3 | @Injectable() 4 | export class TestConfigService { 5 | getBroker() { 6 | return 'localhost:9092'; 7 | } 8 | 9 | getAvroHost() { 10 | return 'http://localhost:8081/'; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /test/e2e/app/test.controller.ts: -------------------------------------------------------------------------------- 1 | import { Inject, Post } from '@nestjs/common'; 2 | import { Payload } from "@nestjs/microservices"; 3 | import { SubscribeTo, KafkaService } from '../../../src'; 4 | import { KafkaMessageSend } from '../../../src/interfaces'; 5 | 6 | export const TOPIC_NAME = 'test.topic'; 7 | 8 | export class TestConsumer { 9 | 10 | // Used to log the errors for testing. 
11 |   messages = [];
12 | 
13 |   constructor(
14 |     @Inject('KAFKA_SERVICE') private client: KafkaService
15 |   ) {
16 |   }
17 | 
18 |   onModuleInit(): void {
19 |     this.client.subscribeToResponseOf(TOPIC_NAME, this)
20 |   }
21 | 
22 |   @SubscribeTo(TOPIC_NAME)
23 |   async message(@Payload() data: any): Promise<void> {
24 |     this.messages.push(data);
25 |   }
26 | 
27 |   @Post()
28 |   async sendMessage(event: KafkaMessageSend) {
29 |     const a = {
30 |       ...event,
31 |       topic: TOPIC_NAME
32 |     }
33 |     return await this.client.send(a);
34 |   }
35 | }
36 | 
--------------------------------------------------------------------------------
/test/e2e/app/value-schema.avsc:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "value",
3 |   "namespace": "test.topic",
4 |   "type": "record",
5 |   "fields": [
6 |     {
7 |       "name": "id",
8 |       "type": "int"
9 |     },
10 |     {
11 |       "name": "metadataId",
12 |       "type": "int"
13 |     },
14 |     {
15 |       "name": "objectId",
16 |       "type": "int"
17 |     },
18 |     {
19 |       "name": "firstName",
20 |       "type": "string"
21 |     },
22 |     {
23 |       "name": "lastName",
24 |       "type": "string"
25 |     },
26 |     {
27 |       "default": null,
28 |       "name": "__table",
29 |       "type": [
30 |         "null",
31 |         "string"
32 |       ]
33 |     },
34 |     {
35 |       "default": null,
36 |       "name": "__deleted",
37 |       "type": [
38 |         "null",
39 |         "string"
40 |       ]
41 |     }
42 |   ]
43 | }
44 | 
--------------------------------------------------------------------------------
/test/jest-e2e.json:
--------------------------------------------------------------------------------
1 | {
2 |   "moduleFileExtensions": ["js", "json", "ts"],
3 |   "rootDir": ".",
4 |   "testEnvironment": "node",
5 |   "testRegex": ".e2e-spec.ts$",
6 |   "transform": {
7 |     "^.+\\.(t|j)s$": "ts-jest"
8 |   }
9 | }
10 | 
--------------------------------------------------------------------------------
/test/utils.ts:
--------------------------------------------------------------------------------
1 | import { SchemaRegistry, readAVSC } from '@kafkajs/confluent-schema-registry';
2 | import { join } from 'path';
3 | 
4 | export class Utils {
5 |   public static sleep(ms: number) {
6 |     return new Promise((resolve) => setTimeout(resolve, ms));
7 |   }
8 | 
9 |   public static async schemaRegistrySetup() {
10 |     const registry = new SchemaRegistry({ host: 'http://localhost:8081/' });
11 | 
12 |     // For our other tests we require the schema to already exist
13 |     // in schema registry and don't allow uploading through the NestJS
14 |     // application.
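    // (Editor's note, assuming @kafkajs/confluent-schema-registry's subject
    // naming: register() derives the subject from each schema's namespace and
    // name, so with separator '-' these land as test.topic-key and
    // test.topic-value, matching the `${topic}-key` / `${topic}-value`
    // subjects the Avro request serializer looks up.)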
15 | const valuePath = join(__dirname, 'e2e', 'app', 'value-schema.avsc'); 16 | const keyPath = join(__dirname, 'e2e', 'app', 'key-schema.avsc'); 17 | const valueSchema = readAVSC(valuePath); 18 | const keySchema = readAVSC(keyPath); 19 | 20 | const valueSchemaResult = await registry.register(valueSchema, { 21 | separator: '-' 22 | }); 23 | const keySchemaResult = await registry.register(keySchema, { 24 | separator: '-' 25 | }); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /tsconfig.build.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "exclude": ["node_modules", "test", "dist", "**/*spec.ts", "examples"] 4 | } 5 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "declaration": true, 5 | "removeComments": true, 6 | "emitDecoratorMetadata": true, 7 | "experimentalDecorators": true, 8 | "allowSyntheticDefaultImports": true, 9 | "target": "es2017", 10 | "sourceMap": true, 11 | "outDir": "./dist", 12 | "baseUrl": "./", 13 | "incremental": true 14 | } 15 | } 16 | --------------------------------------------------------------------------------