├── .github ├── FUNDING.yml └── workflows │ ├── release.yml │ └── test.yml ├── .gitignore ├── LICENSE ├── README.md ├── docker-compose.yml ├── docs ├── api.md ├── architecture.md └── templates │ └── api.hbs ├── examples ├── ConsumerExample.js ├── ConsumerExample.test.js ├── ProducerExample.js └── ProducerExample.test.js ├── getKafka.js ├── jest.config.js ├── package-lock.json ├── package.json ├── src ├── KafkaTestHelper.js ├── index.d.ts ├── index.js ├── placeholder.js └── utils.js ├── test ├── KafkaTestHelper.spec.js ├── __snapshots__ │ └── placeholder.spec.js.snap ├── index.spec.js ├── placeholder.spec.js └── utils.spec.js └── waitForKafkaReady.js /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | custom: https://www.paypal.me/chrvadala/15 2 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | NEXT_VERSION: 7 | description: Define next version (major/minor/patch) 8 | required: true 9 | REPOSITORY_NAME: 10 | description: Repository full name (e.g. chrvadala/hello ) 11 | required: true 12 | 13 | jobs: 14 | build_and_release: 15 | runs-on: ubuntu-22.04 16 | if: ${{ github.event.inputs.REPOSITORY_NAME == github.repository }} 17 | services: 18 | zookeeper: 19 | image: confluentinc/cp-zookeeper:7.6.0 20 | ports: 21 | - 2181:2181 22 | env: 23 | ZOOKEEPER_CLIENT_PORT: 2181 24 | ZOOKEEPER_TICK_TIME: 2000 25 | kafka: 26 | image: confluentinc/cp-kafka:7.6.0 27 | ports: 28 | - 29092:29092 29 | - 9092:9092 30 | env: 31 | KAFKA_BROKER_ID: 1 32 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' 33 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT 34 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092 35 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 36 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 37 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 38 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 39 | steps: 40 | - uses: actions/checkout@v4 41 | - name: Create .env file 42 | run: | 43 | cat > .env < .env < { 38 | // init Kafka Test helper 39 | const kafka = getKafka() // see https://kafka.js.org/docs/configuration 40 | const topicPrefix = Date.now() // this avoids cross test interference 41 | const topicHelper = await createKafkaTestHelper(kafka, topicPrefix + '_something_happened') 42 | await topicHelper.ensureTopicExists() 43 | 44 | // init the module that has to be tested 45 | const controller = new ProducerExample() 46 | await controller.setup(kafka, topicPrefix) 47 | 48 | const record = { 49 | name: 'Tony', 50 | surname: 'Stark' 51 | } 52 | 53 | await controller.doSomething(record) 54 | 55 | // Kafka Test Helper retrieves published messages 56 | const messages = await topicHelper.messages() 57 | expect(messages).toHaveLength(1) 58 | expect(messages[0].json).toEqual({ 59 | operation: 'doSomething', 60 | record 61 | }) 62 | 63 | // OR 64 | expect(messages[0].string).toEqual(JSON.stringify({ 65 | operation: 'doSomething', 66 | record 67 | })) 68 | 69 | // OR 70 | expect(messages[0].buffer).toEqual(Buffer.from(JSON.stringify({ 71 | operation: 'doSomething', 72 | record 73 | }))) 74 | 75 | // OR 76 | expect(messages).toEqual([ 77 | expect.objectContaining({ 78 | json: { 79 | operation: 'doSomething', 80 | record 81 | } 82 | }) 83 | ]) 84 | 85 | // destroy 86 | await 
controller.destroy() 87 | await topicHelper.ensureTopicDeleted() 88 | }) 89 | ```` 90 | 91 | ## Testing a consumer 92 | Full source code available here: [ConsumerExample.test.js](https://github.com/chrvadala/kafka-test-helper/blob/main/examples/ConsumerExample.test.js) and [ConsumerExample.js](https://github.com/chrvadala/kafka-test-helper/blob/main/examples/ConsumerExample.js) 93 | ````javascript 94 | import { createKafkaTestHelper } from 'kafka-test-helper' 95 | 96 | test('ConsumerExample', async () => { 97 | // init Kafka Test helper 98 | const kafka = getKafka() // see https://kafka.js.org/docs/configuration 99 | const topicPrefix = Date.now() // this avoids cross test interference 100 | const topicHelper = await createKafkaTestHelper(kafka, topicPrefix + 'test-topic') 101 | await topicHelper.ensureTopicExists() 102 | 103 | // init the module that has to be tested 104 | const controller = new ConsumerExample() 105 | await controller.setup(kafka, topicPrefix) 106 | 107 | // a way to intercepts when the controller has done (there could be other ways...) 108 | const waitMessage = () => new Promise(resolve => { 109 | controller.handleMessage = jest.fn() 110 | .mockImplementation(message => { 111 | resolve(message) 112 | }) 113 | }) 114 | 115 | // Kafka Test Helper publishes a message, serialized as JSON string 116 | await topicHelper.publishMessages([ 117 | { 118 | json: { 119 | hello: 'world' 120 | } 121 | } 122 | ]) 123 | 124 | // wait for post elaboration and validates output 125 | const message = await waitMessage() 126 | expect(message).toBe('{"hello":"world"}') 127 | await controller.destroy() 128 | await topicHelper.ensureTopicDeleted() 129 | }) 130 | ```` 131 | 132 | # Changelog 133 | - **0.x** - Beta version 134 | - **1.0** - First official version 135 | - **1.1** - Adds typescript definition [[#1](https://github.com/chrvadala/kafka-test-helper/issues/1)]; Upgrades deps 136 | - **1.2** - Migrates to Kafka.JS 2; Upgrades deps; Upgrades referenced images on docker-compose 137 | - **1.3** - Fixes `js/insecure-randomness` security issue; Deprecates Node.js 14 and 16; Upgrades deps, Kafka docker images and workflows 138 | 139 | # Contributors 140 | - [chrvadala](https://github.com/chrvadala) (author) 141 | - [RodolfoSilva](https://github.com/RodolfoSilva) 142 | 143 | # References 144 | - https://cwiki.apache.org/confluence/display/KAFKA/KIP-98+-+Exactly+Once+Delivery+and+Transactional+Messaging 145 | - https://kafka.apache.org/documentation 146 | - https://github.com/edenhill/kcat#examples 147 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | zookeeper: 4 | image: confluentinc/cp-zookeeper:7.6.0 5 | hostname: zookeeper 6 | container_name: zookeeper-1 7 | ports: 8 | - "2181:2181" 9 | environment: 10 | ZOOKEEPER_CLIENT_PORT: 2181 11 | ZOOKEEPER_TICK_TIME: 2000 12 | 13 | kafka: 14 | image: confluentinc/cp-kafka:7.6.0 15 | hostname: broker 16 | container_name: kafka-1 17 | depends_on: 18 | - zookeeper 19 | ports: 20 | - "29092:29092" 21 | - "9092:9092" 22 | - "9101:9101" 23 | environment: 24 | KAFKA_BROKER_ID: 1 25 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' 26 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT 27 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092 28 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 29 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 
30 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 31 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 32 | KAFKA_JMX_PORT: 9101 33 | KAFKA_JMX_HOSTNAME: localhost 34 | 35 | kafka-ui: 36 | image: provectuslabs/kafka-ui:latest 37 | ports: 38 | - '8080:8080' 39 | environment: 40 | - KAFKA_CLUSTERS_0_NAME=main 41 | - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:29092 42 | - KAFKA_CLUSTERS_0_ZOOKEEPER=zookeeper:2181 43 | depends_on: 44 | - kafka -------------------------------------------------------------------------------- /docs/api.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | # Kafka Test Helper APIs 12 | ## Classes 13 | 14 |
15 | - [KafkaTestHelper](#KafkaTestHelper): KafkaTestHelper is the class that helps you interact with Apache Kafka
16 | 
20 | ## Functions
21 | 
23 | - [createKafkaTestHelper(kafka, topic)](#createKafkaTestHelper) ⇒ [KafkaTestHelper](#KafkaTestHelper): Creates and returns an instance of KafkaTestHelper
24 | 
28 | ## Typedefs
29 | 
31 | - [ConsumedMessage](#ConsumedMessage) : Object
33 | - [ProducibleMessage](#ProducibleMessage) : Object
35 | 
36 | 37 | # Specs 38 | 39 | 40 | ## KafkaTestHelper 41 | KafkaTestHelper is the class that helps you interacting with Apache Kafka 42 | 43 | **Kind**: global class 44 | **See**: You can construct a KafkaTestHelper instancevia [createKafkaTestHelper](#createKafkaTestHelper) method 45 | 46 | * [KafkaTestHelper](#KafkaTestHelper) 47 | * [.reset()](#KafkaTestHelper+reset) 48 | * [.ensureTopicExists([timeout])](#KafkaTestHelper+ensureTopicExists) 49 | * [.ensureTopicDeleted([timeout])](#KafkaTestHelper+ensureTopicDeleted) 50 | * [.messages()](#KafkaTestHelper+messages) ⇒ [Array.<ConsumedMessage>](#ConsumedMessage) 51 | * [.publishMessages(messages)](#KafkaTestHelper+publishMessages) 52 | 53 | 54 | 55 | ### kafkaTestHelper.reset() 56 | Resets the helper to the current offset 57 | 58 | **Kind**: instance method of [KafkaTestHelper](#KafkaTestHelper) 59 | **Example** 60 | ```js 61 | await helper.reset() 62 | ``` 63 | 64 | 65 | ### kafkaTestHelper.ensureTopicExists([timeout]) 66 | Creates a topic if doesn't exist 67 | 68 | **Kind**: instance method of [KafkaTestHelper](#KafkaTestHelper) 69 | 70 | | Param | Type | Default | Description | 71 | | --- | --- | --- | --- | 72 | | [timeout] | number | 5000 | Timeout in ms | 73 | 74 | **Example** 75 | ```js 76 | await helper.ensureTopicExists() 77 | ``` 78 | 79 | 80 | ### kafkaTestHelper.ensureTopicDeleted([timeout]) 81 | Deletes a topic if exists 82 | 83 | **Kind**: instance method of [KafkaTestHelper](#KafkaTestHelper) 84 | 85 | | Param | Type | Default | Description | 86 | | --- | --- | --- | --- | 87 | | [timeout] | number | 5000 | Timeout in ms | 88 | 89 | **Example** 90 | ```js 91 | await helper.ensureTopicDeleted() 92 | ``` 93 | 94 | 95 | ### kafkaTestHelper.messages() ⇒ [Array.<ConsumedMessage>](#ConsumedMessage) 96 | Returns a list of messages published to the topic from last helper reset 97 | 98 | **Kind**: instance method of [KafkaTestHelper](#KafkaTestHelper) 99 | **Example** 100 | ```js 101 | const msgs = await helper.messages() 102 | [ 103 | { 104 | headers: {} 105 | partition: 0, 106 | buffer: , 107 | json: { "bar": 42 }, 108 | string: '{"bar":42}', 109 | }, 110 | ... 111 | ] 112 | ``` 113 | 114 | 115 | ### kafkaTestHelper.publishMessages(messages) 116 | Publishes a list of messages to the topic 117 | 118 | **Kind**: instance method of [KafkaTestHelper](#KafkaTestHelper) 119 | 120 | | Param | Type | Description | 121 | | --- | --- | --- | 122 | | messages | [Array.<ProducibleMessage>](#ProducibleMessage) | List of messages to publish | 123 | 124 | **Example** 125 | ```js 126 | await helper.publishMessages([ 127 | { 128 | partition: 0, 129 | key: 'key1', 130 | string: "hello world", 131 | }, 132 | ... 133 | ]) 134 | ``` 135 | **Example** 136 | ```js 137 | await helper.publishMessages([ 138 | { 139 | partition: 0, 140 | key: 'key1', 141 | json: { "foo": "bar" }, 142 | }, 143 | ... 144 | ]) 145 | ``` 146 | **Example** 147 | ```js 148 | await helper.publishMessages([ 149 | { 150 | partition: 0, 151 | key: 'key1', 152 | buffer: Buffer.from('hello world') 153 | }, 154 | ... 155 | ]) 156 | ``` 157 | 158 | 159 | ## createKafkaTestHelper(kafka, topic) ⇒ [KafkaTestHelper](#KafkaTestHelper) 160 | Creates and returns an instance of KafkaTestHelper 161 | 162 | **Kind**: global function 163 | 164 | | Param | Type | Description | 165 | | --- | --- | --- | 166 | | kafka | Kafka | KafkaJS instance. 
See [https://kafka.js.org/docs/configuration](https://kafka.js.org/docs/configuration) | 167 | | topic | string | Topic that the helper is going to monitor | 168 | 169 | **Example** 170 | ```js 171 | import { createKafkaTestHelper } from 'kafka-test-helper' 172 | 173 | test('your test', async () => { 174 | const kafka = getKafka() // see https://kafka.js.org/docs/configuration 175 | const topicPrefix = Date.now() // this avoids cross test interference 176 | const topicHelper = await createKafkaTestHelper(kafka, topicPrefix + 'test-topic') 177 | //do your tests here 178 | }) 179 | ``` 180 | 181 | 182 | ## ConsumedMessage : Object 183 | **Kind**: global typedef 184 | **Properties** 185 | 186 | | Name | Type | Description | 187 | | --- | --- | --- | 188 | | headers | Object | Object with headers | 189 | | partition | number | Partition number | 190 | | buffer | Buffer | Buffer with message | 191 | | json | Object | Object with message | 192 | | string | string | String with message | 193 | 194 | 195 | 196 | ## ProducibleMessage : Object 197 | **Kind**: global typedef 198 | **Properties** 199 | 200 | | Name | Type | Description | 201 | | --- | --- | --- | 202 | | partition | number | Partition number | 203 | | key | string | Message key | 204 | | buffer | Buffer | Message value as Buffer | 205 | | json | Object | Message value as object and serialized with JSON.stringify() | 206 | | string | string | Message value as string | 207 | 208 | -------------------------------------------------------------------------------- /docs/architecture.md: -------------------------------------------------------------------------------- 1 | # Architecture 2 | 3 | Kafka test helper is a Node.js library that helps you write integration tests that interact with Apache Kafka. 4 | 5 | Detecting messages published on a topic isn't as easy as you might think: you can't rely on committed offsets, because of the control messages https://kafka.apache.org/documentation/#controlbatch published in the context of a transaction. 6 | 7 | In order to detect published messages, this library publishes some special messages, called **placeholders**. 8 | 9 | ``` 10 | +---+---+---+---+---+ 11 | + M + M + M + M + P + 12 | +---+---+---+---+---+ 13 | +---+---+ 14 | + M + P + 15 | +---+---+ 16 | + M + M + M + P + 17 | +---+---+---+---+ 18 | 19 | M = Message 20 | P = Placeholder 21 | ``` 22 | 23 | It uses the following technique: 24 | - _Step one_: When you call the `messages()` function, this library fills your topic with a special placeholder message (one per partition). A placeholder is a special message that marks the current head of the topic. Note: multiple placeholders can exist on a single partition. 25 | - _Step two_: Kafka test helper consumes every message published between the last `reset()` operation and the latest placeholder message, then returns the messages it found. 26 | - _Step three_: You can inspect those messages and assert on them with your preferred 'expect' library. 27 | 28 | If placeholder messages interfere with your code, you can identify and skip them thanks to the following simple function. 
29 | 30 | ```js 31 | const isPlaceholderMessage = msg => !!msg.kafka_test_helper 32 | ``` -------------------------------------------------------------------------------- /docs/templates/api.hbs: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | # Kafka Test Helper APIs 12 | {{>main-index~}} 13 | 14 | # Specs 15 | {{>all-docs~}} -------------------------------------------------------------------------------- /examples/ConsumerExample.js: -------------------------------------------------------------------------------- 1 | const CONSUMER_TIMEOUT_DEFAULTS = { 2 | sessionTimeout: 10_000, 3 | rebalanceTimeout: 12_000, 4 | heartbeatInterval: 500, 5 | maxWaitTimeInMs: 100 6 | } 7 | 8 | export class ConsumerExample { 9 | async setup (kafka, topicPrefix) { 10 | const consumer = kafka.consumer({ groupId: 'test-group', ...CONSUMER_TIMEOUT_DEFAULTS }) 11 | 12 | await consumer.connect() 13 | await consumer.subscribe({ topic: topicPrefix + 'test-topic', fromBeginning: true }) 14 | 15 | await consumer.run({ 16 | eachMessage: async ({ message }) => { 17 | await this.handleMessage(message.value.toString()) 18 | } 19 | }) 20 | 21 | this.kafka = kafka 22 | this.consumer = consumer 23 | } 24 | 25 | async handleMessage (message) { 26 | // console.log(message) 27 | } 28 | 29 | async destroy () { 30 | await this.consumer.disconnect() 31 | this.consumer = null 32 | this.kakfa = null 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /examples/ConsumerExample.test.js: -------------------------------------------------------------------------------- 1 | import { createKafkaTestHelper } from '../src/index.js' // kafka-test-helper 2 | import { test, jest, expect } from '@jest/globals' 3 | import getKafka from '../getKafka.js' 4 | import { ConsumerExample } from './ConsumerExample' 5 | 6 | test('ConsumerExample', async () => { 7 | // init Kafka Test helper 8 | const kafka = getKafka() // see https://kafka.js.org/docs/configuration 9 | const topicPrefix = Date.now() // this avoids cross test interference 10 | const topicHelper = await createKafkaTestHelper(kafka, topicPrefix + 'test-topic') 11 | await topicHelper.ensureTopicExists() 12 | 13 | // init the module that has to be tested 14 | const controller = new ConsumerExample() 15 | await controller.setup(kafka, topicPrefix) 16 | 17 | // a way to intercepts when the controller has done (there could be other ways...) 
18 | const waitMessage = () => new Promise(resolve => { 19 | controller.handleMessage = jest.fn() 20 | .mockImplementation(message => { 21 | resolve(message) 22 | }) 23 | }) 24 | 25 | // Kafka Test Helper publishes a message, serialized as JSON string 26 | await topicHelper.publishMessages([ 27 | { 28 | json: { 29 | hello: 'world' 30 | } 31 | } 32 | ]) 33 | 34 | // wait for post elaboration and validates output 35 | const message = await waitMessage() 36 | expect(message).toBe('{"hello":"world"}') 37 | await controller.destroy() 38 | await topicHelper.ensureTopicDeleted() 39 | }) 40 | -------------------------------------------------------------------------------- /examples/ProducerExample.js: -------------------------------------------------------------------------------- 1 | export class ProducerExample { 2 | async setup (kafka, topicPrefix) { 3 | const producer = kafka.producer() 4 | await producer.connect() 5 | 6 | this.kafka = kafka 7 | this.topicPrefix = topicPrefix 8 | this.producer = producer 9 | } 10 | 11 | async doSomething (record) { 12 | await this.producer.send({ 13 | topic: this.topicPrefix + '_something_happened', 14 | messages: [ 15 | { 16 | value: JSON.stringify( 17 | { 18 | operation: 'doSomething', 19 | record 20 | }) 21 | } 22 | ] 23 | }) 24 | } 25 | 26 | async destroy () { 27 | const producer = this.producer 28 | this.kakfa = null 29 | this.topicPrefix = null 30 | this.producer = null 31 | await producer.disconnect() 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /examples/ProducerExample.test.js: -------------------------------------------------------------------------------- 1 | import { createKafkaTestHelper } from '../src/index.js' // kafka-test-helper 2 | import { test, expect } from '@jest/globals' 3 | import getKafka from '../getKafka.js' 4 | import { ProducerExample } from './ProducerExample' 5 | 6 | test('ProducerExample', async () => { 7 | // init Kafka Test helper 8 | const kafka = getKafka() // see https://kafka.js.org/docs/configuration 9 | const topicPrefix = Date.now() // this avoids cross test interference 10 | const topicHelper = await createKafkaTestHelper(kafka, topicPrefix + '_something_happened') 11 | await topicHelper.ensureTopicExists() 12 | 13 | // init the module that has to be tested 14 | const controller = new ProducerExample() 15 | await controller.setup(kafka, topicPrefix) 16 | 17 | const record = { 18 | name: 'Tony', 19 | surname: 'Stark' 20 | } 21 | 22 | await controller.doSomething(record) 23 | 24 | // Kafka Test Helper retrieves published messages 25 | const messages = await topicHelper.messages() 26 | expect(messages).toHaveLength(1) 27 | expect(messages[0].json).toEqual({ 28 | operation: 'doSomething', 29 | record 30 | }) 31 | 32 | // OR 33 | expect(messages[0].string).toEqual(JSON.stringify({ 34 | operation: 'doSomething', 35 | record 36 | })) 37 | 38 | // OR 39 | expect(messages[0].buffer).toEqual(Buffer.from(JSON.stringify({ 40 | operation: 'doSomething', 41 | record 42 | }))) 43 | 44 | // OR 45 | expect(messages).toEqual([ 46 | expect.objectContaining({ 47 | json: { 48 | operation: 'doSomething', 49 | record 50 | } 51 | }) 52 | ]) 53 | 54 | // destroy 55 | await controller.destroy() 56 | await topicHelper.ensureTopicDeleted() 57 | }) 58 | -------------------------------------------------------------------------------- /getKafka.js: -------------------------------------------------------------------------------- 1 | import { Kafka, logLevel } from 'kafkajs' 2 | 3 | export default 
function getKafka () { 4 | const KAFKA_SERVER = process.env.KAFKA_SERVER 5 | if (!KAFKA_SERVER) { 6 | console.error('KAFKA_SERVER environment variable not found (e.g. KAFKA_SERVER=localhost:9092)') 7 | process.exit(1) 8 | } 9 | 10 | let logLevelConf = logLevel.INFO 11 | 12 | if (process.env.NODE_ENV === 'test') { 13 | logLevelConf = logLevel.NOTHING 14 | } 15 | 16 | return new Kafka({ 17 | clientId: 'kafka-test-helper', 18 | brokers: [KAFKA_SERVER], 19 | logLevel: logLevelConf, 20 | retry: { 21 | restartOnFailure: async () => false, 22 | retries: 10 23 | } 24 | }) 25 | } 26 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | export default { 2 | transform: {}, 3 | coveragePathIgnorePatterns: [ 4 | '/getKafka.js' 5 | ] 6 | } 7 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kafka-test-helper", 3 | "version": "1.3.0", 4 | "main": "src/index.js", 5 | "types": "src/index.d.ts", 6 | "repository": "https://github.com/chrvadala/kafka-test-helper.git", 7 | "author": "Christian Vadalà ", 8 | "license": "MIT", 9 | "type": "module", 10 | "scripts": { 11 | "start": "NODE_OPTIONS=--experimental-vm-modules jest --watch", 12 | "test": "npm run test:standard && npm run test:typescript && npm run test:waitForKafkaReady && npm run test:coverage", 13 | "test:jest": "NODE_OPTIONS=--experimental-vm-modules jest", 14 | "test:coverage": "NODE_OPTIONS=--experimental-vm-modules jest --coverage", 15 | "test:standard": "standard", 16 | "test:waitForKafkaReady": "node ./waitForKafkaReady.js", 17 | "docs:api": "jsdoc2md --template docs/templates/api.hbs --example-lang js --heading-depth 2 src/*.js > docs/api.md", 18 | "build": "npm run docs:api", 19 | "test:examples": "NODE_OPTIONS=--experimental-vm-modules jest examples/*", 20 | "test:typescript": "tsc --strict src/*.ts" 21 | }, 22 | "devDependencies": { 23 | "jest": "^29.7.0", 24 | "jsdoc-to-markdown": "^8.0.1", 25 | "kafkajs": "^2.2.4", 26 | "standard": "^17.1.0", 27 | "typescript": "^5.3.3" 28 | }, 29 | "peerDependencies": { 30 | "kafkajs": "^2.1.0" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/KafkaTestHelper.js: -------------------------------------------------------------------------------- 1 | import { makePlaceholderMessages, isPlaceholderMessage, isPlaceholderMessageWithUUID } from './placeholder.js' 2 | import { randomNumber, tryToConvertBufferToJson, tryToConvertBufferToString } from './utils.js' 3 | 4 | const GROUP_ID_PREFIX = 'kafka-test-helper-' 5 | const CONSUMER_TIMEOUT_DEFAULTS = { 6 | sessionTimeout: 10_000, 7 | rebalanceTimeout: 12_000, 8 | heartbeatInterval: 500, 9 | maxWaitTimeInMs: 100 10 | } 11 | 12 | /** 13 | * @classdesc KafkaTestHelper is the class that helps you interacting with Apache Kafka 14 | * @class KafkaTestHelper 15 | * @see You can construct a KafkaTestHelper instancevia {@link createKafkaTestHelper} method 16 | */ 17 | class KafkaTestHelper { 18 | constructor (kafka, topic) { 19 | this._kafka = kafka 20 | this._topic = topic 21 | this._initialTopicOffsets = [] // e.g. 
[ { partition: 0, offset: '0', high: '0', low: '0' } ] 22 | } 23 | 24 | /** 25 | * @desc Creates and returns an instance of KafkaTestHelper 26 | * @ignore 27 | * @static 28 | * @param {Kafka} kafka KafkaJS instance 29 | * @param {string} topic Topic that this helper is going to monitor 30 | * @returns {KafkaTestHelper} 31 | */ 32 | static async create (kafka, topic) { 33 | const helper = new KafkaTestHelper(kafka, topic) 34 | await helper.reset() 35 | return helper 36 | } 37 | 38 | /** 39 | * @desc Resets the helper to the current offset 40 | * @example await helper.reset() 41 | */ 42 | async reset () { 43 | const admin = await this._getAdmin() 44 | 45 | if (await this._topicExists(admin)) { 46 | this._initialTopicOffsets = await admin.fetchTopicOffsets(this._topic) 47 | } 48 | await admin.disconnect() 49 | } 50 | 51 | /** 52 | * Creates a topic if doesn't exist 53 | * @param {number} [timeout = 5000] - Timeout in ms 54 | * @example await helper.ensureTopicExists() 55 | */ 56 | async ensureTopicExists (timeout = null) { 57 | const admin = await this._getAdmin() 58 | 59 | if (!await this._topicExists(admin)) { 60 | await admin.createTopics({ 61 | validateOnly: false, 62 | waitForLeaders: true, 63 | timeout: timeout || 5000, 64 | topics: [{ 65 | topic: this._topic 66 | }] 67 | }) 68 | await this.reset() 69 | } 70 | await admin.disconnect() 71 | } 72 | 73 | /** 74 | * Deletes a topic if exists 75 | * @param {number} [timeout = 5000] - Timeout in ms 76 | * @example await helper.ensureTopicDeleted() 77 | */ 78 | async ensureTopicDeleted (timeout = null) { 79 | const admin = await this._getAdmin() 80 | if (await this._topicExists(admin)) { 81 | await admin.deleteTopics({ 82 | topics: [this._topic], 83 | timeout: timeout || 5000 84 | }) 85 | } 86 | await admin.disconnect() 87 | } 88 | 89 | /** 90 | * @typedef {Object} ConsumedMessage 91 | * @property {Object} headers - Object with headers 92 | * @property {number} partition - Partition number 93 | * @property {Buffer} buffer - Buffer with message 94 | * @property {Object} json - Object with message 95 | * @property {string} string - String with message 96 | */ 97 | 98 | /** 99 | * Returns a list of messages published to the topic from last helper reset 100 | * @returns {ConsumedMessage[]} 101 | * @example const msgs = await helper.messages() 102 | * [ 103 | * { 104 | * headers: {} 105 | * partition: 0, 106 | * buffer: , 107 | * json: { "bar": 42 }, 108 | * string: '{"bar":42}', 109 | * }, 110 | * ... 
111 | * ] 112 | */ 113 | async messages () { 114 | const uuid = 'placeholder-' + randomNumber() 115 | const groupId = GROUP_ID_PREFIX + randomNumber() 116 | 117 | const partitions = this._initialTopicOffsets.length 118 | 119 | // produce messages 120 | const producer = this._kafka.producer() 121 | await producer.connect() 122 | await producer.send({ 123 | topic: this._topic, 124 | messages: makePlaceholderMessages(uuid, partitions) 125 | }) 126 | await producer.disconnect() 127 | 128 | // consume messages 129 | const messages = [] 130 | const receivedPlaceholdersWithUUID = [] 131 | const consumer = this._kafka.consumer({ 132 | groupId, 133 | ...CONSUMER_TIMEOUT_DEFAULTS 134 | }) 135 | await consumer.connect() 136 | await consumer.subscribe({ topic: this._topic, fromBeginning: false }) 137 | consumer.run({ 138 | eachMessage: ({ partition, message }) => { 139 | if (!isPlaceholderMessage(message)) { 140 | messages.push({ 141 | partition, 142 | headers: message.headers, 143 | buffer: message.value, 144 | json: tryToConvertBufferToJson(message.value), 145 | string: tryToConvertBufferToString(message.value) 146 | }) 147 | } 148 | 149 | if (isPlaceholderMessageWithUUID(message, uuid)) { 150 | receivedPlaceholdersWithUUID.push(message) 151 | } 152 | } 153 | }) 154 | 155 | // reset cursor 156 | for (const initialPartitionCursor of this._initialTopicOffsets) { 157 | await consumer.seek({ 158 | topic: this._topic, 159 | partition: initialPartitionCursor.partition, 160 | offset: initialPartitionCursor.offset 161 | }) 162 | } 163 | 164 | // wait messages until placeholders 165 | await new Promise((resolve) => { 166 | const interval = setInterval(() => { 167 | if (receivedPlaceholdersWithUUID.length >= partitions) { 168 | clearInterval(interval) 169 | consumer.pause() 170 | resolve() 171 | } 172 | }, 50) 173 | }) 174 | 175 | await consumer.disconnect() 176 | return messages 177 | } 178 | 179 | /** 180 | * @typedef {Object} ProducibleMessage 181 | * @property {number} partition - Partition number 182 | * @property {string} key - Message key 183 | * @property {Buffer} buffer - Message value as Buffer 184 | * @property {Object} json - Message value as object and serialized with JSON.stringify() 185 | * @property {string} string - Message value as string 186 | */ 187 | 188 | /** 189 | * Publishes a list of messages to the topic 190 | * @param {ProducibleMessage[]} messages - List of messages to publish 191 | * 192 | * @example 193 | * await helper.publishMessages([ 194 | * { 195 | * partition: 0, 196 | * key: 'key1', 197 | * string: "hello world", 198 | * }, 199 | * ... 200 | * ]) 201 | * 202 | * @example 203 | * await helper.publishMessages([ 204 | * { 205 | * partition: 0, 206 | * key: 'key1', 207 | * json: { "foo": "bar" }, 208 | * }, 209 | * ... 210 | * ]) 211 | * 212 | * @example 213 | * await helper.publishMessages([ 214 | * { 215 | * partition: 0, 216 | * key: 'key1', 217 | * buffer: Buffer.from('hello world') 218 | * }, 219 | * ... 
220 | * ]) 221 | 222 | */ 223 | async publishMessages (messages) { 224 | const txMessages = [] 225 | let outcome 226 | for (const message of messages) { 227 | outcome = { 228 | value: message.buffer 229 | } 230 | if (message.string) outcome.value = message.string 231 | if (message.json) outcome.value = JSON.stringify(message.json) 232 | if (message.key) outcome.key = message.key 233 | if (message.partition) outcome.partition = message.partition 234 | txMessages.push(outcome) 235 | } 236 | 237 | const producer = this._kafka.producer() 238 | await producer.connect() 239 | await producer.send({ 240 | topic: this._topic, 241 | messages: txMessages 242 | }) 243 | await producer.disconnect() 244 | } 245 | 246 | /** 247 | * Gets the admin client 248 | * @ignore 249 | * @returns {Kafka.Admin} 250 | */ 251 | async _getAdmin () { 252 | const admin = this._kafka.admin() 253 | await admin.connect() 254 | return admin 255 | } 256 | 257 | /** 258 | * Validates if the topic exists 259 | * @ignore 260 | * @param {Kafka.Admin} Kafka admin client 261 | * @returns {boolean} 262 | */ 263 | async _topicExists (admin) { 264 | const topics = await admin.listTopics() 265 | const exists = topics.includes(this._topic) 266 | return exists 267 | } 268 | } 269 | 270 | export default KafkaTestHelper 271 | -------------------------------------------------------------------------------- /src/index.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | 3 | import { Kafka } from 'kafkajs'; 4 | 5 | declare class KafkaTestHelper { 6 | constructor(kafka: Kafka, topic: string) 7 | static create(kafka: Kafka, topic: string): Promise; 8 | reset (): Promise; 9 | ensureTopicExists (timeout: number | null): Promise; 10 | ensureTopicDeleted (timeout: number | null): Promise; 11 | messages (): Promise; 12 | publishMessages (messages: ProducibleMessage[]): Promise; 13 | } 14 | 15 | export interface ConsumedMessage { 16 | headers: Object; 17 | partition: number; 18 | buffer: Buffer; 19 | json: Object; 20 | string: string; 21 | } 22 | 23 | export type ProducibleMessage = { 24 | partition: number; 25 | key: string; 26 | buffer: Buffer; 27 | json: Object; 28 | string: string; 29 | } 30 | 31 | export declare function createKafkaTestHelper(kafka: Kafka, topic: string): Promise; -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | import KafkaTestHelper from './KafkaTestHelper.js' 2 | 3 | /** 4 | * Creates and returns an instance of KafkaTestHelper 5 | * @function createKafkaTestHelper 6 | * @param {Kafka} kafka - KafkaJS instance. 
See {@link https://kafka.js.org/docs/configuration} 7 | * @param {string} topic Topic that the helper is going to monitor 8 | * @returns {KafkaTestHelper} 9 | * @example 10 | * import { createKafkaTestHelper } from 'kafka-test-helper' 11 | * 12 | * test('your test', async () => { 13 | * const kafka = getKafka() // see https://kafka.js.org/docs/configuration 14 | * const topicPrefix = Date.now() // this avoids cross test interference 15 | * const topicHelper = await createKafkaTestHelper(kafka, topicPrefix + 'test-topic') 16 | * //do your tests here 17 | * }) 18 | */ 19 | export async function createKafkaTestHelper (kafka, topic) { 20 | return KafkaTestHelper.create(kafka, topic) 21 | } 22 | -------------------------------------------------------------------------------- /src/placeholder.js: -------------------------------------------------------------------------------- 1 | export const WHAT_IS_THAT_MESSAGE_KEY = 'what_is_that_message' 2 | export const WHAT_IS_THAT_MESSAGE_VALUE = 'https://github.com/chrvadala/kafka-test-helper/blob/main/docs/architecture.md' 3 | 4 | export function makePlaceholderMessages (uuid, partions) { 5 | const messages = [] 6 | for (let i = 0; i < partions; i++) { 7 | messages.push({ 8 | partition: i, 9 | value: JSON.stringify({ 10 | [WHAT_IS_THAT_MESSAGE_KEY]: WHAT_IS_THAT_MESSAGE_VALUE, 11 | kafka_test_helper: 'yes', 12 | uuid 13 | }) 14 | }) 15 | } 16 | 17 | return messages 18 | } 19 | 20 | export function isPlaceholderMessage (message) { 21 | const json = tryToExtractValueFromMessage(message) 22 | 23 | if (!json) return false 24 | 25 | return typeof json === 'object' && 26 | json !== null && 27 | 'kafka_test_helper' in json 28 | } 29 | 30 | export function isPlaceholderMessageWithUUID (message, uuid) { 31 | const json = tryToExtractValueFromMessage(message) 32 | 33 | if (!json) return false 34 | 35 | return typeof json === 'object' && 36 | json !== null && 37 | ('kafka_test_helper' in json) && 38 | ('uuid' in json) && 39 | json.uuid === uuid 40 | } 41 | 42 | function tryToExtractValueFromMessage (message) { 43 | try { 44 | return JSON.parse(message.value) 45 | } catch { 46 | return false 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/utils.js: -------------------------------------------------------------------------------- 1 | import { getRandomValues } from 'crypto' 2 | 3 | export function randomNumber () { 4 | const array = new Uint32Array(1) 5 | getRandomValues(array) 6 | 7 | return array[0] 8 | } 9 | 10 | export function tryToConvertBufferToJson (buffer) { 11 | try { 12 | return JSON.parse(buffer.toString()) 13 | } catch (e) { 14 | return null 15 | } 16 | } 17 | 18 | export function tryToConvertBufferToString (buffer) { 19 | try { 20 | return buffer.toString('utf8') 21 | } catch (e) { 22 | return null 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /test/KafkaTestHelper.spec.js: -------------------------------------------------------------------------------- 1 | import getKafka from '../getKafka.js' 2 | import { describe, expect, it, beforeAll, afterAll } from '@jest/globals' 3 | import KafkaTestHelper from '../src/KafkaTestHelper.js' 4 | 5 | const CONSUMER_TIMEOUT_DEFAULTS = { 6 | sessionTimeout: 10_000, 7 | rebalanceTimeout: 12_000, 8 | heartbeatInterval: 500, 9 | maxWaitTimeInMs: 100 10 | } 11 | 12 | let kafka, admin 13 | 14 | beforeAll(async () => { 15 | kafka = getKafka() 16 | 17 | admin = kafka.admin() 18 | await admin.connect() 19 | }) 20 | 
21 | afterAll(async () => { 22 | await admin.disconnect() 23 | admin = null 24 | }) 25 | 26 | describe('create', () => { 27 | it('should create a KafkaTestHelper instance', async () => { 28 | const testTopic = randomString('topic') 29 | const topicHelper = await KafkaTestHelper.create(kafka, testTopic) 30 | 31 | expect(typeof topicHelper).toBe('object') 32 | 33 | expect(async () => { 34 | await topicHelper.ensureTopicExists() 35 | await topicHelper.ensureTopicDeleted() 36 | }).not.toThrow() 37 | }) 38 | }) 39 | 40 | describe('reset', () => { 41 | it('should construct and setup the component when topic exists', async () => { 42 | const testTopic = randomString('topic') 43 | 44 | await createTopic(testTopic) 45 | 46 | const helper = new KafkaTestHelper(kafka, testTopic) 47 | await helper.reset() 48 | }) 49 | 50 | it('should construct and setup the component when topic does NOT exist', async () => { 51 | const testTopic = randomString('topic') 52 | const helper = new KafkaTestHelper(kafka, testTopic) 53 | await helper.reset() 54 | }) 55 | }) 56 | 57 | describe('ensureTopicExists', () => { 58 | it('should create a new topic', async () => { 59 | const testTopic = randomString('topic') 60 | 61 | const helper = new KafkaTestHelper(kafka, testTopic) 62 | 63 | await expect(admin.listTopics()).resolves 64 | .toEqual(expect.not.arrayContaining([testTopic])) 65 | 66 | await helper.ensureTopicExists() 67 | 68 | await expect(admin.listTopics()).resolves 69 | .toEqual(expect.arrayContaining([testTopic])) 70 | }) 71 | 72 | it('should not fail, even if the topic already exists', async () => { 73 | const testTopic = randomString('topic') 74 | 75 | const helper = new KafkaTestHelper(kafka, testTopic) 76 | 77 | await createTopic(testTopic) 78 | 79 | await helper.ensureTopicExists() 80 | 81 | await expect(admin.listTopics()).resolves 82 | .toEqual(expect.arrayContaining([testTopic])) 83 | }) 84 | }) 85 | 86 | describe('ensureTopicDeleted', () => { 87 | it('should delete a topic', async () => { 88 | const testTopic = randomString('topic') 89 | 90 | await createTopic(testTopic) 91 | 92 | await expect(admin.listTopics()).resolves 93 | .toEqual(expect.arrayContaining([testTopic])) 94 | 95 | const helper = new KafkaTestHelper(kafka, testTopic) 96 | await helper.ensureTopicDeleted() 97 | 98 | await expect(admin.listTopics()).resolves 99 | .toEqual(expect.not.arrayContaining([testTopic])) 100 | }) 101 | 102 | it('should not fail, even if the topic does not exist', async () => { 103 | const testTopic = randomString('topic') 104 | 105 | const helper = new KafkaTestHelper(kafka, testTopic) 106 | 107 | await expect(admin.listTopics()).resolves 108 | .toEqual(expect.not.arrayContaining([testTopic])) 109 | 110 | await helper.ensureTopicDeleted() 111 | }) 112 | }) 113 | 114 | describe('messages', () => { 115 | it('should return produced messages', async () => { 116 | const testTopic = randomString('topic') 117 | 118 | // emulate topic with some previous messages 119 | await produceMessages(testTopic, [ 120 | { value: 'message-x' }, 121 | { value: 'message-y' }, 122 | { value: 'message-z' } 123 | ]) 124 | 125 | // init lib 126 | const helper = new KafkaTestHelper(kafka, testTopic) 127 | await helper.reset() 128 | await expect(helper.messages()).resolves.toHaveLength(0) 129 | 130 | // wave 1 131 | await produceMessages(testTopic, [ 132 | { value: 'message-1' }, 133 | { value: 'message-2' }, 134 | { value: 'message-3' } 135 | ]) 136 | 137 | await expect(helper.messages()).resolves.toEqual([ 138 | expect.objectContaining({ 
headers: {}, partition: 0, buffer: Buffer.from('message-1') }), 139 | expect.objectContaining({ headers: {}, partition: 0, buffer: Buffer.from('message-2') }), 140 | expect.objectContaining({ headers: {}, partition: 0, buffer: Buffer.from('message-3') }) 141 | ]) 142 | 143 | // wave 2 144 | await produceMessages(testTopic, [ 145 | { value: 'message-4' }, 146 | { value: 'message-5' } 147 | ]) 148 | await expect(helper.messages()).resolves.toEqual([ 149 | expect.objectContaining({ headers: {}, partition: 0, buffer: Buffer.from('message-1') }), 150 | expect.objectContaining({ headers: {}, partition: 0, buffer: Buffer.from('message-2') }), 151 | expect.objectContaining({ headers: {}, partition: 0, buffer: Buffer.from('message-3') }), 152 | expect.objectContaining({ headers: {}, partition: 0, buffer: Buffer.from('message-4') }), 153 | expect.objectContaining({ headers: {}, partition: 0, buffer: Buffer.from('message-5') }) 154 | ]) 155 | }) 156 | 157 | it('should return produced messages, even in the context of a commited transaction', async () => { 158 | const testTopic = randomString('topic') 159 | 160 | // emulate topic with some previous messages 161 | await produceMessages(testTopic, [ 162 | { value: 'message-x' }, 163 | { value: 'message-y' }, 164 | { value: 'message-z' } 165 | ]) 166 | 167 | // init lib 168 | const helper = new KafkaTestHelper(kafka, testTopic) 169 | await helper.reset() 170 | await expect(helper.messages()).resolves.toHaveLength(0) 171 | 172 | // create transaction and commit 173 | const producer = kafka.producer({ transactionalId: 'tx-test123' }) 174 | await producer.connect() 175 | const transaction = await producer.transaction() 176 | await transaction.send({ 177 | topic: testTopic, 178 | messages: [ 179 | { value: 'message-1' }, 180 | { value: 'message-2' }, 181 | { value: 'message-3' } 182 | ] 183 | }) 184 | await transaction.commit() 185 | await producer.disconnect() 186 | 187 | // verify 188 | await expect(helper.messages()).resolves.toEqual([ 189 | expect.objectContaining({ headers: {}, partition: 0, buffer: Buffer.from('message-1') }), 190 | expect.objectContaining({ headers: {}, partition: 0, buffer: Buffer.from('message-2') }), 191 | expect.objectContaining({ headers: {}, partition: 0, buffer: Buffer.from('message-3') }) 192 | ]) 193 | }) 194 | 195 | it('should return produced messages, even in the context of an aborted transaction', async () => { 196 | const testTopic = randomString('topic') 197 | 198 | // emulate topic with some previous messages 199 | await produceMessages(testTopic, [ 200 | { value: 'message-x' }, 201 | { value: 'message-y' }, 202 | { value: 'message-z' } 203 | ]) 204 | 205 | // init lib 206 | const helper = new KafkaTestHelper(kafka, testTopic) 207 | await helper.reset() 208 | await expect(helper.messages()).resolves.toHaveLength(0) 209 | 210 | // create transaction and abort 211 | const producer = kafka.producer({ transactionalId: 'tx-test123' }) 212 | await producer.connect() 213 | const transaction = await producer.transaction() 214 | await transaction.send({ 215 | topic: testTopic, 216 | messages: [ 217 | { value: 'message-1' }, 218 | { value: 'message-2' }, 219 | { value: 'message-3' } 220 | ] 221 | }) 222 | await transaction.abort() 223 | await producer.disconnect() 224 | 225 | // verify 226 | await expect(helper.messages()).resolves.toHaveLength(0) 227 | }) 228 | 229 | it('should work even with multiple partitions', async () => { 230 | const testTopic = randomString('topic') 231 | 232 | // emulate topic with some previous messages 
233 | await createTopic(testTopic, 8, 1) 234 | await produceMessages(testTopic, [ 235 | { value: 'message-x' }, 236 | { value: 'message-y' }, 237 | { value: 'message-z' } 238 | ]) 239 | 240 | // init lib 241 | const helper = new KafkaTestHelper(kafka, testTopic) 242 | await helper.reset() 243 | await expect(helper.messages()).resolves.toHaveLength(0) 244 | 245 | // wave 1 246 | await produceMessages(testTopic, [ 247 | { value: 'message-1' }, 248 | { value: 'message-2' }, 249 | { value: 'message-3' } 250 | ]) 251 | 252 | const messages1 = await helper.messages() 253 | expect(messages1).toHaveLength(3) 254 | expect(messages1).toEqual(expect.arrayContaining([ 255 | expect.objectContaining({ headers: {}, partition: expect.any(Number), buffer: Buffer.from('message-1') }), 256 | expect.objectContaining({ headers: {}, partition: expect.any(Number), buffer: Buffer.from('message-2') }), 257 | expect.objectContaining({ headers: {}, partition: expect.any(Number), buffer: Buffer.from('message-3') }) 258 | ])) 259 | 260 | // wave 2 261 | await produceMessages(testTopic, [ 262 | { value: 'message-4' }, 263 | { value: 'message-5' } 264 | ]) 265 | const messages2 = await helper.messages() 266 | expect(messages2).toHaveLength(5) 267 | expect(messages2).toEqual(expect.arrayContaining([ 268 | expect.objectContaining({ headers: {}, partition: expect.any(Number), buffer: Buffer.from('message-1') }), 269 | expect.objectContaining({ headers: {}, partition: expect.any(Number), buffer: Buffer.from('message-2') }), 270 | expect.objectContaining({ headers: {}, partition: expect.any(Number), buffer: Buffer.from('message-3') }), 271 | expect.objectContaining({ headers: {}, partition: expect.any(Number), buffer: Buffer.from('message-4') }), 272 | expect.objectContaining({ headers: {}, partition: expect.any(Number), buffer: Buffer.from('message-5') }) 273 | ])) 274 | }) 275 | 276 | it('should decode json and string', async () => { 277 | const testTopic = randomString('topic') 278 | 279 | // emulate topic with some previous messages 280 | await produceMessages(testTopic, [ 281 | { value: 'message-x' }, 282 | { value: 'message-y' }, 283 | { value: 'message-z' } 284 | ]) 285 | 286 | // init lib 287 | const helper = new KafkaTestHelper(kafka, testTopic) 288 | await helper.reset() 289 | await expect(helper.messages()).resolves.toHaveLength(0) 290 | 291 | // wave 1 292 | await produceMessages(testTopic, [ 293 | { value: JSON.stringify({ bar: 42 }) }, 294 | { value: 'hellohello' }, 295 | { value: 'not_a_{{{{_json' }, 296 | { value: Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]) } 297 | ]) 298 | 299 | const messages1 = await helper.messages() 300 | expect(messages1).toHaveLength(4) 301 | expect(messages1).toEqual([ 302 | expect.objectContaining({ json: { bar: 42 } }), 303 | expect.objectContaining({ string: 'hellohello', json: null }), 304 | expect.objectContaining({ string: 'not_a_{{{{_json', json: null }), 305 | expect.objectContaining({ buffer: Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]) }) 306 | ]) 307 | }) 308 | }) 309 | 310 | describe('publishMessages', () => { 311 | it('should populate a topic', async () => { 312 | const testTopic = randomString('topic') 313 | const groupId = randomString('group') 314 | await createTopic(testTopic, 4) 315 | 316 | const json = { json: { hello: 42, ciao: 42 } } 317 | const string = { string: 'hello_42' } 318 | const buffer = { buffer: Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]) } 319 | const advancedKafkaFeatures = { key: 'key123', partition: 1, buffer: Buffer.from([0x42]) } 320 | 321 | const messages = [ 
322 | json, 323 | string, 324 | buffer, 325 | advancedKafkaFeatures 326 | ] 327 | 328 | const helper = new KafkaTestHelper(kafka, testTopic) 329 | await helper.reset() 330 | await helper.publishMessages(messages) 331 | 332 | // download messages 333 | const recvMessages = [] 334 | const consumer = kafka.consumer({ 335 | groupId, 336 | ...CONSUMER_TIMEOUT_DEFAULTS 337 | }) 338 | await consumer.connect() 339 | await consumer.subscribe({ topic: testTopic, fromBeginning: true }) 340 | await new Promise((resolve) => { 341 | consumer.run({ 342 | eachMessage: ({ partition, message }) => { 343 | recvMessages.push({ 344 | partition, 345 | key: message.key, 346 | headers: message.headers, 347 | value: message.value 348 | }) 349 | if (recvMessages.length >= messages.length) resolve() 350 | } 351 | }) 352 | }) 353 | 354 | // verify 355 | expect(recvMessages).toHaveLength(messages.length) 356 | expect(recvMessages).toEqual(expect.arrayContaining([ 357 | { 358 | partition: expect.any(Number), 359 | key: null, 360 | headers: expect.any(Object), 361 | value: Buffer.from(JSON.stringify({ hello: 42, ciao: 42 })) 362 | }, 363 | { 364 | partition: expect.any(Number), 365 | key: null, 366 | headers: expect.any(Object), 367 | value: Buffer.from('hello_42') 368 | }, 369 | { 370 | partition: expect.any(Number), 371 | key: null, 372 | headers: expect.any(Object), 373 | value: Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]) 374 | }, 375 | { 376 | key: Buffer.from('key123'), 377 | partition: 1, 378 | headers: expect.any(Object), 379 | value: Buffer.from([0x42]) 380 | } 381 | ])) 382 | 383 | await consumer.disconnect() 384 | }) 385 | }) 386 | 387 | let i = 0 388 | const randomString = prefix => { 389 | const random = new Date().toISOString() 390 | .replace(/[-:]/g, '') 391 | .replace(/\..*/, '') 392 | 393 | const topic = `${random}_${prefix}_${i++}` 394 | 395 | return topic 396 | } 397 | 398 | const createTopic = async (topic, numPartitions = 1, replicationFactor = 1) => { 399 | await admin.createTopics({ 400 | validateOnly: false, 401 | waitForLeaders: true, 402 | topics: [{ 403 | topic, 404 | numPartitions, 405 | replicationFactor 406 | }] 407 | }) 408 | } 409 | 410 | const produceMessages = async (topic, messages) => { 411 | const producer = kafka.producer() 412 | await producer.connect() 413 | await producer.send({ 414 | topic, 415 | messages 416 | }) 417 | await producer.disconnect() 418 | } 419 | -------------------------------------------------------------------------------- /test/__snapshots__/placeholder.spec.js.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`placeholder makePlaceholderMessages 1`] = ` 4 | [ 5 | { 6 | "partition": 0, 7 | "value": "{"what_is_that_message":"https://github.com/chrvadala/kafka-test-helper/blob/main/docs/architecture.md","kafka_test_helper":"yes","uuid":"test123"}", 8 | }, 9 | { 10 | "partition": 1, 11 | "value": "{"what_is_that_message":"https://github.com/chrvadala/kafka-test-helper/blob/main/docs/architecture.md","kafka_test_helper":"yes","uuid":"test123"}", 12 | }, 13 | { 14 | "partition": 2, 15 | "value": "{"what_is_that_message":"https://github.com/chrvadala/kafka-test-helper/blob/main/docs/architecture.md","kafka_test_helper":"yes","uuid":"test123"}", 16 | }, 17 | { 18 | "partition": 3, 19 | "value": "{"what_is_that_message":"https://github.com/chrvadala/kafka-test-helper/blob/main/docs/architecture.md","kafka_test_helper":"yes","uuid":"test123"}", 20 | }, 21 | ] 22 | `; 23 | 
-------------------------------------------------------------------------------- /test/index.spec.js: -------------------------------------------------------------------------------- 1 | import { expect, beforeAll, test } from '@jest/globals' 2 | import { createKafkaTestHelper } from '../src/index.js' 3 | import getKafka from '../getKafka.js' 4 | 5 | let i = 0 6 | const randomString = prefix => { 7 | const random = new Date().toISOString() 8 | .replace(/[-:]/g, '') 9 | .replace(/\..*/, '') 10 | 11 | const topic = `${random}_${prefix}_${i++}` 12 | 13 | return topic 14 | } 15 | 16 | let kafka 17 | 18 | beforeAll(async () => { 19 | kafka = getKafka() 20 | }) 21 | 22 | test('create KafkaTestHelper', async () => { 23 | const testTopic = randomString('topic') 24 | const topicHelper = await createKafkaTestHelper(kafka, testTopic) 25 | 26 | expect(typeof topicHelper).toBe('object') 27 | 28 | await topicHelper.ensureTopicExists() 29 | await topicHelper.ensureTopicDeleted() 30 | }) 31 | -------------------------------------------------------------------------------- /test/placeholder.spec.js: -------------------------------------------------------------------------------- 1 | import { describe, expect, test } from '@jest/globals' 2 | import { makePlaceholderMessages, isPlaceholderMessage, isPlaceholderMessageWithUUID } from '../src/placeholder.js' 3 | 4 | const validPlaceholder = JSON.stringify({ 5 | what_is_that_message: 'https://github.com/chrvadala/kafka-test-helper/blob/main/docs/architecture.md', 6 | kafka_test_helper: true, 7 | uuid: 'test123' 8 | }) 9 | 10 | const invalidPlaceholder = JSON.stringify({ 11 | bar: 'abc', 12 | foo: 42 13 | }) 14 | 15 | describe.only('placeholder', () => { 16 | test('makePlaceholderMessages', () => { 17 | const placeholder = JSON.stringify({ 18 | what_is_that_message: 'https://github.com/chrvadala/kafka-test-helper/blob/main/docs/architecture.md', 19 | kafka_test_helper: 'yes', 20 | uuid: 'test123' 21 | }) 22 | 23 | const expected = [ 24 | { partition: 0, value: placeholder }, 25 | { partition: 1, value: placeholder }, 26 | { partition: 2, value: placeholder }, 27 | { partition: 3, value: placeholder } 28 | ] 29 | 30 | const messages = makePlaceholderMessages('test123', 4) 31 | 32 | expect(messages).toMatchSnapshot() 33 | expect(messages).toEqual(expected) 34 | }) 35 | 36 | test('isPlaceholderMessageWithUUID', () => { 37 | expect(isPlaceholderMessageWithUUID({ value: validPlaceholder }, 'test123')).toBe(true) 38 | expect(isPlaceholderMessageWithUUID({ value: validPlaceholder }, 'testabc')).toBe(false) 39 | expect(isPlaceholderMessageWithUUID({}, 'testabc')).toBe(false) 40 | 41 | expect(isPlaceholderMessageWithUUID({ value: invalidPlaceholder }, 'test123')).toBe(false) 42 | 43 | expect(isPlaceholderMessageWithUUID({ value: 'tfsahkh{' }, 'test123')).toBe(false) 44 | expect(isPlaceholderMessageWithUUID({ value: null }, 'test123')).toBe(false) 45 | }) 46 | 47 | test('isPlaceholderMessageWithUUID', () => { 48 | expect(isPlaceholderMessage({ value: validPlaceholder })).toBe(true) 49 | 50 | expect(isPlaceholderMessage({ value: invalidPlaceholder })).toBe(false) 51 | expect(isPlaceholderMessage({})).toBe(false) 52 | 53 | expect(isPlaceholderMessage({ value: 'tfsahkh{' })).toBe(false) 54 | expect(isPlaceholderMessage({ value: null })).toBe(false) 55 | }) 56 | }) 57 | -------------------------------------------------------------------------------- /test/utils.spec.js: -------------------------------------------------------------------------------- 1 | import { randomNumber, 
tryToConvertBufferToJson, tryToConvertBufferToString } from '../src/utils.js' 2 | import { test, expect } from '@jest/globals' 3 | 4 | test('randomNumber', () => { 5 | const number = randomNumber() 6 | expect(typeof number).toBe('number') 7 | }) 8 | 9 | test('tryToConvertBufferToJson', () => { 10 | const json = tryToConvertBufferToJson(Buffer.from('{"foo": "bar"}')) 11 | expect(json).toEqual({ foo: 'bar' }) 12 | 13 | const json2 = tryToConvertBufferToJson(Buffer.from('{NOT_A_JSON}')) 14 | expect(json2).toBe(null) 15 | }) 16 | 17 | test('tryToConvertBufferToString', () => { 18 | const string = tryToConvertBufferToString(Buffer.from('hello')) 19 | expect(string).toBe('hello') 20 | }) 21 | -------------------------------------------------------------------------------- /waitForKafkaReady.js: -------------------------------------------------------------------------------- 1 | import getKafka from './getKafka.js' 2 | 3 | waitForKafkaReady() 4 | .then(console.log) 5 | .catch(console.error) 6 | 7 | async function waitForKafkaReady (log = console.log) { 8 | const kafka = getKafka() 9 | 10 | const admin = kafka.admin() 11 | 12 | const connectionPromise = new Promise(resolve => { 13 | admin.on(admin.events.CONNECT, resolve) 14 | }) 15 | 16 | log('waiting for Kafka ready...') 17 | await admin.connect() 18 | await connectionPromise 19 | await admin.describeCluster() 20 | await admin.disconnect() 21 | log('Kafka is ready') 22 | } 23 | --------------------------------------------------------------------------------