├── .eslintignore ├── .eslintrc ├── .github └── workflows │ ├── main.yml │ └── release-package.yml ├── .gitignore ├── .mocharc.js ├── .prettierrc ├── .tool-versions ├── .vscode └── launch.json ├── LICENSE ├── Makefile ├── README.md ├── cluster ├── .gitignore ├── Dockerfile ├── README.md ├── conf │ ├── enabled_plugins │ └── rabbitmq.conf ├── docker-compose.yml └── haproxy.cfg ├── conf ├── enabled_plugins └── rabbitmq.conf ├── cspell.json ├── docker-compose.yaml ├── example ├── docker-compose.yaml ├── index.js ├── package-lock.json ├── package.json ├── src │ ├── autoreconnect_example.js │ ├── cluster_example.js │ ├── deduplication_example.js │ ├── offset_tracking_example.js │ ├── offset_tracking_receive.js │ ├── offset_tracking_send.js │ ├── single_active_consumer_update_example.js │ └── superstream_example.js └── tsconfig.json ├── package-lock.json ├── package.json ├── performance_test ├── index.ts ├── metrics.ts ├── package-lock.json ├── package.json ├── perf_test_publisher.ts └── tsconfig.json ├── src ├── amqp10 │ ├── applicationProperties.ts │ ├── decoder.ts │ ├── encoder.ts │ ├── messageAnnotations.ts │ ├── messageHeader.ts │ └── properties.ts ├── client.ts ├── compression.ts ├── connection.ts ├── connection_pool.ts ├── consumer.ts ├── consumer_credit_policy.ts ├── decoder_listener.ts ├── error_codes.ts ├── hash │ ├── murmur32.ts │ └── util.ts ├── heartbeat.ts ├── index.ts ├── logger.ts ├── promise_resolver.ts ├── publisher.ts ├── requests │ ├── abstract_request.ts │ ├── buffer_data_writer.ts │ ├── close_request.ts │ ├── consumer_update_response.ts │ ├── create_stream_request.ts │ ├── create_super_stream_request.ts │ ├── credit_request.ts │ ├── data_writer.ts │ ├── declare_publisher_request.ts │ ├── delete_publisher_request.ts │ ├── delete_stream_request.ts │ ├── delete_super_stream_request.ts │ ├── exchange_command_versions_request.ts │ ├── frame_size_exception.ts │ ├── heartbeat_request.ts │ ├── metadata_request.ts │ ├── metadata_update_request.ts │ ├── open_request.ts │ ├── partitions_query.ts │ ├── peer_properties_request.ts │ ├── publish_request.ts │ ├── publish_request_v2.ts │ ├── query_offset_request.ts │ ├── query_publisher_request.ts │ ├── request.ts │ ├── requests.ts │ ├── route_query.ts │ ├── sasl_authenticate_request.ts │ ├── sasl_handshake_request.ts │ ├── store_offset_request.ts │ ├── stream_stats_request.ts │ ├── sub_entry_batch_publish_request.ts │ ├── subscribe_request.ts │ ├── tune_request.ts │ └── unsubscribe_request.ts ├── response_decoder.ts ├── responses │ ├── abstract_response.ts │ ├── close_response.ts │ ├── consumer_update_query.ts │ ├── create_stream_response.ts │ ├── create_super_stream_response.ts │ ├── credit_response.ts │ ├── declare_publisher_response.ts │ ├── delete_publisher_response.ts │ ├── delete_stream_response.ts │ ├── delete_super_stream_response.ts │ ├── deliver_response.ts │ ├── deliver_response_v2.ts │ ├── exchange_command_versions_response.ts │ ├── heartbeat_response.ts │ ├── metadata_response.ts │ ├── metadata_update_response.ts │ ├── open_response.ts │ ├── partitions_response.ts │ ├── peer_properties_response.ts │ ├── publish_confirm_response.ts │ ├── publish_error_response.ts │ ├── query_offset_response.ts │ ├── query_publisher_response.ts │ ├── raw_response.ts │ ├── response.ts │ ├── responses.ts │ ├── route_response.ts │ ├── sasl_authenticate_response.ts │ ├── sasl_handshake_response.ts │ ├── store_offset_response.ts │ ├── stream_stats_response.ts │ ├── subscribe_response.ts │ ├── tune_response.ts │ └── unsubscribe_response.ts ├── 
super_stream_consumer.ts ├── super_stream_publisher.ts ├── util.ts ├── versions.ts └── waiting_response.ts ├── test ├── data │ └── header_amqpvalue_message ├── e2e │ ├── address_resolver.test.ts │ ├── basic_publish.test.ts │ ├── client_restart.test.ts │ ├── close_consumer.test.ts │ ├── close_publisher.test.ts │ ├── cluster_connection_management.test.ts │ ├── connect.test.ts │ ├── connect_frame_size_negotiation.test.ts │ ├── connection_closed_listener.test.ts │ ├── consumer_credit_flow_policy.test.ts │ ├── declare_consumer.test.ts │ ├── declare_publisher.test.ts │ ├── filtering.test.ts │ ├── metadata_update.test.ts │ ├── offset.test.ts │ ├── partitions_query.test.ts │ ├── publish_confirm.test.ts │ ├── query_metadata.test.ts │ ├── query_publisher_sequence.test.ts │ ├── route_query.test.ts │ ├── shared_consumer_clients.test.ts │ ├── shared_publisher_clients.test.ts │ ├── stream_cache.test.ts │ ├── sub_entry_consume.test.ts │ ├── sub_entry_publish.test.ts │ ├── subscribe.test.ts │ ├── superstream_consumer.test.ts │ └── superstream_publisher.test.ts ├── index.ts ├── setup.ts ├── support │ ├── fake_data.ts │ ├── rabbit.ts │ └── util.ts ├── tsconfig.json └── unit │ ├── buffer_data_writer.test.ts │ ├── create_stream.test.ts │ ├── create_super_stream.test.ts │ ├── delete_publisher.test.ts │ ├── delete_stream.test.ts │ ├── delete_super_stream.test.ts │ ├── heartbeat.test.ts │ ├── murmur32.test.ts │ ├── publish_request.test.ts │ ├── publisher.test.ts │ ├── response_decoder.test.ts │ ├── stream_stats.test.ts │ ├── util.test.ts │ └── versions.test.ts └── tsconfig.json /.eslintignore: -------------------------------------------------------------------------------- 1 | # /node_modules/* in the project root is ignored by default 2 | # build artefacts 3 | dist/* 4 | coverage/* 5 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "browser": true, 4 | "es6": true, 5 | "node": true 6 | }, 7 | "extends": ["plugin:import/errors", "plugin:import/warnings", "plugin:prettier/recommended", "prettier"], 8 | "parser": "@typescript-eslint/parser", 9 | "parserOptions": { 10 | "project": ["tsconfig.json", "test/tsconfig.json", "example/tsconfig.json"], 11 | "ecmaVersion": 2021, 12 | "sourceType": "module" 13 | }, 14 | "plugins": ["@typescript-eslint", "import", "prettier", "deprecation", "no-only-tests"], 15 | "rules": { 16 | "@typescript-eslint/adjacent-overload-signatures": "error", 17 | "@typescript-eslint/no-empty-function": "error", 18 | "@typescript-eslint/no-empty-interface": "warn", 19 | "@typescript-eslint/no-floating-promises": "error", 20 | "@typescript-eslint/no-namespace": "error", 21 | "@typescript-eslint/no-unnecessary-type-assertion": "error", 22 | "@typescript-eslint/prefer-for-of": "warn", 23 | "@typescript-eslint/triple-slash-reference": "error", 24 | "@typescript-eslint/unified-signatures": "warn", 25 | "@typescript-eslint/no-explicit-any": ["error"], 26 | "constructor-super": "error", 27 | "eqeqeq": ["warn", "always"], 28 | "import/no-deprecated": "warn", 29 | "import/no-extraneous-dependencies": "error", 30 | "import/no-unassigned-import": "warn", 31 | "no-cond-assign": "error", 32 | "no-duplicate-case": "error", 33 | "no-duplicate-imports": "error", 34 | "deprecation/deprecation": "warn", 35 | "no-empty": ["error", { "allowEmptyCatch": true }], 36 | "no-invalid-this": "error", 37 | "no-new-wrappers": "error", 38 | "no-param-reassign": "error", 39 | 
"no-redeclare": "error", 40 | "no-sequences": "error", 41 | "no-throw-literal": "error", 42 | "no-unsafe-finally": "error", 43 | "no-unused-labels": "error", 44 | "no-var": "warn", 45 | "no-void": "error", 46 | "prefer-const": "warn", 47 | "no-only-tests/no-only-tests": "error", 48 | "no-shadow": "off", 49 | "@typescript-eslint/no-shadow": "error" 50 | }, 51 | "settings": { 52 | "jsdoc": { 53 | "tagNamePreference": { 54 | "returns": "return" 55 | } 56 | }, 57 | "import/resolver": { 58 | "node": { 59 | "extensions": [".js", ".ts"] 60 | } 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: Node.js CI 5 | 6 | on: 7 | push: 8 | branches: [main] 9 | pull_request: 10 | branches: [main] 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | 16 | strategy: 17 | matrix: 18 | node-version: [18.x, 20.x] 19 | # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ 20 | 21 | services: 22 | rabbitmq: 23 | image: rabbitmq:4.0.5-management 24 | options: --hostname test-node --name test-node 25 | env: 26 | RABBITMQ_DEFAULT_USER: "test-user" 27 | RABBITMQ_DEFAULT_PASS: "test-password" 28 | volumes: 29 | # these directories will be empty until checkout, but they will be 30 | # populated by the time we restart the service 31 | - ${{ github.workspace }}/conf:/etc/rabbitmq 32 | - ${{ github.workspace }}/certs:/certs 33 | ports: 34 | - 5552:5552 35 | - 5551:5551 36 | - 5672:5672 37 | - 15672:15672 38 | - 1883:1883 39 | - 61613:61613 40 | 41 | steps: 42 | - name: Add the rabbitmq service to /etc/hosts 43 | run: sudo echo "127.0.0.1 test-node" | sudo tee -a /etc/hosts 44 | - uses: actions/checkout@v3 45 | - name: Use Node.js ${{ matrix.node-version }} 46 | uses: actions/setup-node@v3 47 | with: 48 | node-version: ${{ matrix.node-version }} 49 | cache: "npm" 50 | - name: Generate certificates 51 | env: 52 | CN: test-node 53 | run: | 54 | git clone https://github.com/rabbitmq/tls-gen tls-gen 55 | cd tls-gen/basic 56 | make 57 | cd ../.. 
58 | cp -a tls-gen/basic/result certs/ 59 | sudo chown -R 999:999 certs 60 | sudo mv certs/server_test-node_certificate.pem certs/server_rabbitmq_certificate.pem 61 | sudo mv certs/server_test-node_key.pem certs/server_rabbitmq_key.pem 62 | - name: Restart RabbitMQ 63 | run: | 64 | docker restart test-node 65 | sleep 2 66 | docker exec test-node rabbitmqctl await_startup 67 | - name: Create SuperStream 68 | run: docker exec test-node rabbitmq-streams add_super_stream super-stream-test --partitions 2 69 | - run: npm ci 70 | - run: npm run check 71 | - run: npm run build --if-present 72 | - run: | 73 | docker exec test-node rabbitmqctl add_user 'O=client,CN=test-node' '' 74 | docker exec test-node rabbitmqctl clear_password 'O=client,CN=test-node' 75 | docker exec test-node rabbitmqctl set_permissions 'O=client,CN=test-node' '.*' '.*' '.*' 76 | - run: npm test 77 | env: 78 | RABBITMQ_USER: "test-user" 79 | RABBITMQ_PASSWORD: "test-password" 80 | RABBIT_MQ_TEST_NODES: "test-node:5552" 81 | # - run: cd example && npm install && npm start 82 | # env: 83 | # RABBITMQ_USER: "test-user" 84 | # RABBITMQ_PASSWORD: "test-password" 85 | # - run: cd performance_test && npm install && npm run perftest 100000 86 | # env: 87 | # RABBITMQ_USER: "test-user" 88 | # RABBITMQ_PASSWORD: "test-password" 89 | -------------------------------------------------------------------------------- /.github/workflows/release-package.yml: -------------------------------------------------------------------------------- 1 | name: Release Package 2 | 3 | on: 4 | release: 5 | types: [created, edited] 6 | 7 | jobs: 8 | github-package-registry: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Checkout 12 | uses: actions/checkout@v3 13 | with: 14 | fetch-depth: 0 15 | - name: Setup Node.js 16 | uses: actions/setup-node@v3 17 | with: 18 | node-version: 20 19 | registry-url: "https://registry.npmjs.org" 20 | - name: Install dependencies 21 | run: npm ci 22 | - name: Build 23 | run: npm run build 24 | - name: Publish package to npm package registry 25 | run: npm publish --access public 26 | env: 27 | NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | node_modules/ 3 | performance_test/node_modules 4 | .envrc 5 | tls-gen/ 6 | -------------------------------------------------------------------------------- /.mocharc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | require: ["ts-node/register/transpile-only", "test/index.ts"], 3 | extension: ["ts"], 4 | recursive: true, 5 | } 6 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "semi": false, 3 | "arrowParens": "always", 4 | "singleQuote": false, 5 | "printWidth": 120, 6 | "trailingComma": "es5" 7 | } 8 | -------------------------------------------------------------------------------- /.tool-versions: -------------------------------------------------------------------------------- 1 | nodejs 20.15.0 2 | python 3.8.12 -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 
3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "args": ["--timeout", "999999", "--colors", "${file}"], 9 | "internalConsoleOptions": "openOnSessionStart", 10 | "name": "Mocha Current File", 11 | "program": "${workspaceFolder}/node_modules/mocha/bin/_mocha", 12 | "request": "launch", 13 | "skipFiles": ["/**"], 14 | "type": "node" 15 | }, 16 | { 17 | "args": ["--timeout", "999999", "--colors"], 18 | "internalConsoleOptions": "openOnSessionStart", 19 | "name": "Mocha Tests", 20 | "program": "${workspaceFolder}/node_modules/mocha/bin/_mocha", 21 | "request": "launch", 22 | "skipFiles": ["/**"], 23 | "type": "node" 24 | } 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2023 coders51 SRL 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the “Software”), 5 | to deal in the Software without restriction, including without limitation 6 | the rights to use, copy, modify, merge, publish, distribute, sublicense, 7 | and/or sell copies of the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 14 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 15 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 16 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 17 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 18 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 19 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | rabbitmq-cluster: 2 | cd cluster; rm -rf tls-gen; 3 | cd cluster; git clone https://github.com/michaelklishin/tls-gen tls-gen; cd tls-gen/basic; make 4 | mv cluster/tls-gen/basic/result/server_*_certificate.pem cluster/tls-gen/basic/result/server_certificate.pem 5 | mv cluster/tls-gen/basic/result/server_*key.pem cluster/tls-gen/basic/result/server_key.pem 6 | cd cluster; docker build -t haproxy-rabbitmq-cluster . 
7 | cd cluster; chmod -R 755 tls-gen 8 | cd cluster; docker compose down 9 | cd cluster; docker compose up -d 10 | 11 | rabbitmq-test: 12 | rm -rf tls-gen; 13 | git clone https://github.com/rabbitmq/tls-gen tls-gen; cd tls-gen/basic; CN=rabbitmq make 14 | chmod -R 755 tls-gen 15 | docker compose down 16 | docker compose up -d 17 | sleep 5 18 | docker exec rabbitmq-stream rabbitmqctl await_startup 19 | docker exec rabbitmq-stream rabbitmqctl add_user 'O=client,CN=rabbitmq' '' 20 | docker exec rabbitmq-stream rabbitmqctl clear_password 'O=client,CN=rabbitmq' 21 | docker exec rabbitmq-stream rabbitmqctl set_permissions 'O=client,CN=rabbitmq' '.*' '.*' '.*' 22 | -------------------------------------------------------------------------------- /cluster/.gitignore: -------------------------------------------------------------------------------- 1 | tls-gen/ 2 | .DS_Store 3 | -------------------------------------------------------------------------------- /cluster/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM haproxy:2.2.22 2 | 3 | COPY haproxy.cfg /usr/local/etc/haproxy/haproxy.cfg -------------------------------------------------------------------------------- /cluster/README.md: -------------------------------------------------------------------------------- 1 | # RabbitMQ cluster with HA proxy 2 | 3 | how to run: 4 | 5 | add the following to your `/etc/hosts` 6 | 7 | ``` 8 | 127.0.0.1 node0 9 | 127.0.0.1 node1 10 | 127.0.0.1 node2 11 | ``` 12 | 13 | set the following values in your environment if you want to run the tests 14 | 15 | ``` 16 | RABBITMQ_USER="rabbit" 17 | RABBITMQ_PASSWORD="rabbit" 18 | RABBIT_MQ_MANAGEMENT_PORT=15673 19 | RABBIT_MQ_AMQP_PORT=5555 20 | RABBIT_MQ_TEST_NODES="node0:5562;node1:5572;node2:5582" 21 | RABBIT_MQ_TEST_ADDRESS_BALANCER="localhost:5553" 22 | ``` 23 | 24 | then run the following 25 | 26 | ```bash 27 | git clone git@github.com:rabbitmq/rabbitmq-stream-js-client.git . 28 | make rabbitmq-cluster 29 | ``` 30 | 31 | ports: 32 | 33 | ``` 34 | - localhost:5553 #standard stream port 35 | - localhost:5554 #TLS stream port 36 | - http://localhost:15673 #management port 37 | ``` 38 | -------------------------------------------------------------------------------- /cluster/conf/enabled_plugins: -------------------------------------------------------------------------------- 1 | [rabbitmq_management, rabbitmq_stream, rabbitmq_stream_management]. 
-------------------------------------------------------------------------------- /cluster/conf/rabbitmq.conf: -------------------------------------------------------------------------------- 1 | cluster_formation.peer_discovery_backend = rabbit_peer_discovery_classic_config 2 | 3 | cluster_formation.classic_config.nodes.1 = rabbit@node0 4 | cluster_formation.classic_config.nodes.2 = rabbit@node1 5 | cluster_formation.classic_config.nodes.3 = rabbit@node2 6 | loopback_users.guest = false 7 | 8 | ssl_options.cacertfile = /certs/ca_certificate.pem 9 | ssl_options.certfile = /certs/server_certificate.pem 10 | ssl_options.keyfile = /certs/server_key.pem 11 | listeners.ssl.default = 5671 12 | listeners.tcp.default = 5550 13 | stream.listeners.tcp.default = 5552 14 | stream.listeners.ssl.default = 5551 15 | ssl_options.verify = verify_peer 16 | ssl_options.fail_if_no_peer_cert = false 17 | log.file.level = debug 18 | -------------------------------------------------------------------------------- /cluster/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | services: 3 | rabbit_node0: 4 | environment: 5 | - RABBITMQ_ERLANG_COOKIE='secret_cookie' 6 | - RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS=-rabbitmq_stream advertised_host node0 advertised_port 5562 7 | - RABBITMQ_DEFAULT_USER=rabbit 8 | - RABBITMQ_DEFAULT_PASS=rabbit 9 | networks: 10 | - back 11 | hostname: node0 12 | image: rabbitmq:4.0.5-management 13 | ports: 14 | - "5560:5550" 15 | - "5561:5551" 16 | - "5562:5552" 17 | tty: true 18 | volumes: 19 | - ./conf/:/etc/rabbitmq/ 20 | - "./tls-gen/basic/result/:/certs" 21 | rabbit_node1: 22 | environment: 23 | - RABBITMQ_ERLANG_COOKIE='secret_cookie' 24 | - RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS=-rabbitmq_stream advertised_host node1 advertised_port 5572 25 | - RABBITMQ_DEFAULT_USER=rabbit 26 | - RABBITMQ_DEFAULT_PASS=rabbit 27 | networks: 28 | - back 29 | hostname: node1 30 | image: rabbitmq:4.0.5-management 31 | ports: 32 | - "5570:5550" 33 | - "5571:5551" 34 | - "5572:5552" 35 | tty: true 36 | volumes: 37 | - ./conf/:/etc/rabbitmq/ 38 | - "./tls-gen/basic/result/:/certs" 39 | rabbit_node2: 40 | environment: 41 | - RABBITMQ_ERLANG_COOKIE='secret_cookie' 42 | - RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS=-rabbitmq_stream advertised_host node2 advertised_port 5582 43 | - RABBITMQ_DEFAULT_USER=rabbit 44 | - RABBITMQ_DEFAULT_PASS=rabbit 45 | networks: 46 | - back 47 | hostname: node2 48 | image: rabbitmq:4.0.5-management 49 | ports: 50 | - "5580:5550" 51 | - "5581:5551" 52 | - "5582:5552" 53 | tty: true 54 | volumes: 55 | - ./conf/:/etc/rabbitmq/ 56 | - "./tls-gen/basic/result/:/certs" 57 | haproxy: 58 | image: haproxy-rabbitmq-cluster 59 | hostname: haproxy 60 | ports: 61 | - "5553:5552" 62 | - "5554:5551" 63 | - "5555:5550" 64 | - "15673:15672" 65 | networks: 66 | - back 67 | networks: 68 | back: -------------------------------------------------------------------------------- /cluster/haproxy.cfg: -------------------------------------------------------------------------------- 1 | global 2 | maxconn 4096 3 | log stdout format raw local0 debug 4 | 5 | defaults 6 | timeout connect 60s 7 | timeout client 60s 8 | timeout server 60s 9 | log global 10 | 11 | frontend tcp-0_0_0_0-443 12 | bind *:5551 13 | mode tcp 14 | use_backend rabbitmq-stream-tls 15 | tcp-request inspect-delay 5s 16 | tcp-request content accept if { req_ssl_hello_type 1 } 17 | 18 | backend rabbitmq-stream-tls 19 | mode tcp 20 | server rabbit_node0 rabbit_node0:5551 check inter 5000 
fall 3 21 | server rabbit_node1 rabbit_node1:5551 check inter 5000 fall 3 22 | server rabbit_node2 rabbit_node2:5551 check inter 5000 fall 3 23 | 24 | listen rabbitmq-stream 25 | bind 0.0.0.0:5552 26 | balance roundrobin 27 | server rabbit_node0 rabbit_node0:5552 check inter 5000 fall 3 28 | server rabbit_node1 rabbit_node1:5552 check inter 5000 fall 3 29 | server rabbit_node2 rabbit_node2:5552 check inter 5000 fall 3 30 | 31 | listen rabbitmq-classic 32 | bind 0.0.0.0:5550 33 | balance roundrobin 34 | server rabbit_node0 rabbit_node0:5550 check inter 5000 fall 3 35 | server rabbit_node1 rabbit_node1:5550 check inter 5000 fall 3 36 | server rabbit_node2 rabbit_node2:5550 check inter 5000 fall 3 37 | 38 | listen rabbitmq-ui 39 | bind 0.0.0.0:15672 40 | balance roundrobin 41 | server rabbit_node0 rabbit_node0:15672 check inter 5000 fall 3 42 | server rabbit_node1 rabbit_node1:15672 check inter 5000 fall 3 43 | server rabbit_node2 rabbit_node2:15672 check inter 5000 fall 3 44 | -------------------------------------------------------------------------------- /conf/enabled_plugins: -------------------------------------------------------------------------------- 1 | [rabbitmq_management,rabbitmq_prometheus,rabbitmq_stream_management,rabbitmq_auth_mechanism_ssl]. 2 | -------------------------------------------------------------------------------- /conf/rabbitmq.conf: -------------------------------------------------------------------------------- 1 | loopback_users.guest = false 2 | 3 | ssl_options.cacertfile = /certs/ca_certificate.pem 4 | ssl_options.certfile = /certs/server_rabbitmq_certificate.pem 5 | ssl_options.keyfile = /certs/server_rabbitmq_key.pem 6 | listeners.ssl.default = 5671 7 | listeners.tcp.default = 5672 8 | stream.listeners.tcp.default = 5552 9 | stream.listeners.ssl.default = 5551 10 | auth_mechanisms.1 = PLAIN 11 | auth_mechanisms.2 = EXTERNAL 12 | ssl_options.verify = verify_peer 13 | ssl_options.fail_if_no_peer_cert = false 14 | log.file.level = debug 15 | log.console = true 16 | -------------------------------------------------------------------------------- /cspell.json: -------------------------------------------------------------------------------- 1 | { 2 | "language": "en", 3 | "words": ["ackmode", "ampq", "prefetch", "amqpvalue", "RABBITMQ", "Sasl", "Vbin", "akey", "superstream"] 4 | } 5 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | rabbitmq-stream: 3 | image: rabbitmq:4.0.5-management 4 | container_name: rabbitmq-stream 5 | restart: unless-stopped 6 | hostname: "rabbitmq" 7 | ports: 8 | - "15672:15672" 9 | - "5671:5671" 10 | - "5672:5672" 11 | - "5551:5551" 12 | - "5552:5552" 13 | environment: 14 | RABBITMQ_DEFAULT_USER: "rabbit" 15 | RABBITMQ_DEFAULT_PASS: "rabbit" 16 | volumes: 17 | - ./conf/:/etc/rabbitmq/ 18 | - "./tls-gen/basic/result/:/certs" 19 | -------------------------------------------------------------------------------- /example/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | services: 4 | rabbitmq-stream: 5 | image: rabbitmq:4.0.5-management 6 | container_name: rabbitmq-stream 7 | restart: unless-stopped 8 | hostname: "rabbitmq" 9 | ports: 10 | - "15672:15672" 11 | - "5672:5672" 12 | - "5552:5552" 13 | environment: 14 | RABBITMQ_DEFAULT_USER: "rabbit" 15 | RABBITMQ_DEFAULT_PASS: "rabbit" 16 | volumes: 17 | - 
../conf/enabled_plugins:/etc/rabbitmq/enabled_plugins 18 | -------------------------------------------------------------------------------- /example/index.js: -------------------------------------------------------------------------------- 1 | const rabbit = require("rabbitmq-stream-js-client") 2 | const { randomUUID } = require("crypto") 3 | 4 | const rabbitUser = process.env.RABBITMQ_USER || "rabbit" 5 | const rabbitPassword = process.env.RABBITMQ_PASSWORD || "rabbit" 6 | 7 | async function main() { 8 | const streamName = `example-${randomUUID()}` 9 | console.log(`Creating stream ${streamName}`) 10 | 11 | const client = await rabbit.connect({ 12 | hostname: "localhost", 13 | port: 5552, 14 | username: rabbitUser, 15 | password: rabbitPassword, 16 | vhost: "/", 17 | heartbeat: 0, 18 | }) 19 | await client.createStream({ stream: streamName, arguments: {} }) 20 | const publisher = await client.declarePublisher({ stream: streamName }) 21 | 22 | await publisher.send(Buffer.from("Test message")) 23 | 24 | await client.declareConsumer({ stream: streamName, offset: rabbit.Offset.first() }, (message) => { 25 | console.log(`Received message ${message.content.toString()}`) 26 | }) 27 | 28 | await sleep(2000) 29 | 30 | await client.close() 31 | } 32 | 33 | main() 34 | .then(() => console.log("done!")) 35 | .catch((res) => { 36 | console.log("ERROR ", res) 37 | process.exit(-1) 38 | }) 39 | const sleep = (ms) => new Promise((r) => setTimeout(r, ms)) 40 | -------------------------------------------------------------------------------- /example/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "example", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1", 8 | "start": "node index.js", 9 | "cluster-example": "node cluster_example.js", 10 | "rebuild-source": "cd .. && npm run build && cd - && npm install --force" 11 | }, 12 | "author": "", 13 | "license": "ISC", 14 | "dependencies": { 15 | "amqplib": "^0.10.3", 16 | "rabbitmq-stream-js-client": "file:../." 17 | }, 18 | "engines": { 19 | "node": "20.x.x" 20 | }, 21 | "devDependencies": { 22 | "typescript": "^4.9.5" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /example/src/cluster_example.js: -------------------------------------------------------------------------------- 1 | /* 2 | Run this example only after creating the cluster. 
3 | Following the instructions at https://github.com/coders51/rabbitmq-stream-js-client/tree/main/cluster 4 | */ 5 | 6 | const rabbit = require("rabbitmq-stream-js-client") 7 | const { randomUUID } = require("crypto") 8 | 9 | const rabbitUser = process.env.RABBITMQ_USER || "rabbit" 10 | const rabbitPassword = process.env.RABBITMQ_PASSWORD || "rabbit" 11 | 12 | async function main() { 13 | const streamName = `example-${randomUUID()}` 14 | console.log(`Creating stream ${streamName}`) 15 | 16 | const client = await rabbit.connect({ 17 | hostname: "node0", 18 | port: 5562, 19 | username: rabbitUser, 20 | password: rabbitPassword, 21 | vhost: "/", 22 | heartbeat: 0, 23 | addressResolver: { enabled: true, endpoint: { host: "localhost", port: 5553 } }, 24 | }) 25 | await client.createStream({ stream: streamName, arguments: {} }) 26 | await sleep(200) // Waiting for replicas to be created 27 | const publisher = await client.declarePublisher({ stream: streamName }) 28 | 29 | await publisher.send(Buffer.from("Test message")) 30 | 31 | await client.declareConsumer({ stream: streamName, offset: rabbit.Offset.first() }, (message) => { 32 | console.log(`Received message ${message.content.toString()}`) 33 | }) 34 | 35 | await sleep(2000) 36 | 37 | await client.close() 38 | } 39 | 40 | main() 41 | .then(() => console.log("done!")) 42 | .catch((res) => { 43 | console.log("ERROR ", res) 44 | process.exit(-1) 45 | }) 46 | const sleep = (ms) => new Promise((r) => setTimeout(r, ms)) 47 | -------------------------------------------------------------------------------- /example/src/deduplication_example.js: -------------------------------------------------------------------------------- 1 | /* 2 | Run this example only with rabbit management version >= 3.13.0. 3 | */ 4 | 5 | const rabbit = require("rabbitmq-stream-js-client") 6 | const { randomUUID } = require("crypto") 7 | 8 | const rabbitUser = process.env.RABBITMQ_USER || "rabbit" 9 | const rabbitPassword = process.env.RABBITMQ_PASSWORD || "rabbit" 10 | 11 | async function main() { 12 | const streamName = `example-${randomUUID()}` 13 | const publisherRef = `publisher-${randomUUID()}` 14 | console.log(`Creating stream ${streamName}`) 15 | 16 | const client = await rabbit.connect({ 17 | hostname: "localhost", 18 | port: 5552, 19 | username: rabbitUser, 20 | password: rabbitPassword, 21 | vhost: "/", 22 | heartbeat: 0, 23 | }) 24 | await client.createStream({ stream: streamName }) 25 | 26 | //to declare a publisher with deduplication enabled, you need to set a publisherRef 27 | const firstDeduplicationPublisher = await client.declarePublisher({ stream: streamName, publisherRef: publisherRef }) 28 | 29 | //with deduplication activated, you can send messages without a publishingId; in this case it is assigned incrementally 30 | await firstDeduplicationPublisher.send(Buffer.from("Test message 1")) //publishingId = 1 31 | await firstDeduplicationPublisher.send(Buffer.from("Test message 2")) //publishingId = 2 32 | //but you can also set a publishingId; note that it must be greater than the last one for the message to be stored 33 | await firstDeduplicationPublisher.send(Buffer.from("Test message 3"), { publishingId: 3n }) //publishingId = 3 34 | //if you choose a publishingId that is less than the last one, the message will not be stored 35 | await firstDeduplicationPublisher.send(Buffer.from("Test message 4"), { publishingId: 1n }) //this message won't be stored 36 | await firstDeduplicationPublisher.flush() 37 | const firstPublisherPublishingId = await
firstDeduplicationPublisher.getLastPublishingId() 38 | await firstDeduplicationPublisher.close() 39 | 40 | console.log(`Publishing id is ${firstPublisherPublishingId} (must be 3)`) //this must be the greatest publishingId, 3 in this case 41 | 42 | const secondDeduplicationPublisher = await client.declarePublisher({ stream: streamName, publisherRef: publisherRef }) 43 | //with the second publisher, if we try to send messages with a lower publishingId than the last one, they will not be stored 44 | await secondDeduplicationPublisher.send(Buffer.from("Test message 5"), { publishingId: 1n }) //won't be stored 45 | await secondDeduplicationPublisher.send(Buffer.from("Test message 6"), { publishingId: 2n }) //won't be stored 46 | await secondDeduplicationPublisher.send(Buffer.from("Test message 7"), { publishingId: 7n }) //this will be stored since 7 is greater than 3, the previous highest publishingId 47 | await secondDeduplicationPublisher.flush() 48 | const secondPublisherPublishingId = await secondDeduplicationPublisher.getLastPublishingId() 49 | await secondDeduplicationPublisher.close() 50 | 51 | console.log(`Publishing id is ${secondPublisherPublishingId} (must be 7)`) //this must be the greatest publishingId, 7 in this case 52 | 53 | await client.deleteStream({ stream: streamName }) 54 | 55 | await client.close() 56 | } 57 | 58 | main() 59 | .then(() => console.log("done!")) 60 | .catch((res) => { 61 | console.log("ERROR ", res) 62 | process.exit(-1) 63 | }) 64 | -------------------------------------------------------------------------------- /example/src/offset_tracking_example.js: -------------------------------------------------------------------------------- 1 | const rabbit = require("rabbitmq-stream-js-client") 2 | 3 | const rabbitUser = process.env.RABBITMQ_USER || "rabbit" 4 | const rabbitPassword = process.env.RABBITMQ_PASSWORD || "rabbit" 5 | 6 | async function main() { 7 | const streamName = `stream-offset-tracking-javascript` 8 | console.log(`Creating stream ${streamName}`) 9 | 10 | const client = await rabbit.connect({ 11 | hostname: "localhost", 12 | port: 5552, 13 | username: rabbitUser, 14 | password: rabbitPassword, 15 | vhost: "/", 16 | heartbeat: 0, 17 | }) 18 | await client.createStream({ stream: streamName, arguments: {} }) 19 | const publisher = await client.declarePublisher({ stream: streamName }) 20 | const toSend = 100 21 | 22 | console.log(`Publishing ${toSend} messages`) 23 | for (let i = 0; i < toSend; i++) { 24 | const body = i === toSend - 1 ?
"marker" : `hello ${i}` 25 | await publisher.send(Buffer.from(body)) 26 | } 27 | 28 | const consumerRef = "offset-tracking-tutorial" 29 | let firstOffset = undefined 30 | let offsetSpecification = rabbit.Offset.first() 31 | try { 32 | const offset = await client.queryOffset({ reference: consumerRef, stream: streamName }) 33 | offsetSpecification = rabbit.Offset.offset(offset + 1n) 34 | } catch (e) {} 35 | 36 | let lastOffset = offsetSpecification.value 37 | let messageCount = 0 38 | const consumer = await client.declareConsumer( 39 | { stream: streamName, offset: offsetSpecification, consumerRef }, 40 | async (message) => { 41 | messageCount++ 42 | if (!firstOffset && messageCount === 1) { 43 | firstOffset = message.offset 44 | console.log("First message received") 45 | } 46 | if (messageCount % 10 === 0) { 47 | await consumer.storeOffset(message.offset) 48 | } 49 | if (message.content.toString() === "marker") { 50 | console.log("Marker found") 51 | lastOffset = message.offset 52 | await consumer.storeOffset(message.offset) 53 | console.log(`Done consuming, first offset was ${firstOffset}, last offset was ${lastOffset}`) 54 | await consumer.close(true) 55 | process.exit(0) 56 | } 57 | } 58 | ) 59 | 60 | console.log(`Start consuming...`) 61 | await sleep(2000) 62 | } 63 | 64 | main() 65 | .then(() => console.log("done!")) 66 | .catch((res) => { 67 | console.log("ERROR ", res) 68 | process.exit(-1) 69 | }) 70 | const sleep = (ms) => new Promise((r) => setTimeout(r, ms)) 71 | -------------------------------------------------------------------------------- /example/src/offset_tracking_receive.js: -------------------------------------------------------------------------------- 1 | const rabbit = require("rabbitmq-stream-js-client") 2 | 3 | const sleep = (ms) => new Promise((r) => setTimeout(r, ms)) 4 | 5 | async function main() { 6 | console.log("Connecting...") 7 | const client = await rabbit.connect({ 8 | hostname: "localhost", 9 | port: 5552, 10 | username: "rabbit", 11 | password: "rabbit", 12 | vhost: "/", 13 | }) 14 | 15 | console.log("Making sure the stream exists...") 16 | const streamName = "stream-offset-tracking-javascript" 17 | await client.createStream({ stream: streamName, arguments: {} }) 18 | 19 | const consumerRef = "offset-tracking-tutorial" 20 | let firstOffset = undefined 21 | let offsetSpecification = rabbit.Offset.first() 22 | try { 23 | const offset = await client.queryOffset({ reference: consumerRef, stream: streamName }) 24 | offsetSpecification = rabbit.Offset.offset(offset + 1n) 25 | } catch (e) {} 26 | 27 | let lastOffset = offsetSpecification.value 28 | let messageCount = 0 29 | const consumer = await client.declareConsumer( 30 | { stream: streamName, offset: offsetSpecification, consumerRef }, 31 | async (message) => { 32 | messageCount++ 33 | if (!firstOffset && messageCount === 1) { 34 | firstOffset = message.offset 35 | console.log("First message received") 36 | } 37 | if (messageCount % 10 === 0) { 38 | await consumer.storeOffset(message.offset) 39 | } 40 | if (message.content.toString() === "marker") { 41 | console.log("Marker found") 42 | lastOffset = message.offset 43 | await consumer.storeOffset(message.offset) 44 | await consumer.close() 45 | } 46 | } 47 | ) 48 | 49 | console.log(`Start consuming...`) 50 | await sleep(2000) 51 | console.log(`Done consuming, first offset was ${firstOffset}, last offset was ${lastOffset}`) 52 | } 53 | 54 | main() 55 | .then(() => process.exit(0)) 56 | .catch((res) => { 57 | console.log("Error while receiving message!", res) 
58 | process.exit(-1) 59 | }) 60 | -------------------------------------------------------------------------------- /example/src/offset_tracking_send.js: -------------------------------------------------------------------------------- 1 | const rabbit = require("rabbitmq-stream-js-client") 2 | 3 | async function main() { 4 | console.log("Connecting...") 5 | const client = await rabbit.connect({ 6 | vhost: "/", 7 | port: 5552, 8 | hostname: "localhost", 9 | username: "rabbit", 10 | password: "rabbit", 11 | }) 12 | 13 | console.log("Making sure the stream exists...") 14 | const streamName = "stream-offset-tracking-javascript" 15 | await client.createStream({ stream: streamName, arguments: {} }) 16 | 17 | console.log("Creating the publisher...") 18 | const publisher = await client.declarePublisher({ stream: streamName }) 19 | 20 | const messageCount = 100 21 | console.log(`Publishing ${messageCount} messages`) 22 | for (let i = 0; i < messageCount; i++) { 23 | const body = i === messageCount - 1 ? "marker" : `hello ${i}` 24 | await publisher.send(Buffer.from(body)) 25 | } 26 | 27 | console.log("Closing the connection...") 28 | await client.close() 29 | } 30 | 31 | main() 32 | .then(() => console.log("done!")) 33 | .catch((res) => { 34 | console.log("Error in publishing message!", res) 35 | process.exit(-1) 36 | }) 37 | -------------------------------------------------------------------------------- /example/src/single_active_consumer_update_example.js: -------------------------------------------------------------------------------- 1 | const rabbit = require("rabbitmq-stream-js-client") 2 | const crypto = require("crypto") 3 | 4 | const wait = (ms) => new Promise((r) => setTimeout(r, ms)) 5 | 6 | async function main() { 7 | const messagesFromFirstConsumer = [] 8 | const messagesFromSecondConsumer = [] 9 | 10 | console.log("Connecting...") 11 | const client = await rabbit.connect({ 12 | vhost: "/", 13 | port: 5552, 14 | hostname: "localhost", 15 | username: "rabbit", 16 | password: "rabbit", 17 | }) 18 | 19 | console.log("Making sure the stream exists...") 20 | const streamName = "active-consumer-switch-on-single-active-consumer" 21 | await client.createStream({ stream: streamName, arguments: {} }) 22 | const consumerRef = `my-consumer-${crypto.randomUUID()}` 23 | 24 | console.log("Creating the publisher and sending 100 messages...") 25 | const publisher = await client.declarePublisher({ stream: streamName }) 26 | for (let i = 1; i <= 100; i++) { 27 | await publisher.send(Buffer.from(`${i}`)) 28 | } 29 | 30 | console.log("Creating the first consumer, when 50 messages are consumed it saves the offset on the server...") 31 | const consumer1 = await client.declareConsumer( 32 | { 33 | stream: streamName, 34 | offset: rabbit.Offset.first(), 35 | singleActive: true, 36 | consumerRef: consumerRef, 37 | }, 38 | async (message) => { 39 | messagesFromFirstConsumer.push(`Message ${message.content.toString("utf-8")} from ${consumerRef}`) 40 | if (messagesFromFirstConsumer.length === 50) { 41 | await consumer1.storeOffset(message.offset) 42 | } 43 | } 44 | ) 45 | 46 | await wait(500) 47 | 48 | console.log("Creating the second consumer, when it becomes active it resumes from the stored offset on the server...") 49 | await client.declareConsumer( 50 | { 51 | stream: streamName, 52 | offset: rabbit.Offset.first(), 53 | singleActive: true, 54 | consumerRef: consumerRef, 55 | // This callback is executed when the consumer becomes active 56 | consumerUpdateListener: async (consumerReference, streamName) => { 57 
| const offset = await client.queryOffset({ reference: consumerReference, stream: streamName }) 58 | return rabbit.Offset.offset(offset) 59 | }, 60 | }, 61 | (message) => { 62 | messagesFromSecondConsumer.push(`Message ${message.content.toString("utf-8")} from ${consumerRef}`) 63 | } 64 | ) 65 | 66 | console.log("Closing the first consumer to trigger the activation of the second one...") 67 | await client.closeConsumer(consumer1.extendedId) 68 | 69 | await wait(500) 70 | 71 | console.log(`Messages consumed by the first consumer: ${messagesFromFirstConsumer.length}`) 72 | console.log(`Messages consumed by the second consumer: ${messagesFromSecondConsumer.length}`) 73 | } 74 | 75 | main() 76 | .then(() => { 77 | console.log("done!") 78 | process.exit(0) 79 | }) 80 | .catch((res) => { 81 | console.log("Error in publishing message!", res) 82 | process.exit(-1) 83 | }) 84 | -------------------------------------------------------------------------------- /example/src/superstream_example.js: -------------------------------------------------------------------------------- 1 | /* 2 | Run this example only with rabbit management version >= 3.13.0. 3 | */ 4 | 5 | const rabbit = require("rabbitmq-stream-js-client") 6 | const { randomUUID } = require("crypto") 7 | 8 | const rabbitUser = process.env.RABBITMQ_USER || "rabbit" 9 | const rabbitPassword = process.env.RABBITMQ_PASSWORD || "rabbit" 10 | 11 | async function main() { 12 | const superStreamName = `example-${randomUUID()}` 13 | console.log(`Creating super stream ${superStreamName}`) 14 | 15 | const client = await rabbit.connect({ 16 | hostname: "localhost", 17 | port: 5552, 18 | username: rabbitUser, 19 | password: rabbitPassword, 20 | vhost: "/", 21 | heartbeat: 0, 22 | }) 23 | await client.createSuperStream({ streamName: superStreamName }) 24 | await sleep(200) // Waiting for partitions to be created 25 | 26 | const routingKeyExtractor = (content, msgOptions) => msgOptions.messageProperties.messageId 27 | const publisher = await client.declareSuperStreamPublisher({ superStream: superStreamName }, routingKeyExtractor) 28 | 29 | await publisher.send(Buffer.from("Test message 1"), { messageProperties: { messageId: "1" } }) 30 | await publisher.send(Buffer.from("Test message 2"), { messageProperties: { messageId: "2" } }) 31 | await publisher.send(Buffer.from("Test message 3"), { messageProperties: { messageId: "3" } }) 32 | 33 | await client.declareSuperStreamConsumer({ superStream: superStreamName }, (message) => { 34 | console.log(`Received message ${message.content.toString()}`) 35 | }) 36 | 37 | await sleep(2000) 38 | 39 | await client.close() 40 | } 41 | 42 | main() 43 | .then(() => console.log("done!")) 44 | .catch((res) => { 45 | console.log("ERROR ", res) 46 | process.exit(-1) 47 | }) 48 | const sleep = (ms) => new Promise((r) => setTimeout(r, ms)) 49 | -------------------------------------------------------------------------------- /example/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "preserveConstEnums": true, 5 | "noImplicitReturns": true, 6 | "noFallthroughCasesInSwitch": true, 7 | "noImplicitThis": true, 8 | "strictNullChecks": true, 9 | "noUnusedLocals": true, 10 | "noUnusedParameters": true, 11 | "sourceMap": true, 12 | "noEmit": true, 13 | "declaration": true, 14 | "allowJs": true 15 | }, 16 | "include": ["./*.js", "src/autoreconnect_example.js", "src/cluster_example.js", "src/superstream_example.js"], 17 | "exclude": 
["node_modules", "**/*.spec.ts"] 18 | } 19 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rabbitmq-stream-js-client", 3 | "version": "0.6.1", 4 | "description": "Rabbit stream client for JS/TS application", 5 | "main": "dist/index.js", 6 | "scripts": { 7 | "test": "mocha test", 8 | "build": "tsc", 9 | "check": "npm run check-ts && npm run check-lint && npm run check-format && npm run check-spell", 10 | "check-ts": "tsc --noEmit && tsc --project test/tsconfig.json --noEmit", 11 | "check-lint": "tsc --noEmit && eslint 'src/**/*.ts' 'test/**/*.ts'", 12 | "check-format": "prettier -c 'src/**/*.ts' 'test/**/*.ts'", 13 | "check-spell": "cspell 'src/**/*ts' 'test/**/*ts'", 14 | "format": "prettier -w './**/*.ts'" 15 | }, 16 | "repository": { 17 | "type": "git", 18 | "url": "git+https://github.com/coders51/rabbitmq-stream-js-client.git" 19 | }, 20 | "keywords": [ 21 | "RabbitMQ", 22 | "Stream" 23 | ], 24 | "author": "coders51 ", 25 | "license": "ISC", 26 | "bugs": { 27 | "url": "https://github.com/coders51/rabbitmq-stream-js-client/issues" 28 | }, 29 | "homepage": "https://github.com/coders51/rabbitmq-stream-js-client#readme", 30 | "devDependencies": { 31 | "@tsconfig/node-lts": "^20.1.1", 32 | "@types/amqplib": "^0.10.1", 33 | "@types/chai": "^4.3.4", 34 | "@types/chai-as-promised": "^7.1.8", 35 | "@types/chai-spies": "^1.0.6", 36 | "@types/mocha": "^10.0.1", 37 | "@types/node": "^20.11.5", 38 | "@typescript-eslint/eslint-plugin": "^6.19.0", 39 | "@typescript-eslint/parser": "^6.19.0", 40 | "amqplib": "^0.10.5", 41 | "chai": "^4.3.7", 42 | "chai-as-promised": "^7.1.1", 43 | "chai-spies": "^1.1.0", 44 | "cspell": "^7.3.9", 45 | "eslint": "^8.33.0", 46 | "eslint-config-prettier": "^9.1.0", 47 | "eslint-plugin-deprecation": "^2.0.0", 48 | "eslint-plugin-import": "^2.27.5", 49 | "eslint-plugin-no-only-tests": "^3.1.0", 50 | "eslint-plugin-prettier": "^5.1.3", 51 | "got": "^11.8.5", 52 | "mocha": "^10.2.0", 53 | "ts-node": "^10.9.1", 54 | "typescript": "^5.3.3", 55 | "winston": "^3.8.2" 56 | }, 57 | "dependencies": { 58 | "semver": "^7.5.4" 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /performance_test/index.ts: -------------------------------------------------------------------------------- 1 | import { createLogger, format, transports } from "winston" 2 | import { connect } from "rabbitmq-stream-js-client" 3 | import { randomUUID } from "crypto" 4 | import { argv } from "process" 5 | import { PerfTestPublisher } from "./perf_test_publisher" 6 | import { inspect } from "util" 7 | import { BufferSizeSettings } from "../dist/requests/request" 8 | 9 | const logger = createLogger({ 10 | level: "info", 11 | format: format.combine( 12 | format.colorize(), 13 | format.timestamp(), 14 | format.align(), 15 | format.splat(), 16 | format.label(), 17 | format.printf((info) => `${info.timestamp} ${info.level}: ${info.message} ${info.meta ? inspect(info.meta) : ""}`) 18 | ), 19 | transports: new transports.Console(), 20 | }) 21 | 22 | const connLogger = undefined 23 | 24 | function parseArgs(args) { 25 | const zip = (a: string[], b: string[]): [string, number][] => { 26 | const shorterArray = a.length < b.length ? 
a : b 27 | const zipped = shorterArray.map((_, i) => [a[i], +b[i]] as [string, number]) 28 | return zipped 29 | } 30 | const orderedNamedArgs = ["maxMessages", "messageSize"] 31 | const defaultNamedArgs = { 32 | maxMessages: 100000, 33 | messageSize: 10, 34 | } 35 | const passedNamedArgs = Object.fromEntries(zip(orderedNamedArgs, args)) 36 | return { ...defaultNamedArgs, ...passedNamedArgs } 37 | } 38 | 39 | async function main() { 40 | const rabbitUser = process.env.RABBITMQ_USER || "rabbit" 41 | const rabbitPassword = process.env.RABBITMQ_PASSWORD || "rabbit" 42 | const bufferSizeSettings: BufferSizeSettings = { initialSize: 16384 } 43 | const frameMax = 65536 44 | 45 | const client = await connect( 46 | { 47 | hostname: "localhost", 48 | port: 5552, 49 | username: rabbitUser, 50 | password: rabbitPassword, 51 | bufferSizeSettings: bufferSizeSettings, 52 | vhost: "/", 53 | frameMax, 54 | }, 55 | connLogger 56 | ) 57 | 58 | const streamName = `my-stream-${randomUUID()}` 59 | await client.createStream({ stream: streamName, arguments: {} }) 60 | const publisherRef = `my-publisher-${randomUUID()}` 61 | const passedArgs = parseArgs(argv.slice(2)) 62 | logger.info( 63 | `Stream: ${streamName} - publisher ${publisherRef} - max messages ${passedArgs.maxMessages} - message size: ${ 64 | passedArgs.messageSize 65 | } bytes - write buffer settings: ${inspect(bufferSizeSettings)}` 66 | ) 67 | 68 | const perfTestPublisher = new PerfTestPublisher( 69 | client, 70 | logger, 71 | passedArgs.maxMessages, 72 | { stream: streamName, publisherRef: publisherRef }, 73 | passedArgs.messageSize 74 | ) 75 | logger.info(`${new Date().toISOString()} - cycle start`) 76 | await perfTestPublisher.cycle() 77 | } 78 | 79 | main() 80 | .then((_v) => setTimeout(() => process.exit(0), 1000)) 81 | .catch((res) => { 82 | logger.error("ERROR ", res) 83 | process.exit(400) 84 | }) 85 | -------------------------------------------------------------------------------- /performance_test/metrics.ts: -------------------------------------------------------------------------------- 1 | export class Metrics { 2 | private metrics: { [key: string]: number | undefined } = { 3 | published: 0, 4 | confirmed: 0, 5 | } 6 | private ts_start: number = 0 7 | private ts_end: number = 0 8 | 9 | public addCounter(metricName: string, value: number = 1) { 10 | this.metrics[metricName] = (this.metrics[metricName] || 0) + value 11 | } 12 | 13 | public reset(metricName: string) { 14 | this.metrics[metricName] = 0 15 | } 16 | 17 | public setStart() { 18 | this.ts_start = Date.now() 19 | } 20 | 21 | public getMetrics() { 22 | this.ts_end = Date.now() 23 | const delta = this.ts_end - this.ts_start 24 | const timedMetrics = Object.fromEntries( 25 | this.getMetricsNames().map((k) => [`${k}/s`, this.getTimedMetric(k, delta)]) 26 | ) 27 | const result = { ...this.metrics, ...timedMetrics, delta } 28 | 29 | this.resetAll() 30 | 31 | return result 32 | } 33 | 34 | public getCurrentDelta() { 35 | const dt_now = Date.now() 36 | 37 | return dt_now - this.ts_start 38 | } 39 | 40 | private resetAll() { 41 | this.getMetricsNames().forEach((k) => this.reset(k)) 42 | this.ts_start = 0 43 | this.ts_end = 0 44 | } 45 | 46 | private getMetricsNames() { 47 | return Object.keys(this.metrics) 48 | } 49 | 50 | private getTimedMetric(metricName: string, delta: number) { 51 | if (delta < 1) return 0 52 | 53 | const v = this.metrics[metricName] || 0 54 | 55 | return +((v / delta) * 1000).toFixed(0) 56 | } 57 | } 58 | 
-------------------------------------------------------------------------------- /performance_test/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "perftest", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1", 8 | "perftest": "ts-node ./index.ts", 9 | "perftest-reset": "cd .. && npm run build && cd - && npm install --force && ts-node ./index.ts" 10 | }, 11 | "author": "", 12 | "license": "ISC", 13 | "dependencies": { 14 | "rabbitmq-stream-js-client": "file:../.", 15 | "ts-node": "^10.9.1", 16 | "winston": "^3.11.0" 17 | }, 18 | "engines": { 19 | "node": "20.x.x" 20 | }, 21 | "devDependencies": { 22 | "typescript": "^4.9.5" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /performance_test/perf_test_publisher.ts: -------------------------------------------------------------------------------- 1 | import { inspect } from "util" 2 | import { Metrics } from "./metrics" 3 | import { Client, DeclarePublisherParams, Publisher } from "rabbitmq-stream-js-client" 4 | import { Logger } from "winston" 5 | 6 | export class PerfTestPublisher { 7 | private readonly metrics = new Metrics() 8 | private payload: Buffer 9 | private readonly maxChunkSize: number = 1000 10 | private ctr = 0 11 | private displayTimer: NodeJS.Timeout | null 12 | 13 | constructor( 14 | private readonly client: Client, 15 | private readonly logger: Logger, 16 | private readonly maxMessages: number, 17 | private readonly publisherParams: DeclarePublisherParams, 18 | 19 | byteLength: number = 10 20 | ) { 21 | this.payload = Buffer.alloc(byteLength, Math.random().toString()) 22 | this.metrics.setStart() 23 | this.displayTimer = null 24 | } 25 | 26 | public async cycle() { 27 | const publisher = await this.client.declarePublisher(this.publisherParams) 28 | publisher.on("publish_confirm", (err, confirmedIds) => { 29 | if (err) { 30 | this.logger.error(err) 31 | } 32 | this.metrics.addCounter("confirmed", confirmedIds.length) 33 | }) 34 | 35 | this.displayTimer = setInterval(() => { 36 | this.displayMetrics() 37 | this.metrics.setStart() 38 | }, 500) 39 | 40 | await this.send(publisher) 41 | 42 | return true 43 | } 44 | 45 | private displayMetrics(stop: boolean = false) { 46 | const metrics = { ...this.metrics.getMetrics(), total: this.ctr } 47 | this.logger.info(`${inspect(metrics)}`) 48 | if (stop && this.displayTimer) { 49 | clearInterval(this.displayTimer) 50 | } 51 | } 52 | 53 | private async send(publisher: Publisher) { 54 | while (this.maxMessages === -1 || this.ctr < this.maxMessages) { 55 | const messageQuantity = this.maxMessages > 0 ? 
Math.min(this.maxChunkSize, this.maxMessages) : this.maxChunkSize 56 | for (let index = 0; index < messageQuantity; index++) { 57 | await publisher.send(this.payload, {}) 58 | } 59 | this.ctr = this.ctr + messageQuantity 60 | this.metrics.addCounter("published", messageQuantity) 61 | } 62 | this.displayMetrics(true) 63 | } 64 | 65 | public getDisplayTimer() { 66 | return this.displayTimer 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /performance_test/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "preserveConstEnums": true, 5 | "noImplicitReturns": true, 6 | "noFallthroughCasesInSwitch": true, 7 | "noImplicitThis": true, 8 | "strictNullChecks": true, 9 | "noUnusedLocals": true, 10 | "noUnusedParameters": true, 11 | "moduleResolution": "node", 12 | "sourceMap": true, 13 | "noEmit": true, 14 | "declaration": true, 15 | "allowJs": true 16 | }, 17 | "include": ["./*.js", "index.ts"], 18 | "exclude": ["node_modules", "**/*.spec.ts"] 19 | } 20 | -------------------------------------------------------------------------------- /src/amqp10/applicationProperties.ts: -------------------------------------------------------------------------------- 1 | import { MessageApplicationProperties } from "../publisher" 2 | import { DataReader } from "../responses/raw_response" 3 | import { range } from "../util" 4 | import { readUTF8String, decodeFormatCode } from "../response_decoder" 5 | 6 | export class ApplicationProperties { 7 | public static parse(dataReader: DataReader, elementsLength: number): MessageApplicationProperties { 8 | const numEntries = elementsLength / 2 9 | 10 | return range(numEntries).reduce((acc: MessageApplicationProperties, _) => { 11 | const propertyKey = readUTF8String(dataReader) 12 | const nextByteType = dataReader.readUInt8() 13 | const propertyValue = decodeFormatCode(dataReader, nextByteType) 14 | if (!propertyValue) throw new Error(`invalid nextByteType %#02x: ${nextByteType}`) 15 | acc[propertyKey] = propertyValue as string | number 16 | return acc 17 | }, {}) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/amqp10/decoder.ts: -------------------------------------------------------------------------------- 1 | export const FormatCodeType = { 2 | MessageHeader: 0x70, 3 | MessageAnnotations: 0x72, 4 | MessageProperties: 0x73, 5 | ApplicationProperties: 0x74, 6 | ApplicationData: 0x75, 7 | AmqpValue: 0x77, 8 | Size: 3, 9 | } as const 10 | 11 | export const FormatCode = { 12 | Described: 0x00, 13 | Vbin8: 0xa0, 14 | Str8: 0xa1, 15 | Sym8: 0xa3, 16 | Vbin32: 0xb0, 17 | Str32: 0xb1, 18 | Sym32: 0xb3, 19 | List0: 0x45, 20 | List8: 0xc0, 21 | List32: 0xd0, 22 | Map8: 0xc1, 23 | Map32: 0xd1, 24 | Null: 0x40, 25 | ULong0: 0x44, 26 | Ubyte: 0x50, 27 | SmallUlong: 0x53, 28 | ULong: 0x80, 29 | Uint: 0x70, 30 | Uint0: 0x43, 31 | Int: 0x71, 32 | SmallUint: 0x52, 33 | SmallInt: 0x54, 34 | Timestamp: 0x83, 35 | Bool: 0x56, 36 | BoolTrue: 0x41, 37 | BoolFalse: 0x42, 38 | } as const 39 | 40 | export const PropertySizeDescription = 41 | 3 + // sizeOf DescribedFormatCode.Size (3 byte) 42 | 1 + // sizeOf FormatCode.List32 (byte) 43 | 4 + // sizeOf field numbers (uint) 44 | 4 // sizeof propertySize (uint) 45 | -------------------------------------------------------------------------------- /src/amqp10/messageAnnotations.ts: 
-------------------------------------------------------------------------------- 1 | import { MessageAnnotations } from "../publisher" 2 | import { DataReader } from "../responses/raw_response" 3 | import { range } from "../util" 4 | import { readUTF8String, decodeFormatCode } from "../response_decoder" 5 | 6 | export class Annotations { 7 | public static parse(dataReader: DataReader, elementsLength: number): MessageAnnotations { 8 | const numEntries = elementsLength / 2 9 | 10 | return range(numEntries).reduce((acc: MessageAnnotations, _) => { 11 | const propertyKey = readUTF8String(dataReader) 12 | const nextByteType = dataReader.readUInt8() 13 | const propertyValue = decodeFormatCode(dataReader, nextByteType) 14 | if (propertyValue === undefined) throw new Error(`invalid nextByteType %#02x: ${nextByteType}`) 15 | acc[propertyKey] = propertyValue as string | number 16 | return acc 17 | }, {}) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/amqp10/messageHeader.ts: -------------------------------------------------------------------------------- 1 | import { DataReader } from "../responses/raw_response" 2 | import { MessageHeader } from "../publisher" 3 | import { range } from "../util" 4 | import { decodeFormatCode, decodeBooleanType } from "../response_decoder" 5 | import { FormatCode } from "./decoder" 6 | 7 | export class Header { 8 | public static parse(dataResponse: DataReader, fields: number): MessageHeader { 9 | return range(fields).reduce((acc: MessageHeader, index) => { 10 | if (dataResponse.isAtEnd()) return acc 11 | 12 | const type = dataResponse.readUInt8() 13 | if (type !== FormatCode.Null) { 14 | switch (index) { 15 | case 0: 16 | acc.durable = decodeBooleanType(dataResponse, type) 17 | break 18 | case 1: 19 | acc.priority = decodeFormatCode(dataResponse, type) as number 20 | break 21 | case 2: 22 | acc.ttl = decodeFormatCode(dataResponse, type) as number 23 | break 24 | case 3: 25 | acc.firstAcquirer = decodeBooleanType(dataResponse, type) 26 | break 27 | case 4: 28 | acc.deliveryCount = decodeFormatCode(dataResponse, type) as number 29 | break 30 | default: 31 | throw new Error(`HeaderError`) 32 | } 33 | } 34 | return acc 35 | }, {}) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/amqp10/properties.ts: -------------------------------------------------------------------------------- 1 | import { MessageProperties } from "../publisher" 2 | import { decodeFormatCode } from "../response_decoder" 3 | import { DataReader } from "../responses/raw_response" 4 | import { range } from "../util" 5 | import { FormatCode } from "./decoder" 6 | 7 | export class Properties { 8 | public static parse(dataResponse: DataReader, fields: number): MessageProperties { 9 | return range(fields).reduce((acc: MessageProperties, index) => { 10 | if (dataResponse.isAtEnd()) return acc 11 | const formatCode = dataResponse.readUInt8() 12 | if (formatCode === FormatCode.Null) { 13 | return acc 14 | } 15 | switch (index) { 16 | case 0: 17 | acc.messageId = decodeFormatCode(dataResponse, formatCode) as string 18 | break 19 | case 1: 20 | // Reading of binary type 21 | const userIdLength = dataResponse.readUInt8() 22 | acc.userId = dataResponse.readBufferOf(userIdLength) 23 | break 24 | case 2: 25 | acc.to = decodeFormatCode(dataResponse, formatCode) as string 26 | break 27 | case 3: 28 | acc.subject = decodeFormatCode(dataResponse, formatCode) as string 29 | break 30 | case 4: 31 | acc.replyTo 
= decodeFormatCode(dataResponse, formatCode) as string 32 | break 33 | case 5: 34 | acc.correlationId = decodeFormatCode(dataResponse, formatCode) as string 35 | break 36 | case 6: 37 | acc.contentType = decodeFormatCode(dataResponse, formatCode) as string 38 | break 39 | case 7: 40 | acc.contentEncoding = decodeFormatCode(dataResponse, formatCode) as string 41 | break 42 | case 8: 43 | acc.absoluteExpiryTime = new Date(Number(dataResponse.readInt64())) 44 | break 45 | case 9: 46 | acc.creationTime = new Date(Number(dataResponse.readInt64())) 47 | break 48 | case 10: 49 | acc.groupId = decodeFormatCode(dataResponse, formatCode) as string 50 | break 51 | case 11: 52 | acc.groupSequence = dataResponse.readUInt32() 53 | break 54 | case 12: 55 | acc.replyToGroupId = decodeFormatCode(dataResponse, formatCode) as string 56 | break 57 | default: 58 | throw new Error(`PropertiesError`) 59 | } 60 | return acc 61 | }, {}) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/compression.ts: -------------------------------------------------------------------------------- 1 | import { gunzipSync, gzipSync } from "node:zlib" 2 | 3 | export enum CompressionType { 4 | None = 0, 5 | Gzip = 1, 6 | // Not implemented by default. 7 | // It is possible to add custom codec with StreamCompressionCodecs 8 | Snappy = 2, 9 | Lz4 = 3, 10 | Zstd = 4, 11 | } 12 | 13 | export interface Compression { 14 | getType(): CompressionType 15 | compress(data: Buffer): Buffer 16 | decompress(data: Buffer): Buffer 17 | } 18 | 19 | export class NoneCompression implements Compression { 20 | static create(): NoneCompression { 21 | return new NoneCompression() 22 | } 23 | 24 | getType(): CompressionType { 25 | return CompressionType.None 26 | } 27 | 28 | compress(data: Buffer): Buffer { 29 | return data 30 | } 31 | 32 | decompress(data: Buffer): Buffer { 33 | return data 34 | } 35 | } 36 | 37 | export class GzipCompression implements Compression { 38 | static create(): GzipCompression { 39 | return new GzipCompression() 40 | } 41 | 42 | getType(): CompressionType { 43 | return CompressionType.Gzip 44 | } 45 | 46 | compress(data: Buffer): Buffer { 47 | return gzipSync(data) 48 | } 49 | 50 | decompress(data: Buffer): Buffer { 51 | return gunzipSync(data) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /src/connection_pool.ts: -------------------------------------------------------------------------------- 1 | import { Connection } from "./connection" 2 | import { getMaxSharedConnectionInstances } from "./util" 3 | 4 | type InstanceKey = string 5 | export type ConnectionPurpose = "consumer" | "publisher" 6 | 7 | export class ConnectionPool { 8 | private static consumerConnectionProxies = new Map() 9 | private static publisherConnectionProxies = new Map() 10 | 11 | public static getUsableCachedConnection(purpose: ConnectionPurpose, streamName: string, vhost: string, host: string) { 12 | const map = 13 | purpose === "publisher" ? ConnectionPool.publisherConnectionProxies : ConnectionPool.consumerConnectionProxies 14 | const key = ConnectionPool.getCacheKey(streamName, vhost, host) 15 | const proxies = map.get(key) || [] 16 | const connection = proxies.at(-1) 17 | const refCount = connection?.refCount 18 | return refCount !== undefined && refCount < getMaxSharedConnectionInstances() ? 
connection : undefined 19 | } 20 | 21 | public static cacheConnection( 22 | purpose: ConnectionPurpose, 23 | streamName: string, 24 | vhost: string, 25 | host: string, 26 | client: Connection 27 | ) { 28 | const map = 29 | purpose === "publisher" ? ConnectionPool.publisherConnectionProxies : ConnectionPool.consumerConnectionProxies 30 | const key = ConnectionPool.getCacheKey(streamName, vhost, host) 31 | const currentlyCached = map.get(key) || [] 32 | currentlyCached.push(client) 33 | map.set(key, currentlyCached) 34 | } 35 | 36 | public static removeIfUnused(connection: Connection) { 37 | if (connection.refCount <= 0) { 38 | ConnectionPool.removeCachedConnection(connection) 39 | return true 40 | } 41 | return false 42 | } 43 | 44 | public static removeCachedConnection(connection: Connection) { 45 | const { leader, streamName, hostname: host, vhost } = connection 46 | if (streamName === undefined) return 47 | const m = leader ? ConnectionPool.publisherConnectionProxies : ConnectionPool.consumerConnectionProxies 48 | const k = ConnectionPool.getCacheKey(streamName, vhost, host) 49 | const mappedClientList = m.get(k) 50 | if (mappedClientList) { 51 | const filtered = mappedClientList.filter((c) => c !== connection) 52 | m.set(k, filtered) 53 | } 54 | } 55 | 56 | private static getCacheKey(streamName: string, vhost: string, host: string) { 57 | return `${streamName}@${vhost}@${host}` 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/consumer_credit_policy.ts: -------------------------------------------------------------------------------- 1 | export type CreditRequestWrapper = (howMany: number) => Promise 2 | 3 | export abstract class ConsumerCreditPolicy { 4 | constructor(protected readonly startFrom: number) {} 5 | 6 | public async onChunkReceived(_requestWrapper: CreditRequestWrapper) { 7 | return 8 | } 9 | 10 | public async onChunkCompleted(_requestWrapper: CreditRequestWrapper) { 11 | return 12 | } 13 | 14 | public async requestCredits(requestWrapper: CreditRequestWrapper, amount: number) { 15 | return requestWrapper(amount) 16 | } 17 | 18 | public onSubscription() { 19 | return this.startFrom 20 | } 21 | } 22 | 23 | class NewCreditsOnChunkReceived extends ConsumerCreditPolicy { 24 | constructor( 25 | startFrom: number = 1, 26 | private readonly step: number = 1 27 | ) { 28 | super(startFrom) 29 | } 30 | 31 | public async onChunkReceived(requestWrapper: CreditRequestWrapper) { 32 | await this.requestCredits(requestWrapper, this.step) 33 | } 34 | 35 | public onSubscription(): number { 36 | return this.startFrom 37 | } 38 | } 39 | 40 | class NewCreditsOnChunkCompleted extends ConsumerCreditPolicy { 41 | constructor( 42 | startFrom: number = 1, 43 | private readonly step: number = 1 44 | ) { 45 | super(startFrom) 46 | } 47 | 48 | public async onChunkCompleted(requestWrapper: CreditRequestWrapper) { 49 | await this.requestCredits(requestWrapper, this.step) 50 | } 51 | } 52 | 53 | export const creditsOnChunkReceived = (startFrom: number, step: number) => 54 | new NewCreditsOnChunkReceived(startFrom, step) 55 | export const creditsOnChunkCompleted = (startFrom: number, step: number) => 56 | new NewCreditsOnChunkCompleted(startFrom, step) 57 | export const defaultCreditPolicy = creditsOnChunkCompleted(1, 1) 58 | -------------------------------------------------------------------------------- /src/decoder_listener.ts: -------------------------------------------------------------------------------- 1 | import { Response } from 
"./responses/response" 2 | 3 | export interface DecoderListenerFunc { 4 | (data: Response): void 5 | } 6 | -------------------------------------------------------------------------------- /src/error_codes.ts: -------------------------------------------------------------------------------- 1 | export const STREAM_ALREADY_EXISTS_ERROR_CODE = 0x05 2 | -------------------------------------------------------------------------------- /src/hash/murmur32.ts: -------------------------------------------------------------------------------- 1 | // Original from https://github.com/flagUpDown/murmurhash-node, licensed ISC 2 | // Linted, cleaned up and properly typed for this project 3 | 4 | import { fMix32, imul32, rotl32, stringToBuffer } from "./util" 5 | 6 | const MURMUR_HASH_C1 = 0xcc9e2d51 7 | const MURMUR_HASH_C2 = 0x1b873593 8 | 9 | export const murmur32 = (key: string): number => { 10 | const seed = 104729 // must be the same to all the clients to be compatible 11 | const bKey = stringToBuffer(key) 12 | 13 | const len = bKey.length 14 | let remainder = len & 3 15 | const bytes = len - remainder 16 | 17 | let h1 = seed 18 | 19 | let i = 0 20 | while (i < bytes) { 21 | let k1 = bKey[i++] | (bKey[i++] << 8) | (bKey[i++] << 16) | (bKey[i++] << 24) 22 | 23 | k1 = imul32(k1, MURMUR_HASH_C1) 24 | k1 = ((k1 & 0x1ffff) << 15) | (k1 >>> 17) 25 | k1 = imul32(k1, MURMUR_HASH_C2) 26 | 27 | h1 ^= k1 28 | h1 = ((h1 & 0x7ffff) << 13) | (h1 >>> 19) 29 | h1 = (imul32(h1, 5) >>> 0) + 0xe6546b64 30 | } 31 | 32 | let k1 = 0 33 | while (remainder > 0) { 34 | switch (remainder) { 35 | case 3: 36 | k1 ^= bKey[i + 2] << 16 37 | break 38 | case 2: 39 | k1 ^= bKey[i + 1] << 8 40 | break 41 | case 1: 42 | k1 ^= bKey[i] 43 | 44 | k1 = imul32(k1, MURMUR_HASH_C1) 45 | k1 = rotl32(k1, 15) 46 | k1 = imul32(k1, MURMUR_HASH_C2) 47 | h1 ^= k1 48 | break 49 | } 50 | remainder -= 1 51 | } 52 | h1 ^= len 53 | 54 | return fMix32(h1) >>> 0 55 | } 56 | -------------------------------------------------------------------------------- /src/hash/util.ts: -------------------------------------------------------------------------------- 1 | export const imul32 = (a: number, b: number): number => { 2 | const aHi = (a >>> 16) & 0xffff 3 | const aLo = a & 0xffff 4 | const bHi = (b >>> 16) & 0xffff 5 | const bLo = b & 0xffff 6 | // the shift by 0 fixes the sign on the high part 7 | return aLo * bLo + (((aHi * bLo + aLo * bHi) << 16) >>> 0) 8 | } 9 | 10 | export const rotl32 = (x: number, r: number): number => { 11 | const rMod = r % 32 12 | return ((x & ((1 << (32 - rMod)) - 1)) << rMod) | (x >>> (32 - rMod)) 13 | } 14 | 15 | export const fMix32 = (hi: number): number => { 16 | let h = hi 17 | h ^= h >>> 16 18 | h = imul32(h, 0x85ebca6b) 19 | h ^= h >>> 13 20 | h = imul32(h, 0xc2b2ae35) 21 | h ^= h >>> 16 22 | 23 | return h 24 | } 25 | 26 | export const stringToBuffer = (str: string): Buffer => { 27 | return typeof str === "string" ? 
Buffer.from(str) : Buffer.from(String(str)) 28 | } 29 | -------------------------------------------------------------------------------- /src/heartbeat.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from "./logger" 2 | import { HeartbeatRequest } from "./requests/heartbeat_request" 3 | import { Request } from "./requests/request" 4 | 5 | export interface HeartbeatConnection { 6 | send(cmd: Request): Promise 7 | close(): Promise 8 | } 9 | 10 | export class Heartbeat { 11 | private MAX_HEARTBEATS_MISSED = 2 12 | private interval: number = 0 13 | private lastMessageReceived = new Date() 14 | private lastMessageSent = new Date() 15 | private idleCounter: number = 0 16 | private timeout: NodeJS.Timeout | null = null 17 | private heartBeatStarted = false 18 | 19 | constructor( 20 | private readonly connection: HeartbeatConnection, 21 | private readonly logger: Logger 22 | ) {} 23 | 24 | start(secondsInterval: number) { 25 | if (this.heartBeatStarted) throw new Error("HeartBeat already started") 26 | if (secondsInterval <= 0) return 27 | this.interval = secondsInterval * 1000 28 | this.heartBeatStarted = true 29 | this.timeout = setTimeout(() => this.heartbeat(), this.interval) 30 | } 31 | 32 | stop() { 33 | // TODO -> Wait the cycle of heartbeat... 34 | if (this.timeout) { 35 | clearTimeout(this.timeout) 36 | } 37 | this.interval = 0 38 | this.heartBeatStarted = false 39 | } 40 | 41 | public get started() { 42 | return this.heartBeatStarted 43 | } 44 | 45 | reportLastMessageReceived() { 46 | this.lastMessageReceived = new Date() 47 | this.idleCounter = 0 48 | } 49 | 50 | reportLastMessageSent() { 51 | this.lastMessageSent = new Date() 52 | this.idleCounter = 0 53 | } 54 | 55 | private async heartbeat() { 56 | const now = new Date().getTime() 57 | const lastMessageSent = this.lastMessageSent.getTime() 58 | const noMessagesSentFor = Math.abs(now - lastMessageSent) 59 | this.logger.debug(`No messages sent for the last ${noMessagesSentFor} ms and the interval is ${this.interval}`) 60 | if (noMessagesSentFor >= this.interval) { 61 | await this.sendHeartbeat() 62 | this.reportLastMessageSent() 63 | } 64 | await this.idleDetection() 65 | if (this.interval <= 0) return 66 | this.timeout = setTimeout(() => this.heartbeat(), this.interval) 67 | } 68 | 69 | private sendHeartbeat() { 70 | this.logger.debug("Sending heartbeat") 71 | // TODO -> raise and event instead of send data 72 | return this.connection.send(new HeartbeatRequest()) 73 | } 74 | 75 | private async idleDetection() { 76 | const lastMessageReceived = this.lastMessageReceived.getTime() 77 | const noMessagesReceivedFor = Math.abs(new Date().getTime() - lastMessageReceived) 78 | this.logger.debug( 79 | `No messages received for the last ${noMessagesReceivedFor} ms and the interval is ${this.interval}` 80 | ) 81 | const lastMessageSent = this.lastMessageSent.getTime() 82 | const noMessagesSentFor = Math.abs(new Date().getTime() - lastMessageSent) 83 | this.logger.debug(`No messages sent for the last ${noMessagesSentFor} ms and the interval is ${this.interval}`) 84 | if (noMessagesReceivedFor > this.interval && noMessagesSentFor > this.interval) { 85 | this.lastMessageReceived = new Date() 86 | this.lastMessageSent = new Date() 87 | this.idleCounter++ 88 | this.logger && this.logger.debug(`Heartbeat missed! 
counter: ${this.idleCounter}`) 89 | } 90 | if (this.idleCounter === this.MAX_HEARTBEATS_MISSED) { 91 | // TODO -> raise an event instead of make the action 92 | await this.connection.close() 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./client" 2 | export { Publisher } from "./publisher" 3 | export { Consumer } from "./consumer" 4 | export { Offset } from "./requests/subscribe_request" 5 | -------------------------------------------------------------------------------- /src/logger.ts: -------------------------------------------------------------------------------- 1 | export interface Logger { 2 | debug(message: string): void 3 | info(message: string): void 4 | error(message: string): void 5 | warn(message: string): void 6 | } 7 | 8 | export class NullLogger implements Logger { 9 | debug(_message: string): void { 10 | // do nothing 11 | } 12 | info(_message: string): void { 13 | // do nothing 14 | } 15 | error(_message: string): void { 16 | // do nothing 17 | } 18 | warn(_message: string): void { 19 | // do nothing 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/promise_resolver.ts: -------------------------------------------------------------------------------- 1 | export interface PromiseResolver { 2 | resolve: (value: T | PromiseLike) => void 3 | reject: (reason: unknown) => void 4 | } 5 | -------------------------------------------------------------------------------- /src/requests/buffer_data_writer.ts: -------------------------------------------------------------------------------- 1 | import { DEFAULT_UNLIMITED_FRAME_MAX } from "../util" 2 | import { DataWriter } from "./data_writer" 3 | import { BufferSizeParams } from "./request" 4 | 5 | export class BufferDataWriter implements DataWriter { 6 | private _offset = 0 7 | private readonly maxBufferSize: number 8 | private readonly growthTriggerRatio: number 9 | private readonly sizeMultiplier: number 10 | 11 | constructor( 12 | private buffer: Buffer, 13 | startFrom: number, 14 | bufferSizeParameters?: BufferSizeParams 15 | ) { 16 | this._offset = startFrom 17 | this.maxBufferSize = bufferSizeParameters?.maxSize ?? 1048576 18 | this.growthTriggerRatio = bufferSizeParameters?.maxRatio ?? 0.9 19 | this.sizeMultiplier = bufferSizeParameters?.multiplier ?? 
2 20 | } 21 | 22 | get offset() { 23 | return this._offset 24 | } 25 | 26 | writePrefixSize() { 27 | this.buffer.writeUInt32BE(this._offset - 4, 0) 28 | } 29 | 30 | writeData(data: string | Buffer): void { 31 | this.growIfNeeded(Buffer.byteLength(data, "utf-8")) 32 | if (Buffer.isBuffer(data)) { 33 | this._offset += data.copy(this.buffer, this._offset) 34 | return 35 | } 36 | this._offset += this.buffer.write(data, this._offset) 37 | } 38 | 39 | writeByte(data: number): void { 40 | const bytes = 1 41 | this.growIfNeeded(bytes) 42 | this._offset = this.buffer.writeUInt8(data, this._offset) 43 | } 44 | 45 | writeInt8(data: number) { 46 | const bytes = 1 47 | this.growIfNeeded(bytes) 48 | this._offset = this.buffer.writeInt8(data, this._offset) 49 | } 50 | 51 | writeUInt8(data: number): void { 52 | const bytes = 1 53 | this.growIfNeeded(bytes) 54 | this._offset = this.buffer.writeUInt8(data, this._offset) 55 | } 56 | 57 | writeInt16(data: number) { 58 | const bytes = 2 59 | this.growIfNeeded(bytes) 60 | this._offset = this.buffer.writeInt16BE(data, this._offset) 61 | } 62 | 63 | writeUInt16(data: number) { 64 | const bytes = 2 65 | this.growIfNeeded(bytes) 66 | this._offset = this.buffer.writeUInt16BE(data, this._offset) 67 | } 68 | 69 | writeUInt32(data: number): void { 70 | const bytes = 4 71 | this.growIfNeeded(bytes) 72 | this._offset = this.buffer.writeUInt32BE(data, this._offset) 73 | } 74 | 75 | writeInt32(data: number): void { 76 | const bytes = 4 77 | this.growIfNeeded(bytes) 78 | this._offset = this.buffer.writeInt32BE(data, this._offset) 79 | } 80 | 81 | writeUInt64(data: bigint): void { 82 | const bytes = 8 83 | this.growIfNeeded(bytes) 84 | this._offset = this.buffer.writeBigUInt64BE(data, this._offset) 85 | } 86 | 87 | writeInt64(data: bigint): void { 88 | const bytes = 8 89 | this.growIfNeeded(bytes) 90 | this._offset = this.buffer.writeBigInt64BE(data, this._offset) 91 | } 92 | 93 | writeString(data: string): void { 94 | const bytes = 2 95 | this.growIfNeeded(bytes) 96 | this._offset = this.buffer.writeInt16BE(data.length, this._offset) 97 | this.writeData(data) 98 | } 99 | 100 | toBuffer(): Buffer { 101 | return this.buffer.subarray(0, this._offset) 102 | } 103 | 104 | private growIfNeeded(additionalBytes: number) { 105 | if ((this._offset + additionalBytes) / this.buffer.length > this.growthTriggerRatio) { 106 | this.growBuffer(additionalBytes) 107 | } 108 | } 109 | 110 | private growBuffer(requiredBytes: number) { 111 | const newSize = this.getNewSize(requiredBytes) 112 | const data = Buffer.from(this.buffer) 113 | this.buffer = Buffer.alloc(newSize) 114 | data.copy(this.buffer, 0) 115 | } 116 | 117 | private getNewSize(requiredBytes: number) { 118 | const requiredNewSize = this.buffer.length * this.sizeMultiplier + this._offset + requiredBytes 119 | if (this.maxBufferSize === DEFAULT_UNLIMITED_FRAME_MAX) return requiredNewSize 120 | return Math.min(requiredNewSize, this.maxBufferSize) 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /src/requests/close_request.ts: -------------------------------------------------------------------------------- 1 | import { CloseResponse } from "../responses/close_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class CloseRequest extends AbstractRequest { 6 | readonly responseKey = CloseResponse.key 7 | static readonly Key = 0x0016 8 | static readonly Version = 1 9 | readonly key = CloseRequest.Key 10 | 11 | 
constructor(private params: { closingCode: number; closingReason: string }) { 12 | super() 13 | } 14 | 15 | writeContent(writer: DataWriter) { 16 | writer.writeUInt16(this.params.closingCode) 17 | writer.writeString(this.params.closingReason) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/requests/consumer_update_response.ts: -------------------------------------------------------------------------------- 1 | import { ConsumerUpdateQuery } from "../responses/consumer_update_query" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | import { Offset } from "./subscribe_request" 5 | 6 | export class ConsumerUpdateResponse extends AbstractRequest { 7 | readonly responseKey = ConsumerUpdateQuery.key 8 | static readonly Key = 0x801a 9 | static readonly Version = 1 10 | readonly key = ConsumerUpdateResponse.Key 11 | 12 | constructor(private params: { correlationId: number; responseCode: number; offset: Offset }) { 13 | super() 14 | } 15 | 16 | writeContent(b: DataWriter) { 17 | b.writeUInt32(this.params.correlationId) 18 | b.writeUInt16(this.params.responseCode) 19 | this.params.offset.write(b) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/requests/create_stream_request.ts: -------------------------------------------------------------------------------- 1 | import { CreateStreamResponse } from "../responses/create_stream_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export interface CreateStreamArguments { 6 | "queue-leader-locator"?: "random" | "client-local" | "least-leaders" 7 | "max-age"?: string 8 | "stream-max-segment-size-bytes"?: number 9 | "initial-cluster-size"?: number 10 | "max-length-bytes"?: number 11 | } 12 | 13 | export class CreateStreamRequest extends AbstractRequest { 14 | readonly responseKey = CreateStreamResponse.key 15 | static readonly Key = 0x000d 16 | static readonly Version = 1 17 | readonly key = CreateStreamRequest.Key 18 | private readonly _arguments: { key: keyof CreateStreamArguments; value: string | number }[] = [] 19 | private readonly stream: string 20 | 21 | constructor(params: { stream: string; arguments?: CreateStreamArguments }) { 22 | super() 23 | if (params.arguments) { 24 | this._arguments = (Object.keys(params.arguments) as Array).map((key) => { 25 | return { 26 | key, 27 | value: params.arguments![key] ?? "", 28 | } 29 | }) 30 | } 31 | 32 | this.stream = params.stream 33 | } 34 | 35 | writeContent(writer: DataWriter) { 36 | writer.writeString(this.stream) 37 | writer.writeUInt32(this._arguments?.length ?? 
0) 38 | this._arguments.forEach(({ key, value }) => { 39 | writer.writeString(key) 40 | writer.writeString(value.toString()) 41 | }) 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/requests/create_super_stream_request.ts: -------------------------------------------------------------------------------- 1 | import { CreateSuperStreamResponse } from "../responses/create_super_stream_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { CreateStreamArguments } from "./create_stream_request" 4 | import { DataWriter } from "./data_writer" 5 | 6 | export interface CreateSuperStreamParams { 7 | streamName: string 8 | partitions: string[] 9 | bindingKeys: string[] 10 | arguments?: CreateStreamArguments 11 | } 12 | 13 | export class CreateSuperStreamRequest extends AbstractRequest { 14 | readonly responseKey = CreateSuperStreamResponse.key 15 | static readonly Key = 0x001d 16 | static readonly Version = 1 17 | readonly key = CreateSuperStreamRequest.Key 18 | private readonly _arguments: { key: keyof CreateStreamArguments; value: string | number }[] = [] 19 | private readonly streamName: string 20 | private readonly partitions: string[] 21 | private readonly bindingKeys: string[] 22 | 23 | constructor(params: CreateSuperStreamParams) { 24 | super() 25 | if (params.arguments) { 26 | this._arguments = (Object.keys(params.arguments) as Array).map((key) => { 27 | return { 28 | key, 29 | value: params.arguments![key] ?? "", 30 | } 31 | }) 32 | } 33 | this.streamName = params.streamName 34 | this.partitions = params.partitions 35 | this.bindingKeys = params.bindingKeys 36 | } 37 | 38 | writeContent(writer: DataWriter) { 39 | writer.writeString(this.streamName) 40 | writer.writeInt32(this.partitions.length) 41 | this.partitions.forEach((partition) => writer.writeString(partition)) 42 | writer.writeInt32(this.bindingKeys.length) 43 | this.bindingKeys.forEach((bindingKey) => writer.writeString(bindingKey)) 44 | writer.writeUInt32(this._arguments?.length ?? 
0) 45 | this._arguments.forEach(({ key, value }) => { 46 | writer.writeString(key) 47 | writer.writeString(value.toString()) 48 | }) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src/requests/credit_request.ts: -------------------------------------------------------------------------------- 1 | import { AbstractRequest } from "./abstract_request" 2 | import { DataWriter } from "./data_writer" 3 | 4 | export type CreditRequestParams = { 5 | subscriptionId: number 6 | credit: number 7 | } 8 | 9 | export class CreditRequest extends AbstractRequest { 10 | static readonly Key = 0x09 11 | readonly key = CreditRequest.Key 12 | static readonly Version = 1 13 | readonly responseKey = -1 14 | 15 | constructor(private params: CreditRequestParams) { 16 | super() 17 | } 18 | 19 | protected writeContent(writer: DataWriter): void { 20 | writer.writeUInt8(this.params.subscriptionId) 21 | writer.writeUInt16(this.params.credit) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/requests/data_writer.ts: -------------------------------------------------------------------------------- 1 | export interface DataWriter { 2 | writeByte(Described: number): void 3 | writeInt8(data: number): void 4 | writeInt16(data: number): void 5 | writeUInt8(data: number): void 6 | writeUInt16(data: number): void 7 | writeUInt32(data: number): void 8 | writeUInt64(data: bigint): void 9 | writeInt32(length: number): void 10 | writeData(data: string | Buffer): void 11 | writeString(data: string): void 12 | writeInt64(data: bigint): void 13 | } 14 | -------------------------------------------------------------------------------- /src/requests/declare_publisher_request.ts: -------------------------------------------------------------------------------- 1 | import { DeclarePublisherResponse } from "../responses/declare_publisher_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class DeclarePublisherRequest extends AbstractRequest { 6 | readonly responseKey = DeclarePublisherResponse.key 7 | static readonly Key = 0x0001 8 | static readonly Version = 1 9 | readonly key = DeclarePublisherRequest.Key 10 | 11 | constructor(private params: { stream: string; publisherId: number; publisherRef?: string }) { 12 | super() 13 | } 14 | 15 | writeContent(writer: DataWriter) { 16 | writer.writeUInt8(this.params.publisherId) 17 | writer.writeString(this.params.publisherRef || "") 18 | writer.writeString(this.params.stream) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/requests/delete_publisher_request.ts: -------------------------------------------------------------------------------- 1 | import { DeletePublisherResponse } from "../responses/delete_publisher_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class DeletePublisherRequest extends AbstractRequest { 6 | readonly responseKey = DeletePublisherResponse.key 7 | static readonly Key = 0x0006 8 | static readonly Version = 1 9 | readonly key = DeletePublisherRequest.Key 10 | 11 | constructor(private publisherId: number) { 12 | super() 13 | } 14 | 15 | writeContent(writer: DataWriter) { 16 | writer.writeUInt8(this.publisherId) 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/requests/delete_stream_request.ts: 
-------------------------------------------------------------------------------- 1 | import { AbstractRequest } from "./abstract_request" 2 | import { DeleteStreamResponse } from "../responses/delete_stream_response" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class DeleteStreamRequest extends AbstractRequest { 6 | static readonly Key = 0x000e 7 | readonly key = DeleteStreamRequest.Key 8 | static readonly Version = 1 9 | readonly responseKey = DeleteStreamResponse.key 10 | private readonly stream: string 11 | 12 | constructor(stream: string) { 13 | super() 14 | this.stream = stream 15 | } 16 | 17 | protected writeContent(writer: DataWriter): void { 18 | writer.writeString(this.stream) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/requests/delete_super_stream_request.ts: -------------------------------------------------------------------------------- 1 | import { DeleteSuperStreamResponse } from "../responses/delete_super_stream_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class DeleteSuperStreamRequest extends AbstractRequest { 6 | static readonly Key = 0x001e 7 | readonly key = DeleteSuperStreamRequest.Key 8 | static readonly Version = 1 9 | readonly responseKey = DeleteSuperStreamResponse.key 10 | private readonly streamName: string 11 | 12 | constructor(streamName: string) { 13 | super() 14 | this.streamName = streamName 15 | } 16 | 17 | protected writeContent(writer: DataWriter): void { 18 | writer.writeString(this.streamName) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/requests/exchange_command_versions_request.ts: -------------------------------------------------------------------------------- 1 | import { Version } from "../versions" 2 | import { ExchangeCommandVersionsResponse } from "../responses/exchange_command_versions_response" 3 | import { AbstractRequest } from "./abstract_request" 4 | import { DataWriter } from "./data_writer" 5 | 6 | export class ExchangeCommandVersionsRequest extends AbstractRequest { 7 | static readonly Key = 0x001b 8 | readonly key = ExchangeCommandVersionsRequest.Key 9 | static readonly Version = 1 10 | readonly responseKey = ExchangeCommandVersionsResponse.key 11 | constructor(readonly versions: Version[]) { 12 | super() 13 | } 14 | 15 | writeContent(writer: DataWriter): void { 16 | writer.writeInt32(this.versions.length) 17 | this.versions.forEach((entry: Version) => { 18 | writer.writeUInt16(entry.key) 19 | writer.writeUInt16(entry.minVersion) 20 | writer.writeUInt16(entry.maxVersion) 21 | }) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/requests/frame_size_exception.ts: -------------------------------------------------------------------------------- 1 | export class FrameSizeException extends Error {} 2 | -------------------------------------------------------------------------------- /src/requests/heartbeat_request.ts: -------------------------------------------------------------------------------- 1 | import { HeartbeatResponse } from "../responses/heartbeat_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class HeartbeatRequest extends AbstractRequest { 6 | readonly responseKey = HeartbeatResponse.key 7 | static readonly Key = 0x0017 8 | static readonly Version = 1 9 | readonly key = 
HeartbeatRequest.Key 10 | 11 | writeContent(_b: DataWriter) { 12 | return 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/requests/metadata_request.ts: -------------------------------------------------------------------------------- 1 | import { MetadataResponse } from "../responses/metadata_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class MetadataRequest extends AbstractRequest { 6 | readonly responseKey = MetadataResponse.key 7 | static readonly Key = 0x000f 8 | static readonly Version = 1 9 | readonly key = MetadataRequest.Key 10 | 11 | constructor(private params: { streams: string[] }) { 12 | super() 13 | } 14 | 15 | writeContent(writer: DataWriter) { 16 | writer.writeInt32(this.params.streams.length) 17 | this.params.streams.forEach((s) => { 18 | writer.writeString(s) 19 | }) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/requests/metadata_update_request.ts: -------------------------------------------------------------------------------- 1 | import { MetadataInfo } from "../responses/raw_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class MetadataUpdateRequest extends AbstractRequest { 6 | readonly responseKey = -1 7 | static readonly Key = 0x0010 8 | static readonly Version = 1 9 | readonly key = MetadataUpdateRequest.Key 10 | 11 | constructor(private params: { metadataInfo: MetadataInfo }) { 12 | super() 13 | } 14 | 15 | writeContent(b: DataWriter) { 16 | b.writeUInt16(this.params.metadataInfo.code) 17 | b.writeString(this.params.metadataInfo.stream) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/requests/open_request.ts: -------------------------------------------------------------------------------- 1 | import { OpenResponse } from "../responses/open_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class OpenRequest extends AbstractRequest { 6 | readonly responseKey = OpenResponse.key 7 | static readonly Key = 0x0015 8 | static readonly Version = 1 9 | readonly key = OpenRequest.Key 10 | 11 | constructor(private params: { virtualHost: string }) { 12 | super() 13 | } 14 | 15 | writeContent(writer: DataWriter) { 16 | writer.writeString(this.params.virtualHost) 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/requests/partitions_query.ts: -------------------------------------------------------------------------------- 1 | import { PartitionsResponse } from "../responses/partitions_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class PartitionsQuery extends AbstractRequest { 6 | readonly responseKey = PartitionsResponse.key 7 | static readonly Key = 0x0019 8 | static readonly Version = 1 9 | readonly key = PartitionsQuery.Key 10 | 11 | constructor(private params: { superStream: string }) { 12 | super() 13 | } 14 | 15 | writeContent(writer: DataWriter) { 16 | writer.writeString(this.params.superStream) 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/requests/peer_properties_request.ts: -------------------------------------------------------------------------------- 1 | /* 
eslint-disable no-param-reassign */ 2 | 3 | import { PeerPropertiesResponse } from "../responses/peer_properties_response" 4 | import { AbstractRequest } from "./abstract_request" 5 | import { DataWriter } from "./data_writer" 6 | 7 | export const PROPERTIES = { 8 | product: "RabbitMQ Stream", 9 | version: "0.3.0", 10 | platform: "javascript", 11 | copyright: "Copyright (c) 2020-2024 Coders51 srl", 12 | information: "Licensed under the Apache 2.0 and MPL 2.0 licenses. See https://www.rabbitmq.com/", 13 | connection_name: "Unknown", 14 | } 15 | 16 | export class PeerPropertiesRequest extends AbstractRequest { 17 | static readonly Key = 0x11 18 | static readonly Version = 1 19 | readonly key = PeerPropertiesRequest.Key 20 | readonly responseKey = PeerPropertiesResponse.key 21 | private readonly _properties: { key: string; value: string }[] = [] 22 | 23 | constructor(properties: Record = PROPERTIES) { 24 | super() 25 | this._properties = Object.keys(properties).map((key) => ({ key, value: properties[key] })) 26 | } 27 | 28 | protected writeContent(writer: DataWriter) { 29 | writer.writeUInt32(this._properties.length) 30 | this._properties.forEach(({ key, value }) => { 31 | writer.writeString(key) 32 | writer.writeString(value) 33 | }) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/requests/publish_request.ts: -------------------------------------------------------------------------------- 1 | import { amqpEncode } from "../amqp10/encoder" 2 | import { Message } from "../publisher" 3 | import { AbstractRequest } from "./abstract_request" 4 | import { DataWriter } from "./data_writer" 5 | 6 | export type PublishRequestMessage = { 7 | publishingId: bigint 8 | filterValue?: string 9 | message: Message 10 | } 11 | 12 | interface PublishRequestParams { 13 | publisherId: number 14 | messages: Array 15 | } 16 | 17 | export class PublishRequest extends AbstractRequest { 18 | static readonly Key = 0x02 19 | static readonly Version = 1 20 | readonly key = PublishRequest.Key 21 | readonly responseKey = -1 22 | 23 | constructor(private params: PublishRequestParams) { 24 | super() 25 | } 26 | 27 | protected writeContent(writer: DataWriter): void { 28 | writer.writeUInt8(this.params.publisherId) 29 | writer.writeUInt32(this.params.messages.length) 30 | this.params.messages.forEach(({ publishingId, message }) => { 31 | writer.writeUInt64(publishingId) 32 | amqpEncode(writer, message) 33 | }) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/requests/publish_request_v2.ts: -------------------------------------------------------------------------------- 1 | import { amqpEncode } from "../amqp10/encoder" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | import { PublishRequestMessage } from "./publish_request" 5 | 6 | interface PublishRequestParams { 7 | publisherId: number 8 | messages: Array 9 | } 10 | 11 | export class PublishRequestV2 extends AbstractRequest { 12 | static readonly Key = 0x02 13 | static readonly Version = 2 14 | readonly key = PublishRequestV2.Key 15 | readonly responseKey = -1 16 | 17 | constructor(private params: PublishRequestParams) { 18 | super() 19 | } 20 | 21 | protected writeContent(writer: DataWriter): void { 22 | writer.writeUInt8(this.params.publisherId) 23 | writer.writeUInt32(this.params.messages.length) 24 | this.params.messages.forEach(({ publishingId, filterValue, message }) => { 25 | 
writer.writeUInt64(publishingId) 26 | filterValue ? writer.writeString(filterValue) : writer.writeInt16(-1) 27 | amqpEncode(writer, message) 28 | }) 29 | } 30 | 31 | get version(): number { 32 | return PublishRequestV2.Version 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/requests/query_offset_request.ts: -------------------------------------------------------------------------------- 1 | import { QueryOffsetResponse } from "../responses/query_offset_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class QueryOffsetRequest extends AbstractRequest { 6 | readonly responseKey = QueryOffsetResponse.key 7 | static readonly Key = 0x000b 8 | static readonly Version = 1 9 | readonly key = QueryOffsetRequest.Key 10 | private readonly reference: string 11 | private readonly stream: string 12 | 13 | constructor(params: { reference: string; stream: string }) { 14 | super() 15 | this.stream = params.stream 16 | this.reference = params.reference 17 | } 18 | 19 | writeContent(writer: DataWriter) { 20 | writer.writeString(this.reference) 21 | writer.writeString(this.stream) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/requests/query_publisher_request.ts: -------------------------------------------------------------------------------- 1 | import { AbstractRequest } from "./abstract_request" 2 | import { QueryPublisherResponse } from "../responses/query_publisher_response" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class QueryPublisherRequest extends AbstractRequest { 6 | static readonly Key = 0x0005 7 | static readonly Version = 1 8 | readonly key = QueryPublisherRequest.Key 9 | readonly responseKey = QueryPublisherResponse.key 10 | 11 | constructor(private params: { stream: string; publisherRef: string }) { 12 | super() 13 | } 14 | 15 | writeContent(writer: DataWriter) { 16 | writer.writeString(this.params.publisherRef) 17 | writer.writeString(this.params.stream) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/requests/request.ts: -------------------------------------------------------------------------------- 1 | export type BufferSizeSettings = { 2 | initialSize?: number 3 | maxRatio?: number 4 | multiplier?: number 5 | } 6 | 7 | export type BufferSizeParams = BufferSizeSettings & { maxSize: number } 8 | 9 | export interface Request { 10 | toBuffer(bufferSizeParams?: BufferSizeParams, correlationId?: number): Buffer 11 | readonly responseKey: number 12 | readonly key: number 13 | readonly version: number 14 | } 15 | -------------------------------------------------------------------------------- /src/requests/requests.ts: -------------------------------------------------------------------------------- 1 | export { CloseRequest } from "./close_request" 2 | export { CreateStreamRequest } from "./create_stream_request" 3 | export { CreateSuperStreamRequest } from "./create_super_stream_request" 4 | export { CreditRequest } from "./credit_request" 5 | export { DeclarePublisherRequest } from "./declare_publisher_request" 6 | export { DeletePublisherRequest } from "./delete_publisher_request" 7 | export { DeleteStreamRequest } from "./delete_stream_request" 8 | export { DeleteSuperStreamRequest } from "./delete_super_stream_request" 9 | export { ExchangeCommandVersionsRequest } from "./exchange_command_versions_request" 10 | export { 
HeartbeatRequest } from "./heartbeat_request" 11 | export { MetadataRequest } from "./metadata_request" 12 | export { MetadataUpdateRequest } from "./metadata_update_request" 13 | export { OpenRequest } from "./open_request" 14 | export { PeerPropertiesRequest } from "./peer_properties_request" 15 | export { PublishRequest } from "./publish_request" 16 | export { PublishRequestV2 } from "./publish_request_v2" 17 | export { QueryOffsetRequest } from "./query_offset_request" 18 | export { QueryPublisherRequest } from "./query_publisher_request" 19 | export { SaslAuthenticateRequest } from "./sasl_authenticate_request" 20 | export { SaslHandshakeRequest } from "./sasl_handshake_request" 21 | export { StoreOffsetRequest } from "./store_offset_request" 22 | export { StreamStatsRequest } from "./stream_stats_request" 23 | export { SubEntryBatchPublishRequest } from "./sub_entry_batch_publish_request" 24 | export { SubscribeRequest } from "./subscribe_request" 25 | export { TuneRequest } from "./tune_request" 26 | export { UnsubscribeRequest } from "./unsubscribe_request" 27 | export { RouteQuery } from "./route_query" 28 | export { PartitionsQuery } from "./partitions_query" 29 | export { ConsumerUpdateResponse } from "./consumer_update_response" 30 | -------------------------------------------------------------------------------- /src/requests/route_query.ts: -------------------------------------------------------------------------------- 1 | import { RouteResponse } from "../responses/route_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class RouteQuery extends AbstractRequest { 6 | readonly responseKey = RouteResponse.key 7 | static readonly Key = 0x0018 8 | static readonly Version = 1 9 | readonly key = RouteQuery.Key 10 | 11 | constructor(private params: { routingKey: string; superStream: string }) { 12 | super() 13 | } 14 | 15 | writeContent(writer: DataWriter) { 16 | writer.writeString(this.params.routingKey) 17 | writer.writeString(this.params.superStream) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/requests/sasl_authenticate_request.ts: -------------------------------------------------------------------------------- 1 | import { SaslAuthenticateResponse } from "../responses/sasl_authenticate_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | function assertUnreachable(mechanism: string): never { 6 | throw new Error(`Auth mechanism '${mechanism}' not implemented`) 7 | } 8 | 9 | export class SaslAuthenticateRequest extends AbstractRequest { 10 | readonly responseKey = SaslAuthenticateResponse.key 11 | static readonly Key = 0x0013 12 | static readonly Version = 1 13 | readonly key = SaslAuthenticateRequest.Key 14 | 15 | constructor(private params: { mechanism: string; username: string; password: string }) { 16 | super() 17 | } 18 | 19 | protected writeContent(writer: DataWriter): void { 20 | writer.writeString(this.params.mechanism) 21 | switch (this.params.mechanism) { 22 | case "PLAIN": 23 | writer.writeUInt32(this.params.password.length + this.params.username.length + 2) 24 | writer.writeUInt8(0) 25 | writer.writeData(this.params.username) 26 | writer.writeUInt8(0) 27 | writer.writeData(this.params.password) 28 | break 29 | case "EXTERNAL": 30 | writer.writeUInt32(0) 31 | break 32 | default: 33 | assertUnreachable(this.params.mechanism) 34 | } 35 | } 36 | } 37 | 
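The PLAIN branch above writes the standard SASL PLAIN initial response: a 32-bit length prefix, then a zero byte, the username, another zero byte, and the password. A minimal standalone sketch of that byte layout follows; the credentials are placeholders for illustration only and are not taken from this project.

// Sketch only: reproduces the PLAIN payload layout written by SaslAuthenticateRequest above.
import { strict as assert } from "node:assert"

const username = "guest" // placeholder credentials, for illustration
const password = "guest"
const plainPayload = Buffer.concat([
  Buffer.from([0]), // leading NUL (empty authorization identity)
  Buffer.from(username),
  Buffer.from([0]), // NUL separator between username and password
  Buffer.from(password),
])
// Matches the length prefix the request writes: username + password + two NUL bytes
assert.equal(plainPayload.length, username.length + password.length + 2)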
-------------------------------------------------------------------------------- /src/requests/sasl_handshake_request.ts: -------------------------------------------------------------------------------- 1 | import { SaslHandshakeResponse } from "../responses/sasl_handshake_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class SaslHandshakeRequest extends AbstractRequest { 6 | readonly responseKey = SaslHandshakeResponse.key 7 | static readonly Key = 0x0012 8 | static readonly Version = 1 9 | readonly key = SaslHandshakeRequest.Key 10 | 11 | protected writeContent(_dw: DataWriter) { 12 | // do nothing 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/requests/store_offset_request.ts: -------------------------------------------------------------------------------- 1 | import { StoreOffsetResponse } from "../responses/store_offset_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class StoreOffsetRequest extends AbstractRequest { 6 | readonly responseKey = StoreOffsetResponse.key 7 | static readonly Key = 0x000a 8 | static readonly Version = 1 9 | readonly key = StoreOffsetRequest.Key 10 | private readonly reference: string 11 | private readonly stream: string 12 | private readonly offsetValue: bigint 13 | 14 | constructor(params: { reference: string; stream: string; offsetValue: bigint }) { 15 | super() 16 | this.stream = params.stream 17 | this.reference = params.reference 18 | this.offsetValue = params.offsetValue 19 | } 20 | 21 | writeContent(writer: DataWriter) { 22 | writer.writeString(this.reference) 23 | writer.writeString(this.stream) 24 | writer.writeUInt64(this.offsetValue) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/requests/stream_stats_request.ts: -------------------------------------------------------------------------------- 1 | import { StreamStatsResponse } from "../responses/stream_stats_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class StreamStatsRequest extends AbstractRequest { 6 | readonly responseKey = StreamStatsResponse.key 7 | static readonly Key = 0x001c 8 | static readonly Version = 1 9 | readonly key = StreamStatsRequest.Key 10 | 11 | constructor(private streamName: string) { 12 | super() 13 | } 14 | 15 | writeContent(writer: DataWriter) { 16 | writer.writeString(this.streamName) 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/requests/sub_entry_batch_publish_request.ts: -------------------------------------------------------------------------------- 1 | import { amqpEncode, messageSize } from "../amqp10/encoder" 2 | import { Compression, CompressionType } from "../compression" 3 | import { Message } from "../publisher" 4 | import { AbstractRequest, BufferDataWriter } from "./abstract_request" 5 | import { DataWriter } from "./data_writer" 6 | 7 | interface SubEntryBatchPublishRequestParams { 8 | publisherId: number 9 | publishingId: bigint 10 | compression: Compression 11 | maxFrameSize: number 12 | messages: Message[] 13 | } 14 | 15 | export class SubEntryBatchPublishRequest extends AbstractRequest { 16 | static readonly Key = 0x02 17 | static readonly Version = 1 18 | readonly key = SubEntryBatchPublishRequest.Key 19 | readonly responseKey = -1 20 | 
private readonly maxFrameSize: number 21 | 22 | constructor(private params: SubEntryBatchPublishRequestParams) { 23 | super() 24 | this.maxFrameSize = params.maxFrameSize 25 | } 26 | 27 | protected writeContent(writer: DataWriter): void { 28 | const { compression, messages, publishingId, publisherId } = this.params 29 | writer.writeUInt8(publisherId) 30 | // number of root messages. In this case will be always 1 31 | writer.writeUInt32(1) 32 | writer.writeUInt64(publishingId) 33 | writer.writeByte(this.encodeCompressionType(compression.getType())) 34 | writer.writeUInt16(messages.length) 35 | writer.writeUInt32(messages.reduce((sum, message) => sum + 4 + messageSize(message), 0)) 36 | 37 | const initialDataBufferSize = 65536 38 | const bufferSizeParams = { maxSize: this.maxFrameSize } 39 | const data = new BufferDataWriter(Buffer.alloc(initialDataBufferSize), 0, bufferSizeParams) 40 | messages.forEach((m) => amqpEncode(data, m)) 41 | 42 | const compressedData = compression.compress(data.toBuffer()) 43 | 44 | writer.writeUInt32(compressedData.length) 45 | writer.writeData(compressedData) 46 | } 47 | 48 | private encodeCompressionType(compressionType: CompressionType) { 49 | return 0x80 | (compressionType << 4) 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/requests/subscribe_request.ts: -------------------------------------------------------------------------------- 1 | import { SubscribeResponse } from "../responses/subscribe_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | const OFFSET_TYPE = { 6 | first: 1, 7 | last: 2, 8 | next: 3, 9 | numeric: 4, 10 | timestamp: 5, 11 | } as const 12 | 13 | export type OffsetType = keyof typeof OFFSET_TYPE 14 | 15 | export class Offset { 16 | private constructor( 17 | public readonly type: OffsetType, 18 | public readonly value?: bigint 19 | ) {} 20 | 21 | write(writer: DataWriter) { 22 | writer.writeUInt16(OFFSET_TYPE[this.type]) 23 | if (this.type === "numeric" && this.value !== null && this.value !== undefined) writer.writeUInt64(this.value) 24 | if (this.type === "timestamp" && this.value) writer.writeInt64(this.value) 25 | } 26 | 27 | static first() { 28 | return new Offset("first") 29 | } 30 | 31 | static last() { 32 | return new Offset("last") 33 | } 34 | 35 | static next() { 36 | return new Offset("next") 37 | } 38 | 39 | static offset(offset: bigint) { 40 | return new Offset("numeric", offset) 41 | } 42 | 43 | static timestamp(date: Date) { 44 | return new Offset("timestamp", BigInt(date.getTime())) 45 | } 46 | 47 | public clone() { 48 | return new Offset(this.type, this.value) 49 | } 50 | } 51 | 52 | export class SubscribeRequest extends AbstractRequest { 53 | static readonly Key = 0x0007 54 | static readonly Version = 1 55 | readonly key = SubscribeRequest.Key 56 | readonly responseKey = SubscribeResponse.key 57 | private readonly _properties: { key: string; value: string }[] = [] 58 | 59 | constructor( 60 | private params: { 61 | subscriptionId: number 62 | stream: string 63 | credit: number 64 | offset: Offset 65 | properties?: Record 66 | } 67 | ) { 68 | super() 69 | if (params.properties) 70 | this._properties = Object.keys(params.properties).map((key) => ({ key, value: params.properties![key] })) 71 | } 72 | 73 | protected writeContent(writer: DataWriter): void { 74 | writer.writeUInt8(this.params.subscriptionId) 75 | writer.writeString(this.params.stream) 76 | this.params.offset.write(writer) 77 | 
writer.writeUInt16(this.params.credit) 78 | writer.writeUInt32(this._properties.length) 79 | this._properties.forEach(({ key, value }) => { 80 | writer.writeString(key) 81 | writer.writeString(value) 82 | }) 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /src/requests/tune_request.ts: -------------------------------------------------------------------------------- 1 | import { TuneResponse } from "../responses/tune_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class TuneRequest extends AbstractRequest { 6 | readonly responseKey = TuneResponse.key 7 | static readonly Key = 0x0014 8 | static readonly Version = 1 9 | readonly key = TuneRequest.Key 10 | 11 | constructor(private params: { frameMax: number; heartbeat: number }) { 12 | super() 13 | } 14 | 15 | writeContent(b: DataWriter) { 16 | b.writeUInt32(this.params.frameMax) 17 | b.writeUInt32(this.params.heartbeat) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/requests/unsubscribe_request.ts: -------------------------------------------------------------------------------- 1 | import { UnsubscribeResponse } from "../responses/unsubscribe_response" 2 | import { AbstractRequest } from "./abstract_request" 3 | import { DataWriter } from "./data_writer" 4 | 5 | export class UnsubscribeRequest extends AbstractRequest { 6 | static readonly Key = 0x000c 7 | static readonly Version = 1 8 | readonly key = UnsubscribeRequest.Key 9 | readonly responseKey = UnsubscribeResponse.key 10 | 11 | constructor(private subscriptionId: number) { 12 | super() 13 | } 14 | 15 | protected writeContent(writer: DataWriter): void { 16 | writer.writeUInt8(this.subscriptionId) 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/responses/abstract_response.ts: -------------------------------------------------------------------------------- 1 | import { RawResponse } from "./raw_response" 2 | import { Response } from "./response" 3 | 4 | export interface AbstractTypeClass { 5 | name: string 6 | key: number 7 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 8 | new (...args: any[]): AbstractResponse 9 | } 10 | 11 | export abstract class AbstractResponse implements Response { 12 | constructor(protected response: RawResponse) {} 13 | 14 | protected verifyKey(type: AbstractTypeClass) { 15 | if (this.response.key !== type.key) { 16 | throw new Error(`Unable to create ${type.name} from data of type ${this.response.key}`) 17 | } 18 | } 19 | 20 | get key() { 21 | return this.response.key 22 | } 23 | 24 | get correlationId(): number { 25 | return this.response.correlationId 26 | } 27 | 28 | get code(): number { 29 | return this.response.code 30 | } 31 | 32 | get ok(): boolean { 33 | return this.code === 0x01 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/responses/close_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class CloseResponse extends AbstractResponse { 5 | static key = 0x8016 6 | static readonly Version = 1 7 | 8 | constructor(response: RawResponse) { 9 | super(response) 10 | this.verifyKey(CloseResponse) 11 | } 12 | } 13 | 
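Offset (defined in subscribe_request.ts above) is only constructed through its static factories, one per entry in OFFSET_TYPE. A short usage sketch of the five offset kinds, assuming the package-level export of Offset shown in index.ts:

// Sketch only: the five offset kinds a subscription can start from.
import { Offset } from "rabbitmq-stream-js-client"

const fromStart = Offset.first() // OFFSET_TYPE 1: beginning of the stream
const fromEnd = Offset.last() // OFFSET_TYPE 2: start from the last chunk
const onlyNew = Offset.next() // OFFSET_TYPE 3: only messages published after subscribing
const absolute = Offset.offset(1000n) // OFFSET_TYPE 4: numeric offset, as bigint
const byTime = Offset.timestamp(new Date()) // OFFSET_TYPE 5: timestamp, milliseconds since epoch
console.log(fromStart.type, fromEnd.type, onlyNew.type, absolute.type, byTime.type)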
-------------------------------------------------------------------------------- /src/responses/consumer_update_query.ts: -------------------------------------------------------------------------------- 1 | import { BufferDataWriter } from "../requests/buffer_data_writer" 2 | import { RawConsumerUpdateQueryResponse as RawConsumerUpdateQuery } from "./raw_response" 3 | import { Response } from "./response" 4 | 5 | export class ConsumerUpdateQuery implements Response { 6 | static key = 0x001a 7 | static readonly Version = 1 8 | 9 | constructor(private response: RawConsumerUpdateQuery) { 10 | if (this.response.key !== ConsumerUpdateQuery.key) { 11 | throw new Error(`Unable to create ${ConsumerUpdateQuery.name} from data of type ${this.response.key}`) 12 | } 13 | } 14 | 15 | toBuffer(): Buffer { 16 | const bufferSize = 1024 17 | const bufferSizeParams = { maxSize: bufferSize } 18 | const dw = new BufferDataWriter(Buffer.alloc(bufferSize), 4, bufferSizeParams) 19 | dw.writeUInt16(ConsumerUpdateQuery.key) 20 | dw.writeUInt16(1) 21 | dw.writeUInt32(this.response.correlationId) 22 | dw.writeUInt8(this.response.subscriptionId) 23 | dw.writeUInt8(this.response.active) 24 | dw.writePrefixSize() 25 | return dw.toBuffer() 26 | } 27 | 28 | get key() { 29 | return this.response.key 30 | } 31 | 32 | get correlationId(): number { 33 | return this.response.correlationId 34 | } 35 | 36 | get code(): number { 37 | return -1 38 | } 39 | 40 | get ok(): boolean { 41 | return true 42 | } 43 | 44 | get subscriptionId(): number { 45 | return this.response.subscriptionId 46 | } 47 | 48 | get active(): number { 49 | return this.response.active 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/responses/create_stream_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class CreateStreamResponse extends AbstractResponse { 5 | static key = 0x800d 6 | static readonly Version = 1 7 | 8 | constructor(response: RawResponse) { 9 | super(response) 10 | this.verifyKey(CreateStreamResponse) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/responses/create_super_stream_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class CreateSuperStreamResponse extends AbstractResponse { 5 | static key = 0x801d 6 | static readonly Version = 1 7 | 8 | constructor(response: RawResponse) { 9 | super(response) 10 | this.verifyKey(CreateSuperStreamResponse) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/responses/credit_response.ts: -------------------------------------------------------------------------------- 1 | import { BufferDataWriter } from "../requests/buffer_data_writer" 2 | import { RawCreditResponse } from "./raw_response" 3 | import { Response } from "./response" 4 | 5 | export class CreditResponse implements Response { 6 | static key = 0x8009 as const 7 | static readonly Version = 1 8 | 9 | constructor(private response: RawCreditResponse) { 10 | if (this.response.key !== CreditResponse.key) { 11 | throw new Error(`Unable to create ${CreditResponse.name} from data of type ${this.response.key}`) 12 | } 13 | } 14 | 15 | toBuffer(): Buffer { 16 
| const bufferSize = 1024 17 | const bufferSizeParams = { maxSize: bufferSize } 18 | const dw = new BufferDataWriter(Buffer.alloc(bufferSize), 4, bufferSizeParams) 19 | dw.writeUInt16(CreditResponse.key) 20 | dw.writeUInt16(1) 21 | dw.writeUInt16(this.response.responseCode) 22 | dw.writeUInt8(this.response.subscriptionId) 23 | dw.writePrefixSize() 24 | return dw.toBuffer() 25 | } 26 | 27 | get key() { 28 | return this.response.key 29 | } 30 | 31 | get correlationId(): number { 32 | return -1 33 | } 34 | 35 | get code(): number { 36 | return -1 37 | } 38 | 39 | get ok(): boolean { 40 | return true 41 | } 42 | 43 | get responseCode(): number { 44 | return this.response.responseCode 45 | } 46 | 47 | get subscriptionId(): number { 48 | return this.response.subscriptionId 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src/responses/declare_publisher_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class DeclarePublisherResponse extends AbstractResponse { 5 | static key = 0x8001 6 | static readonly Version = 1 7 | 8 | readonly properties: Record = {} 9 | 10 | constructor(response: RawResponse) { 11 | super(response) 12 | this.verifyKey(DeclarePublisherResponse) 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/responses/delete_publisher_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class DeletePublisherResponse extends AbstractResponse { 5 | static key = 0x8006 6 | static readonly Version = 1 7 | 8 | readonly properties: Record = {} 9 | 10 | constructor(response: RawResponse) { 11 | super(response) 12 | this.verifyKey(DeletePublisherResponse) 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/responses/delete_stream_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class DeleteStreamResponse extends AbstractResponse { 5 | static key = 0x800e 6 | static readonly Version = 1 7 | 8 | constructor(response: RawResponse) { 9 | super(response) 10 | this.verifyKey(DeleteStreamResponse) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/responses/delete_super_stream_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class DeleteSuperStreamResponse extends AbstractResponse { 5 | static key = 0x801e 6 | static readonly Version = 1 7 | 8 | constructor(response: RawResponse) { 9 | super(response) 10 | this.verifyKey(DeleteSuperStreamResponse) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/responses/deliver_response.ts: -------------------------------------------------------------------------------- 1 | import { Message } from "../publisher" 2 | import { BufferDataWriter } from "../requests/buffer_data_writer" 3 | import { RawDeliverResponse } from "./raw_response" 4 | import { 
Response } from "./response" 5 | 6 | export class DeliverResponse implements Response { 7 | static key = 0x0008 8 | static readonly Version = 1 9 | 10 | constructor(private response: RawDeliverResponse) { 11 | if (this.response.key !== DeliverResponse.key) { 12 | throw new Error(`Unable to create ${DeliverResponse.name} from data of type ${this.response.key}`) 13 | } 14 | } 15 | 16 | toBuffer(): Buffer { 17 | const bufferSize = 1024 18 | const bufferSizeParams = { maxSize: bufferSize } 19 | const dw = new BufferDataWriter(Buffer.alloc(bufferSize), 4, bufferSizeParams) 20 | dw.writeUInt16(DeliverResponse.key) 21 | dw.writeUInt16(1) 22 | dw.writeUInt8(this.response.subscriptionId) 23 | dw.writePrefixSize() 24 | return dw.toBuffer() 25 | } 26 | 27 | get key() { 28 | return this.response.key 29 | } 30 | 31 | get correlationId(): number { 32 | return -1 33 | } 34 | 35 | get code(): number { 36 | return -1 37 | } 38 | 39 | get ok(): boolean { 40 | return true 41 | } 42 | 43 | get subscriptionId(): number { 44 | return this.response.subscriptionId 45 | } 46 | 47 | get messages(): Message[] { 48 | return this.response.messages 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src/responses/deliver_response_v2.ts: -------------------------------------------------------------------------------- 1 | import { Message } from "../publisher" 2 | import { BufferDataWriter } from "../requests/buffer_data_writer" 3 | import { RawDeliverResponseV2 } from "./raw_response" 4 | import { Response } from "./response" 5 | 6 | export class DeliverResponseV2 implements Response { 7 | static key = 0x0008 8 | static readonly Version = 2 9 | 10 | constructor(private response: RawDeliverResponseV2) { 11 | if (this.response.key !== DeliverResponseV2.key) { 12 | throw new Error(`Unable to create ${DeliverResponseV2.name} from data of type ${this.response.key}`) 13 | } 14 | } 15 | 16 | toBuffer(): Buffer { 17 | const bufferSize = 1024 18 | const bufferSizeParams = { maxSize: bufferSize } 19 | const dw = new BufferDataWriter(Buffer.alloc(bufferSize), 4, bufferSizeParams) 20 | dw.writeUInt16(DeliverResponseV2.key) 21 | dw.writeUInt16(2) 22 | dw.writeUInt8(this.response.subscriptionId) 23 | dw.writeUInt64(this.response.committedChunkId) 24 | dw.writePrefixSize() 25 | return dw.toBuffer() 26 | } 27 | 28 | get key() { 29 | return this.response.key 30 | } 31 | 32 | get correlationId(): number { 33 | return -1 34 | } 35 | 36 | get code(): number { 37 | return -1 38 | } 39 | 40 | get ok(): boolean { 41 | return true 42 | } 43 | 44 | get subscriptionId(): number { 45 | return this.response.subscriptionId 46 | } 47 | 48 | get committedChunkId(): bigint { 49 | return this.response.committedChunkId 50 | } 51 | 52 | get messages(): Message[] { 53 | return this.response.messages 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/responses/exchange_command_versions_response.ts: -------------------------------------------------------------------------------- 1 | import { Version } from "../versions" 2 | import { AbstractResponse } from "./abstract_response" 3 | import { RawResponse } from "./raw_response" 4 | 5 | export class ExchangeCommandVersionsResponse extends AbstractResponse { 6 | static key = 0x801b 7 | static readonly Version = 1 8 | 9 | readonly serverDeclaredVersions: Version[] 10 | 11 | constructor(response: RawResponse) { 12 | super(response) 13 | this.verifyKey(ExchangeCommandVersionsResponse) 14 | 
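// payload layout: int32 entry count, then for each entry key (uint16), minVersion (uint16), maxVersion (uint16)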
this.serverDeclaredVersions = [] as Version[] 15 | const serverDeclaredVersionsCount = response.payload.readInt32() 16 | 17 | for (let i: number = 0; i < serverDeclaredVersionsCount; i++) { 18 | const declaredVersion: Version = { 19 | key: response.payload.readUInt16(), 20 | minVersion: response.payload.readUInt16(), 21 | maxVersion: response.payload.readUInt16(), 22 | } 23 | this.serverDeclaredVersions.push(declaredVersion) 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/responses/heartbeat_response.ts: -------------------------------------------------------------------------------- 1 | import { RawTuneResponse } from "./raw_response" 2 | import { Response } from "./response" 3 | 4 | export class HeartbeatResponse implements Response { 5 | static key = 0x0017 6 | static readonly Version = 1 7 | 8 | constructor(private response: RawTuneResponse) { 9 | if (this.response.key !== HeartbeatResponse.key) { 10 | throw new Error(`Unable to create ${HeartbeatResponse.name} from data of type ${this.response.key}`) 11 | } 12 | } 13 | 14 | get key() { 15 | return this.response.key 16 | } 17 | 18 | get correlationId(): number { 19 | return -1 20 | } 21 | 22 | get code(): number { 23 | return -1 24 | } 25 | 26 | get ok(): boolean { 27 | return true 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/responses/metadata_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { DataReader, RawResponse } from "./raw_response" 3 | 4 | export interface Broker { 5 | reference: number 6 | host: string 7 | port: number 8 | } 9 | 10 | export interface StreamMetadata { 11 | streamName: string 12 | responseCode: number 13 | leader?: Broker 14 | replicas?: Broker[] 15 | } 16 | 17 | export class MetadataResponse extends AbstractResponse { 18 | static key = 0x800f as const 19 | static readonly Version = 1 20 | 21 | readonly streamInfos: StreamMetadata[] = [] 22 | 23 | constructor(response: RawResponse) { 24 | super(response) 25 | this.verifyKey(MetadataResponse) 26 | 27 | const payload = response.payload 28 | 29 | const brokers: Broker[] = [] 30 | 31 | const noOfBrokers = payload.readInt32() 32 | for (let i = 0; i < noOfBrokers; i++) { 33 | brokers.push({ 34 | reference: payload.readUInt16(), 35 | host: payload.readString(), 36 | port: payload.readUInt32(), 37 | }) 38 | } 39 | 40 | const noOfStreamInfos = payload.readInt32() 41 | for (let i = 0; i < noOfStreamInfos; i++) { 42 | const streamName = payload.readString() 43 | const streamInfo = { 44 | streamName, 45 | responseCode: payload.readUInt16(), 46 | } 47 | const leaderReference = payload.readUInt16() 48 | const replicasReferences = this.readReplicasReferencesFrom(response.payload) 49 | const leader = brokers?.find((b) => b.reference === leaderReference) 50 | const replicas = brokers?.filter((b) => replicasReferences.includes(b.reference)) 51 | this.streamInfos.push({ ...streamInfo, leader, replicas }) 52 | } 53 | } 54 | 55 | private readReplicasReferencesFrom(payload: DataReader) { 56 | const replicasReferences: number[] = [] 57 | const howMany = payload.readInt32() 58 | for (let index = 0; index < howMany; index++) { 59 | const reference = payload.readUInt16() 60 | replicasReferences.push(reference) 61 | } 62 | 63 | return replicasReferences 64 | } 65 | 66 | get ok(): boolean { 67 | return true 68 | } 69 | } 70 | 
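// ---- Editor's illustrative sketch: not part of metadata_response.ts ----
// How StreamMetadata is typically consumed: the e2e tests later in this repo connect publishers to the
// stream leader and prefer a replica for consumers. The helper below is a hypothetical illustration of
// that policy, not the library's actual connection-selection logic.
export function pickBrokerFor(purpose: "publish" | "consume", metadata: StreamMetadata): Broker | undefined {
  if (purpose === "publish") return metadata.leader // only the leader accepts writes
  // consumers may read from any member: prefer a replica, fall back to the leader on a single-node cluster
  return metadata.replicas?.[0] ?? metadata.leader
}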
-------------------------------------------------------------------------------- /src/responses/metadata_update_response.ts: -------------------------------------------------------------------------------- 1 | import { BufferDataWriter } from "../requests/buffer_data_writer" 2 | import { MetadataInfo, RawMetadataUpdateResponse } from "./raw_response" 3 | import { Response } from "./response" 4 | 5 | export class MetadataUpdateResponse implements Response { 6 | static key = 0x0010 7 | static readonly Version = 1 8 | 9 | constructor(private response: RawMetadataUpdateResponse) { 10 | if (this.response.key !== MetadataUpdateResponse.key) { 11 | throw new Error(`Unable to create ${MetadataUpdateResponse.name} from data of type ${this.response.key}`) 12 | } 13 | } 14 | 15 | toBuffer(): Buffer { 16 | const bufferSize = 1024 17 | const bufferSizeParams = { maxSize: bufferSize } 18 | const dw = new BufferDataWriter(Buffer.alloc(bufferSize), 4, bufferSizeParams) 19 | dw.writeUInt16(MetadataUpdateResponse.key) 20 | dw.writeUInt16(1) 21 | dw.writeUInt16(this.response.metadataInfo.code) 22 | dw.writeString(this.response.metadataInfo.stream) 23 | dw.writePrefixSize() 24 | return dw.toBuffer() 25 | } 26 | 27 | get key() { 28 | return this.response.key 29 | } 30 | 31 | get correlationId(): number { 32 | return -1 33 | } 34 | 35 | get code(): number { 36 | return -1 37 | } 38 | 39 | get ok(): boolean { 40 | return true 41 | } 42 | 43 | get metadataInfo(): MetadataInfo { 44 | return this.response.metadataInfo 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/responses/open_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class OpenResponse extends AbstractResponse { 5 | static key = 0x8015 6 | static readonly Version = 1 7 | 8 | readonly properties: Record = {} 9 | 10 | constructor(response: RawResponse) { 11 | super(response) 12 | this.verifyKey(OpenResponse) 13 | 14 | const howMany = this.response.payload.readInt32() 15 | for (let index = 0; index < howMany; index++) { 16 | const resKey = this.response.payload.readString() 17 | const resValue = this.response.payload.readString() 18 | this.properties[resKey] = resValue 19 | } 20 | } 21 | 22 | get data(): string { 23 | // TODO how to manage this data?? 
24 | return this.response.payload.toString() 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/responses/partitions_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class PartitionsResponse extends AbstractResponse { 5 | static key = 0x8019 6 | public streams: string[] = [] 7 | 8 | constructor(response: RawResponse) { 9 | super(response) 10 | this.verifyKey(PartitionsResponse) 11 | 12 | const numStreams = this.response.payload.readInt32() 13 | for (let i = 0; i < numStreams; i++) { 14 | this.streams.push(this.response.payload.readString()) 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/responses/peer_properties_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class PeerPropertiesResponse extends AbstractResponse { 5 | static key = 0x8011 6 | static readonly Version = 1 7 | 8 | readonly properties: Record = {} 9 | 10 | constructor(response: RawResponse) { 11 | super(response) 12 | this.verifyKey(PeerPropertiesResponse) 13 | 14 | const howMany = this.response.payload.readInt32() 15 | for (let index = 0; index < howMany; index++) { 16 | const resKey = this.response.payload.readString() 17 | const resValue = this.response.payload.readString() 18 | this.properties[resKey] = resValue 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/responses/publish_confirm_response.ts: -------------------------------------------------------------------------------- 1 | import { BufferDataWriter } from "../requests/buffer_data_writer" 2 | import { RawPublishConfirmResponse } from "./raw_response" 3 | import { Response } from "./response" 4 | 5 | export class PublishConfirmResponse implements Response { 6 | static key = 0x0003 7 | static readonly Version = 1 8 | 9 | public publishingIds: bigint[] 10 | private publisherId: number 11 | constructor(private response: RawPublishConfirmResponse) { 12 | if (this.response.key !== PublishConfirmResponse.key) { 13 | throw new Error(`Unable to create ${PublishConfirmResponse.name} from data of type ${this.response.key}`) 14 | } 15 | this.publishingIds = response.publishingIds 16 | this.publisherId = response.publisherId 17 | } 18 | 19 | toBuffer(): Buffer { 20 | const bufferSize = 1024 21 | const bufferSizeParams = { maxSize: bufferSize } 22 | const dw = new BufferDataWriter(Buffer.alloc(bufferSize), 4, bufferSizeParams) 23 | dw.writeUInt16(PublishConfirmResponse.key) 24 | dw.writeUInt16(1) 25 | dw.writeUInt8(this.publisherId) 26 | dw.writeUInt32(this.publishingIds.length) 27 | for (const pubId of this.publishingIds) { 28 | dw.writeUInt64(pubId) 29 | } 30 | dw.writePrefixSize() 31 | return dw.toBuffer() 32 | } 33 | 34 | get key() { 35 | return this.response.key 36 | } 37 | 38 | get correlationId(): number { 39 | return -1 40 | } 41 | 42 | get code(): number { 43 | return -1 44 | } 45 | 46 | get ok(): boolean { 47 | return true 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /src/responses/publish_error_response.ts: -------------------------------------------------------------------------------- 1 | import { 
BufferDataWriter } from "../requests/buffer_data_writer" 2 | import { RawPublishErrorResponse } from "./raw_response" 3 | import { Response } from "./response" 4 | 5 | interface PublishingError { 6 | publishingId: bigint 7 | code: number 8 | } 9 | 10 | export class PublishErrorResponse implements Response { 11 | static key = 0x0004 12 | static readonly Version = 1 13 | 14 | private publisherId: number 15 | public publishingError: PublishingError 16 | constructor(private response: RawPublishErrorResponse) { 17 | if (this.response.key !== PublishErrorResponse.key) { 18 | throw new Error(`Unable to create ${PublishErrorResponse.name} from data of type ${this.response.key}`) 19 | } 20 | this.publishingError = { publishingId: response.publishingId, code: response.code } 21 | this.publisherId = response.publisherId 22 | } 23 | 24 | toBuffer(): Buffer { 25 | const bufferSize = 1024 26 | const bufferSizeParams = { maxSize: bufferSize } 27 | const dw = new BufferDataWriter(Buffer.alloc(bufferSize), 4, bufferSizeParams) 28 | dw.writeUInt16(PublishErrorResponse.key) 29 | dw.writeUInt16(1) 30 | dw.writeUInt8(this.publisherId) 31 | dw.writeUInt64(this.publishingError.publishingId) 32 | dw.writeUInt16(this.publishingError.code) 33 | dw.writePrefixSize() 34 | return dw.toBuffer() 35 | } 36 | 37 | get key() { 38 | return this.response.key 39 | } 40 | 41 | get correlationId(): number { 42 | return -1 43 | } 44 | 45 | get code(): number { 46 | return -1 47 | } 48 | 49 | get ok(): boolean { 50 | return true 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/responses/query_offset_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class QueryOffsetResponse extends AbstractResponse { 5 | static key = 0x800b 6 | static readonly Version = 1 7 | 8 | readonly offsetValue: bigint 9 | 10 | constructor(response: RawResponse) { 11 | super(response) 12 | this.verifyKey(QueryOffsetResponse) 13 | this.offsetValue = response.payload.readUInt64() 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/responses/query_publisher_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class QueryPublisherResponse extends AbstractResponse { 5 | static key = 0x8005 6 | static readonly Version = 1 7 | 8 | readonly sequence: bigint = 0n 9 | 10 | constructor(response: RawResponse) { 11 | super(response) 12 | this.verifyKey(QueryPublisherResponse) 13 | this.sequence = this.response.payload.readUInt64() 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/responses/raw_response.ts: -------------------------------------------------------------------------------- 1 | import { Message } from "../publisher" 2 | 3 | export interface DataReader { 4 | readBufferOf(length: number): Buffer 5 | readTo(size: number): DataReader 6 | readToEnd(): DataReader 7 | readInt8(): number 8 | readUInt8(): number 9 | readUInt16(): number 10 | readUInt32(): number 11 | readInt32(): number 12 | readUInt64(): bigint 13 | readInt64(): bigint 14 | readString(): string 15 | readString8(): string 16 | readString32(): string 17 | rewind(count: number): void 18 | forward(count: number): void 
19 | position(): number 20 | isAtEnd(): boolean 21 | available(): number 22 | } 23 | 24 | export interface RawResponse { 25 | size: number 26 | key: number 27 | version: number 28 | correlationId: number 29 | code: number 30 | payload: DataReader 31 | } 32 | 33 | export interface RawTuneResponse { 34 | size: number 35 | key: 0x0014 36 | version: number 37 | frameMax: number 38 | heartbeat: number 39 | } 40 | 41 | export interface RawConsumerUpdateQueryResponse { 42 | size: number 43 | key: 0x001a 44 | version: number 45 | correlationId: number 46 | subscriptionId: number 47 | active: number 48 | } 49 | 50 | export interface RawDeliverResponse { 51 | size: number 52 | key: 0x0008 53 | version: number 54 | subscriptionId: number 55 | messages: Message[] 56 | } 57 | 58 | export interface RawDeliverResponseV2 { 59 | size: number 60 | key: 0x0008 61 | version: number 62 | subscriptionId: number 63 | committedChunkId: bigint 64 | messages: Message[] 65 | } 66 | 67 | export interface RawMetadataUpdateResponse { 68 | size: number 69 | key: 0x0010 70 | version: number 71 | metadataInfo: MetadataInfo 72 | } 73 | 74 | export interface MetadataInfo { 75 | code: number 76 | stream: string 77 | } 78 | 79 | export interface RawCreditResponse { 80 | size: number 81 | key: 0x8009 82 | version: number 83 | responseCode: number 84 | subscriptionId: number 85 | } 86 | 87 | export interface RawHeartbeatResponse { 88 | key: 0x0014 89 | version: number 90 | } 91 | 92 | export interface RawPublishConfirmResponse { 93 | size: number 94 | key: 0x0003 95 | version: number 96 | publisherId: number 97 | publishingIds: bigint[] 98 | } 99 | 100 | export interface RawPublishErrorResponse { 101 | size: number 102 | key: 0x0004 103 | version: number 104 | publisherId: number 105 | publishingId: bigint 106 | code: number 107 | } 108 | -------------------------------------------------------------------------------- /src/responses/response.ts: -------------------------------------------------------------------------------- 1 | export interface Response { 2 | code: number 3 | ok: boolean 4 | key: number 5 | correlationId: number 6 | } 7 | -------------------------------------------------------------------------------- /src/responses/responses.ts: -------------------------------------------------------------------------------- 1 | export { CloseResponse } from "./close_response" 2 | export { CreateStreamResponse } from "./create_stream_response" 3 | export { CreateSuperStreamResponse } from "./create_super_stream_response" 4 | export { CreditResponse } from "./credit_response" 5 | export { DeclarePublisherResponse } from "./declare_publisher_response" 6 | export { DeletePublisherResponse } from "./delete_publisher_response" 7 | export { DeleteStreamResponse } from "./delete_stream_response" 8 | export { DeleteSuperStreamResponse } from "./delete_super_stream_response" 9 | export { DeliverResponse } from "./deliver_response" 10 | export { DeliverResponseV2 } from "./deliver_response_v2" 11 | export { ExchangeCommandVersionsResponse } from "./exchange_command_versions_response" 12 | export { HeartbeatResponse } from "./heartbeat_response" 13 | export { MetadataResponse } from "./metadata_response" 14 | export { MetadataUpdateResponse } from "./metadata_update_response" 15 | export { OpenResponse } from "./open_response" 16 | export { PeerPropertiesResponse } from "./peer_properties_response" 17 | export { PublishConfirmResponse } from "./publish_confirm_response" 18 | export { PublishErrorResponse } from 
"./publish_error_response" 19 | export { QueryOffsetResponse } from "./query_offset_response" 20 | export { QueryPublisherResponse } from "./query_publisher_response" 21 | export { SaslAuthenticateResponse } from "./sasl_authenticate_response" 22 | export { SaslHandshakeResponse } from "./sasl_handshake_response" 23 | export { StoreOffsetResponse } from "./store_offset_response" 24 | export { StreamStatsResponse } from "./stream_stats_response" 25 | export { SubscribeResponse } from "./subscribe_response" 26 | export { TuneResponse } from "./tune_response" 27 | export { UnsubscribeResponse } from "./unsubscribe_response" 28 | export { ConsumerUpdateQuery } from "./consumer_update_query" 29 | -------------------------------------------------------------------------------- /src/responses/route_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class RouteResponse extends AbstractResponse { 5 | static key = 0x8018 6 | public streams: string[] = [] 7 | 8 | constructor(response: RawResponse) { 9 | super(response) 10 | this.verifyKey(RouteResponse) 11 | 12 | const numStreams = this.response.payload.readUInt32() 13 | for (let i = 0; i < numStreams; i++) { 14 | this.streams.push(this.response.payload.readString()) 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/responses/sasl_authenticate_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class SaslAuthenticateResponse extends AbstractResponse { 5 | static key = 0x8013 6 | static readonly Version = 1 7 | 8 | constructor(response: RawResponse) { 9 | super(response) 10 | this.verifyKey(SaslAuthenticateResponse) 11 | } 12 | 13 | get data(): string { 14 | // TODO how to manage this data?? 
15 | return this.response.payload.toString() 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/responses/sasl_handshake_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class SaslHandshakeResponse extends AbstractResponse { 5 | static key = 0x8012 6 | static readonly Version = 1 7 | 8 | readonly mechanisms: string[] = [] 9 | 10 | constructor(response: RawResponse) { 11 | super(response) 12 | this.verifyKey(SaslHandshakeResponse) 13 | 14 | const numOfMechanisms = this.response.payload.readInt32() 15 | for (let index = 0; index < numOfMechanisms; index++) { 16 | const mechanism = this.response.payload.readString() 17 | this.mechanisms.push(mechanism) 18 | } 19 | } 20 | 21 | get key() { 22 | return this.response.key 23 | } 24 | 25 | public get correlationId(): number { 26 | return this.response.correlationId 27 | } 28 | 29 | get code(): number { 30 | return this.response.code 31 | } 32 | get ok(): boolean { 33 | return this.code === 0x01 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/responses/store_offset_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export class StoreOffsetResponse extends AbstractResponse { 5 | static key = 0x000a 6 | static readonly Version = 1 7 | 8 | constructor(response: RawResponse) { 9 | super(response) 10 | this.verifyKey(StoreOffsetResponse) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/responses/stream_stats_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | import { RawResponse } from "./raw_response" 3 | 4 | export interface Statistics { 5 | committedChunkId: bigint 6 | firstChunkId: bigint 7 | lastChunkId: bigint 8 | } 9 | 10 | export class StreamStatsResponse extends AbstractResponse { 11 | static key = 0x801c 12 | static readonly Version = 1 13 | 14 | private rawStats: Record = {} 15 | readonly statistics: Statistics = { 16 | committedChunkId: BigInt(0), 17 | firstChunkId: BigInt(0), 18 | lastChunkId: BigInt(0), 19 | } 20 | 21 | constructor(response: RawResponse) { 22 | super(response) 23 | this.verifyKey(StreamStatsResponse) 24 | 25 | const stats = this.response.payload.readInt32() 26 | for (let i = 0; i < stats; i++) { 27 | const statKey = this.response.payload.readString() 28 | const statVal = this.response.payload.readInt64() 29 | this.rawStats[statKey] = statVal 30 | } 31 | 32 | this.statistics.committedChunkId = this.rawStats["committed_chunk_id"] 33 | this.statistics.firstChunkId = this.rawStats["first_chunk_id"] 34 | this.statistics.lastChunkId = this.rawStats["last_chunk_id"] 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/responses/subscribe_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | 3 | export class SubscribeResponse extends AbstractResponse { 4 | static key = 0x8007 5 | static readonly Version = 1 6 | } 7 | 
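// ---- Editor's illustrative sketch: not part of subscribe_response.ts ----
// Hedged example of consuming the Statistics shape built by StreamStatsResponse above; the helper
// name and its calculation are illustrative assumptions, not part of the library.
import { Statistics } from "./stream_stats_response"
export function chunkSpan(stats: Statistics): bigint {
  // distance between the oldest and newest chunk ids the stream currently reports
  return stats.lastChunkId - stats.firstChunkId
}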
-------------------------------------------------------------------------------- /src/responses/tune_response.ts: -------------------------------------------------------------------------------- 1 | import { BufferDataWriter } from "../requests/buffer_data_writer" 2 | import { RawTuneResponse } from "./raw_response" 3 | import { Response } from "./response" 4 | 5 | export class TuneResponse implements Response { 6 | static key = 0x0014 // note: the server sends tune as a command (0x0014), not with the response bit set (0x8014) 7 | static readonly Version = 1 8 | 9 | constructor(private response: RawTuneResponse) { 10 | if (this.response.key !== TuneResponse.key) { 11 | throw new Error(`Unable to create ${TuneResponse.name} from data of type ${this.response.key}`) 12 | } 13 | } 14 | 15 | toBuffer(): Buffer { 16 | const bufferSize = 1024 17 | const bufferSizeParams = { maxSize: bufferSize } 18 | const dw = new BufferDataWriter(Buffer.alloc(bufferSize), 4, bufferSizeParams) 19 | dw.writeUInt16(TuneResponse.key) 20 | dw.writeUInt16(1) 21 | dw.writeUInt32(this.response.frameMax) 22 | dw.writeUInt32(this.response.heartbeat) 23 | dw.writePrefixSize() 24 | return dw.toBuffer() 25 | } 26 | 27 | get key() { 28 | return this.response.key 29 | } 30 | 31 | get correlationId(): number { 32 | return -1 33 | } 34 | 35 | get code(): number { 36 | return -1 37 | } 38 | 39 | get ok(): boolean { 40 | return true 41 | } 42 | 43 | get frameMax(): number { 44 | return this.response.frameMax 45 | } 46 | 47 | get heartbeat(): number { 48 | return this.response.heartbeat 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src/responses/unsubscribe_response.ts: -------------------------------------------------------------------------------- 1 | import { AbstractResponse } from "./abstract_response" 2 | 3 | export class UnsubscribeResponse extends AbstractResponse { 4 | static key = 0x800c 5 | static readonly Version = 1 6 | } 7 | -------------------------------------------------------------------------------- /src/super_stream_consumer.ts: -------------------------------------------------------------------------------- 1 | import { Client } from "./client" 2 | import { Consumer, ConsumerFunc } from "./consumer" 3 | import { ConsumerCreditPolicy, defaultCreditPolicy } from "./consumer_credit_policy" 4 | import { Offset } from "./requests/subscribe_request" 5 | 6 | export class SuperStreamConsumer { 7 | private consumers: Map<string, Consumer> = new Map() 8 | public consumerRef: string 9 | readonly superStream: string 10 | private locator: Client 11 | private partitions: string[] 12 | private offset: Offset 13 | private creditPolicy: ConsumerCreditPolicy 14 | 15 | private constructor( 16 | readonly handle: ConsumerFunc, 17 | params: { 18 | superStream: string 19 | locator: Client 20 | partitions: string[] 21 | consumerRef: string 22 | offset: Offset 23 | creditPolicy?: ConsumerCreditPolicy 24 | } 25 | ) { 26 | this.superStream = params.superStream 27 | this.consumerRef = params.consumerRef 28 | this.locator = params.locator 29 | this.partitions = params.partitions 30 | this.offset = params.offset 31 | this.creditPolicy = params.creditPolicy || defaultCreditPolicy 32 | } 33 | 34 | async start(): Promise<void> { 35 | await Promise.all( 36 | this.partitions.map(async (p) => { 37 | const partitionConsumer = await this.locator.declareConsumer( 38 | { 39 | stream: p, 40 | consumerRef: this.consumerRef, 41 | offset: this.offset, 42 | singleActive: true, 43 | creditPolicy: this.creditPolicy, 44 | }, 45 | this.handle, 46 | this 47 | ) 48 | this.consumers.set(p,
partitionConsumer) 49 | return 50 | }) 51 | ) 52 | } 53 | 54 | static async create( 55 | handle: ConsumerFunc, 56 | params: { 57 | superStream: string 58 | locator: Client 59 | partitions: string[] 60 | consumerRef: string 61 | offset: Offset 62 | creditPolicy?: ConsumerCreditPolicy 63 | } 64 | ): Promise<SuperStreamConsumer> { 65 | const superStreamConsumer = new SuperStreamConsumer(handle, params) 66 | await superStreamConsumer.start() 67 | return superStreamConsumer 68 | } 69 | 70 | async close(): Promise<void> { 71 | await Promise.all([...this.consumers.values()].map((c) => c.close(true))) 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/util.ts: -------------------------------------------------------------------------------- 1 | export function removeFrom<T>(l: T[], predicate: (x: T) => boolean): T | undefined { 2 | const i = l.findIndex(predicate) 3 | if (i === -1) return 4 | const [e] = l.splice(i, 1) 5 | return e 6 | } 7 | 8 | export function range(count: number): number[] { 9 | const ret = Array(count) 10 | for (let index = 0; index < count; index++) { 11 | ret[index] = index 12 | } 13 | return ret 14 | } 15 | 16 | export const DEFAULT_FRAME_MAX = 1048576 17 | export const DEFAULT_UNLIMITED_FRAME_MAX = 0 18 | export const REQUIRED_MANAGEMENT_VERSION = "3.13.0" 19 | export const getMaxSharedConnectionInstances = () => { 20 | return Math.min(+(process.env.MAX_SHARED_CLIENT_INSTANCES ?? 100), 256) // cap at 256: publisher and subscription ids are a single byte 21 | } 22 | 23 | export const getAddressResolverFromEnv = (): { host: string; port: number } => { 24 | const envValue = process.env.RABBIT_MQ_TEST_ADDRESS_BALANCER ?? "localhost:5552" 25 | const [host, port] = envValue.split(":") 26 | return { host: host ?? "localhost", port: port ? parseInt(port) : 5553 } 27 | } 28 | 29 | export const sample = <T>(items: (T | undefined)[]): T | undefined => { 30 | const actualItems = items.filter((c) => !!c) 31 | if (!actualItems.length) { 32 | return undefined 33 | } 34 | const index = Math.floor(Math.random() * actualItems.length) 35 | return actualItems[index]! 36 | } 37 | 38 | export const bigIntMax = (n: bigint[]): bigint | undefined => { 39 | if (!n.length) return undefined 40 | return n.reduce((acc, i) => (i > acc ?
i : acc), n[0]) 41 | } 42 | 43 | export const wait = async (ms: number) => { 44 | return new Promise((res) => { 45 | setTimeout(() => res(true), ms) 46 | }) 47 | } 48 | 49 | export const ResponseCode = { 50 | StreamDoesNotExist: 2, 51 | SubscriptionIdDoesNotExist: 4, 52 | } as const 53 | 54 | export const isString = (value: unknown): boolean => typeof value === "string" 55 | -------------------------------------------------------------------------------- /src/waiting_response.ts: -------------------------------------------------------------------------------- 1 | import { PromiseResolver } from "./promise_resolver" 2 | import { Response } from "./responses/response" 3 | 4 | export class WaitingResponse<T extends Response> { 5 | constructor( 6 | private correlationId: number, 7 | private key: number, 8 | private promise: PromiseResolver<T> 9 | ) {} 10 | 11 | waitingFor(response: Response): boolean { 12 | const correlationFound = this.correlationId === response.correlationId 13 | if (correlationFound && this.key !== response.key) { 14 | throw new Error( 15 | `Waiting response correlationId: ${this.correlationId} but key mismatch waiting: ${this.key} found ${response.key}` 16 | ) 17 | } 18 | return correlationFound 19 | } 20 | 21 | resolve(response: T) { 22 | this.promise.resolve(response) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /test/data/header_amqpvalue_message: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coders51/rabbitmq-stream-js-client/910b8c5490306982579a24da67b4b8477726d58a/test/data/header_amqpvalue_message -------------------------------------------------------------------------------- /test/e2e/address_resolver.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Client, connect } from "../../src" 3 | import { Offset } from "../../src/requests/subscribe_request" 4 | import { getAddressResolverFromEnv } from "../../src/util" 5 | import { createStreamName } from "../support/fake_data" 6 | import { Rabbit } from "../support/rabbit" 7 | import { getTestNodesFromEnv, password, username, wait } from "../support/util" 8 | 9 | describe("address resolver", () => { 10 | let streamName: string 11 | const rabbit = new Rabbit(username, password) 12 | let client: Client 13 | 14 | beforeEach(async () => { 15 | const [firstNode] = getTestNodesFromEnv() 16 | const resolver = getAddressResolverFromEnv() 17 | client = await connect({ 18 | hostname: firstNode.host, 19 | port: firstNode.port, 20 | username, 21 | password, 22 | vhost: "/", 23 | frameMax: 0, 24 | heartbeat: 0, 25 | addressResolver: { enabled: true, endpoint: resolver }, 26 | }) 27 | streamName = createStreamName() 28 | await rabbit.createStream(streamName) 29 | // wait for replicas to be created 30 | await wait(200) 31 | }) 32 | 33 | afterEach(async () => { 34 | try { 35 | await client.close() 36 | await rabbit.deleteStream(streamName) 37 | await rabbit.closeAllConnections() 38 | await rabbit.deleteAllQueues({ match: /my-stream-/ }) 39 | } catch (e) {} 40 | }) 41 | 42 | it("declaring a consumer - should not throw", async () => { 43 | await client.declareConsumer({ stream: streamName, offset: Offset.first() }, () => { 44 | return 45 | }) 46 | }) 47 | 48 | it("declaring a consumer - if multiple nodes are present the consumer should be connected to a replica", async () => { 49 | const consumer = await client.declareConsumer({ stream: streamName,
offset: Offset.first() }, () => { 50 | return 51 | }) 52 | 53 | const connectionInfo = consumer.getConnectionInfo() 54 | const queueInfo = await rabbit.getQueueInfo(streamName) 55 | const nodes = await rabbit.getNodes() 56 | if (nodes.length > 1) { 57 | expect(extractNodeName(queueInfo.node)).not.to.be.eql(connectionInfo.host) 58 | } 59 | }) 60 | 61 | it("declaring a publisher - should not throw", async () => { 62 | await client.declarePublisher({ stream: streamName }) 63 | }) 64 | 65 | it("declaring a publisher - the publisher should be connected to the leader", async () => { 66 | const publisher = await client.declarePublisher({ stream: streamName }) 67 | 68 | const connectionInfo = publisher.getConnectionInfo() 69 | const queueInfo = await rabbit.getQueueInfo(streamName) 70 | expect(extractNodeName(queueInfo.node)).to.be.eql(connectionInfo.host) 71 | }) 72 | }) 73 | 74 | const extractNodeName = (node: string): string => { 75 | const [_, name] = node.split("@") 76 | return name 77 | } 78 | -------------------------------------------------------------------------------- /test/e2e/close_publisher.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Client, Publisher } from "../../src" 3 | import { Rabbit } from "../support/rabbit" 4 | import { password, username } from "../support/util" 5 | import { createClient, createPublisher } from "../support/fake_data" 6 | import { getMaxSharedConnectionInstances } from "../../src/util" 7 | 8 | describe("close publisher", () => { 9 | const rabbit = new Rabbit(username, password) 10 | const testStreamName = "test-stream" 11 | let client: Client 12 | const previousMaxSharedClientInstances = process.env.MAX_SHARED_CLIENT_INSTANCES 13 | 14 | before(() => { 15 | process.env.MAX_SHARED_CLIENT_INSTANCES = "10" 16 | }) 17 | 18 | after(() => { 19 | if (previousMaxSharedClientInstances !== undefined) { 20 | process.env.MAX_SHARED_CLIENT_INSTANCES = previousMaxSharedClientInstances 21 | return 22 | } 23 | delete process.env.MAX_SHARED_CLIENT_INSTANCES 24 | }) 25 | 26 | beforeEach(async () => { 27 | await rabbit.createStream(testStreamName) 28 | client = await createClient(username, password) 29 | }) 30 | 31 | afterEach(async () => { 32 | await client.close() 33 | await rabbit.deleteStream(testStreamName) 34 | }) 35 | 36 | it("closing a publisher", async () => { 37 | const publisher = await client.declarePublisher({ stream: testStreamName }) 38 | 39 | const response = await client.deletePublisher(publisher.extendedId) 40 | 41 | const publisherInfo = publisher.getConnectionInfo() 42 | expect(response).eql(true) 43 | 44 | expect(publisherInfo.writable).eql(false) 45 | }).timeout(5000) 46 | 47 | it("closing a publisher does not close the underlying connection if it is still in use", async () => { 48 | const publisher1 = await createPublisher(testStreamName, client) 49 | const publisher2 = await createPublisher(testStreamName, client) 50 | 51 | await client.deletePublisher(publisher1.extendedId) 52 | 53 | const publisher2Info = publisher2.getConnectionInfo() 54 | expect(publisher2Info.writable).eql(true) 55 | }) 56 | 57 | it("closing all publishers sharing a connection also close the connection", async () => { 58 | const publisher1 = await createPublisher(testStreamName, client) 59 | const publisher2 = await createPublisher(testStreamName, client) 60 | 61 | await client.deletePublisher(publisher1.extendedId) 62 | await client.deletePublisher(publisher2.extendedId) 63 | 64 | const 
publisher1Info = publisher1.getConnectionInfo() 65 | const publisher2Info = publisher2.getConnectionInfo() 66 | expect(publisher1Info.writable).eql(false) 67 | expect(publisher2Info.writable).eql(false) 68 | }) 69 | 70 | it("if publishers for the same stream have different underlying clients, then closing one client does not affect the others publishers", async () => { 71 | const publishersToCreate = getMaxSharedConnectionInstances() + 2 72 | const publishers = new Map() 73 | for (let i = 0; i < publishersToCreate; i++) { 74 | const publisher = await createPublisher(testStreamName, client) 75 | const { localPort } = publisher.getConnectionInfo() 76 | const key = localPort || -1 77 | const currentPublishers = publishers.get(key) || [] 78 | currentPublishers.push(publisher) 79 | publishers.set(key, currentPublishers) 80 | } 81 | const localPort = Array.from(publishers.keys()).at(0) 82 | const closingPublishersSubset = publishers.get(localPort!) || [] 83 | const otherPublishers: Publisher[] = [] 84 | for (const k of publishers.keys()) { 85 | if (k !== localPort) { 86 | otherPublishers.push(...(publishers.get(k) || [])) 87 | } 88 | } 89 | 90 | for (const p of closingPublishersSubset) { 91 | await client.deletePublisher(p.extendedId) 92 | } 93 | 94 | expect(localPort).not.undefined 95 | expect(closingPublishersSubset.length).gt(0) 96 | expect(otherPublishers.length).gt(0) 97 | expect(otherPublishers).satisfies((publisherArray: Publisher[]) => 98 | publisherArray.every((publisher) => { 99 | const { writable } = publisher.getConnectionInfo() 100 | return writable === true 101 | }) 102 | ) 103 | expect(closingPublishersSubset).satisfies((publisherArray: Publisher[]) => 104 | publisherArray.every((publisher) => { 105 | const { writable } = publisher.getConnectionInfo() 106 | return writable !== true 107 | }) 108 | ) 109 | }).timeout(5000) 110 | }) 111 | -------------------------------------------------------------------------------- /test/e2e/cluster_connection_management.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Client } from "../../src" 3 | import { createClient, createConsumer, createPublisher, createStreamName } from "../support/fake_data" 4 | import { Rabbit } from "../support/rabbit" 5 | import { password, username } from "../support/util" 6 | 7 | describe("connection management for clusters (applicable even on a single node)", () => { 8 | const rabbit = new Rabbit(username, password) 9 | let client: Client 10 | let streamName: string 11 | 12 | beforeEach(async () => { 13 | client = await createClient(username, password) 14 | streamName = createStreamName() 15 | await rabbit.createStream(streamName) 16 | }) 17 | 18 | afterEach(async () => { 19 | try { 20 | await client.close() 21 | await rabbit.deleteStream(streamName) 22 | await rabbit.closeAllConnections() 23 | await rabbit.deleteAllQueues({ match: /my-stream-/ }) 24 | } catch (e) {} 25 | }) 26 | 27 | it("when we create a consumer, a new connection is opened", async () => { 28 | const clientConnectionInfo = client.getConnectionInfo() 29 | 30 | const consumer = await createConsumer(streamName, client) 31 | 32 | const consumerConnectionInfo = consumer.getConnectionInfo() 33 | expect(clientConnectionInfo.id).to.not.be.equal(consumerConnectionInfo.id) 34 | }).timeout(10000) 35 | 36 | it("when we create a publisher, a new connection is opened", async () => { 37 | const clientConnectionInfo = client.getConnectionInfo() 38 | 39 | const publisher = await 
createPublisher(streamName, client) 40 | 41 | const publisherConnectionInfo = publisher.getConnectionInfo() 42 | expect(clientConnectionInfo.id).to.not.be.equal(publisherConnectionInfo.id) 43 | }).timeout(10000) 44 | 45 | it("when we create a publisher, the connection should be done on the leader", async () => { 46 | const streamInfo = await rabbit.getQueue("%2F", streamName) 47 | const leader = streamInfo.node 48 | const [leaderHostName] = leader.split("@").slice(-1) 49 | 50 | const publisher = await createPublisher(streamName, client) 51 | 52 | const connectionInfo = publisher.getConnectionInfo() 53 | expect(connectionInfo.host).to.be.equal(leaderHostName) 54 | }).timeout(10000) 55 | 56 | it("closing the client closes all publisher and consumers - no connection is left hanging", async () => { 57 | await createConsumer(streamName, client) 58 | await createConsumer(streamName, client) 59 | await createPublisher(streamName, client) 60 | 61 | await client.close() 62 | 63 | expect(client.consumerCounts()).to.be.equal(0) 64 | expect(client.publisherCounts()).to.be.equal(0) 65 | }).timeout(10000) 66 | }) 67 | -------------------------------------------------------------------------------- /test/e2e/connect.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Client, connect } from "../../src" 3 | import { createClient } from "../support/fake_data" 4 | import { Rabbit } from "../support/rabbit" 5 | import { eventually, username, password, getTestNodesFromEnv, expectToThrowAsync } from "../support/util" 6 | import { Version } from "../../src/versions" 7 | import { randomUUID } from "node:crypto" 8 | import { readFile } from "node:fs/promises" 9 | 10 | async function createTlsClient(): Promise { 11 | const [firstNode] = getTestNodesFromEnv() 12 | return connect({ 13 | hostname: firstNode.host, 14 | port: 5551, 15 | mechanism: "EXTERNAL", 16 | ssl: { 17 | ca: await readFile("./tls-gen/basic/result/ca_certificate.pem", "utf8"), 18 | cert: await readFile(`./tls-gen/basic/result/client_${firstNode.host}_certificate.pem`, "utf8"), 19 | key: await readFile(`./tls-gen/basic/result/client_${firstNode.host}_key.pem`, "utf8"), 20 | }, 21 | username: "", 22 | password: "", 23 | vhost: "/", 24 | }) 25 | } 26 | 27 | describe("connect", () => { 28 | let client: Client 29 | const rabbit = new Rabbit(username, password) 30 | 31 | afterEach(async () => { 32 | try { 33 | await client.close() 34 | } catch (e) {} 35 | 36 | try { 37 | await rabbit.closeAllConnections() 38 | } catch (e) {} 39 | }) 40 | 41 | it("using parameters", async () => { 42 | client = await createClient(username, password) 43 | 44 | await eventually(async () => { 45 | expect(await rabbit.getConnections()).lengthOf(1) 46 | }, 5000) 47 | }).timeout(10000) 48 | 49 | it("throw exception if vhost is not valid", async () => { 50 | const [firstNode] = getTestNodesFromEnv() 51 | 52 | await expectToThrowAsync( 53 | async () => { 54 | client = await connect({ 55 | hostname: firstNode.host, 56 | port: firstNode.port, 57 | username, 58 | password, 59 | vhost: "", 60 | frameMax: 0, 61 | heartbeat: 0, 62 | }) 63 | }, 64 | Error, 65 | `[ERROR]: VirtualHost '' is not valid` 66 | ) 67 | }).timeout(10000) 68 | 69 | it("using EXTERNAL auth", async () => { 70 | client = await createTlsClient() 71 | 72 | await eventually(async () => { 73 | expect(await rabbit.getConnections()).lengthOf(1) 74 | }, 5000) 75 | }).timeout(10000) 76 | 77 | it("declaring connection name", async () => { 78 | 
const connectionName = `connection-name-${randomUUID()}` 79 | client = await createClient(username, password, undefined, undefined, undefined, undefined, connectionName) 80 | 81 | await eventually(async () => { 82 | const connections = await rabbit.getConnections() 83 | expect(connections.length).eql(1) 84 | expect(connections[0].client_properties?.connection_name).eql(connectionName) 85 | }, 5000) 86 | }).timeout(10000) 87 | 88 | it("and receive server-side message version declarations during handshake", async () => { 89 | client = await createClient(username, password) 90 | 91 | await eventually(async () => { 92 | const serverVersions = client.serverVersions 93 | expect(serverVersions.length).gt(0) 94 | expect(serverVersions).satisfies((versions: Version[]) => versions.every((version) => version.minVersion >= 1)) 95 | }, 5000) 96 | }).timeout(10000) 97 | 98 | it("raise exception if server refuse port", async () => { 99 | createClient(username, password, undefined, undefined, undefined, 5550).catch((err) => { 100 | expect(err).to.not.be.null 101 | }) 102 | }).timeout(10000) 103 | }) 104 | -------------------------------------------------------------------------------- /test/e2e/connect_frame_size_negotiation.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { createClient } from "../support/fake_data" 3 | import { Rabbit } from "../support/rabbit" 4 | import { eventually, username, password } from "../support/util" 5 | 6 | describe("connect frame size negotiation", () => { 7 | const rabbit = new Rabbit(username, password) 8 | 9 | it("using 65536 as frameMax", async () => { 10 | const frameMax = 65536 11 | 12 | const client = await createClient(username, password, undefined, frameMax) 13 | 14 | await eventually(async () => { 15 | expect(client.maxFrameSize).lte(frameMax) 16 | expect(await rabbit.getConnections()).lengthOf(1) 17 | }, 5000) 18 | try { 19 | await client.close() 20 | await rabbit.closeAllConnections() 21 | } catch (e) {} 22 | }).timeout(10000) 23 | 24 | it("using 1024 as frameMax", async () => { 25 | const frameMax = 1024 26 | 27 | const client = await createClient(username, password, undefined, frameMax) 28 | 29 | await eventually(async () => { 30 | expect(client.maxFrameSize).lte(frameMax) 31 | expect(await rabbit.getConnections()).lengthOf(1) 32 | }, 5000) 33 | try { 34 | await client.close() 35 | await rabbit.closeAllConnections() 36 | } catch (e) {} 37 | }).timeout(10000) 38 | }) 39 | -------------------------------------------------------------------------------- /test/e2e/metadata_update.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Client, Offset } from "../../src" 3 | import { createClient, createStreamName } from "../support/fake_data" 4 | import { Rabbit } from "../support/rabbit" 5 | import { eventually, password, username } from "../support/util" 6 | 7 | describe("react to a metadata update message from the server", () => { 8 | const rabbit = new Rabbit(username, password) 9 | let client: Client 10 | let streamName: string 11 | 12 | beforeEach(async () => { 13 | client = await createClient(username, password) 14 | streamName = createStreamName() 15 | await rabbit.createStream(streamName) 16 | }) 17 | 18 | afterEach(async function () { 19 | try { 20 | await client.close() 21 | await rabbit.deleteStream(streamName) 22 | await rabbit.closeAllConnections() 23 | await rabbit.deleteAllQueues({ 
match: /my-stream-/ }) 24 | } catch (e) { 25 | console.error("Error while trying to clean up Rabbit's state after testing", e) 26 | } 27 | }) 28 | 29 | it("when we have a metadata update on a stream any consumer on that stream gets removed from the consumers list", async () => { 30 | await client.declareConsumer({ offset: Offset.first(), stream: streamName }, () => { 31 | return 32 | }) 33 | 34 | await rabbit.deleteStream(streamName) 35 | 36 | await eventually(() => { 37 | expect(client.consumerCounts()).to.eql(0) 38 | }, 3000) 39 | }) 40 | 41 | it("when we have a metadata update on a stream the connection closed callback of its consumers fires", async () => { 42 | let cbCalled = 0 43 | await client.declareConsumer( 44 | { offset: Offset.first(), stream: streamName, connectionClosedListener: (_) => cbCalled++ }, 45 | () => { 46 | return 47 | } 48 | ) 49 | 50 | await rabbit.deleteStream(streamName) 51 | 52 | await eventually(() => { 53 | expect(client.consumerCounts()).to.eql(0) 54 | expect(cbCalled).to.eql(1) 55 | }, 3000) 56 | }).timeout(5000) 57 | 58 | it("when we have a metadata update on a stream any publisher on that stream gets closed", async () => { 59 | const publisher = await client.declarePublisher({ stream: streamName }) 60 | 61 | await rabbit.deleteStream(streamName) 62 | 63 | await eventually(() => { 64 | expect(client.publisherCounts()).to.eql(0) 65 | expect(publisher.closed).to.eql(true) 66 | }, 3000) 67 | }) 68 | 69 | it("when we have a metadata update on a stream the connection closed callback of its publishers fires", async () => { 70 | let cbCalled = 0 71 | await client.declarePublisher({ stream: streamName, connectionClosedListener: (_) => cbCalled++ }) 72 | 73 | await rabbit.deleteStream(streamName) 74 | 75 | await eventually(() => { 76 | expect(cbCalled).to.eql(1) 77 | }, 3000) 78 | }) 79 | }) 80 | -------------------------------------------------------------------------------- /test/e2e/partitions_query.test.ts: -------------------------------------------------------------------------------- 1 | import { Client } from "../../src" 2 | import { createClient } from "../support/fake_data" 3 | import { expect } from "chai" 4 | import { username, password } from "../support/util" 5 | import { Rabbit } from "../support/rabbit" 6 | 7 | describe("PartitionsQuery command", () => { 8 | let client: Client 9 | const superStream = "super-stream-test" 10 | const rabbit = new Rabbit(username, password) 11 | 12 | beforeEach(async () => { 13 | client = await createClient(username, password) 14 | }) 15 | 16 | afterEach(async () => { 17 | await client.close() 18 | await rabbit.deleteSuperStream(superStream) 19 | }) 20 | 21 | it("returns a list of stream names", async () => { 22 | await rabbit.createSuperStream(superStream) 23 | 24 | const route = await client.partitionsQuery({ superStream: superStream }) 25 | 26 | expect(route).contains("super-stream-test-0") 27 | }).timeout(10000) 28 | }) 29 | -------------------------------------------------------------------------------- /test/e2e/publish_confirm.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { randomUUID } from "crypto" 3 | import { Client } from "../../src" 4 | import { Rabbit } from "../support/rabbit" 5 | import { eventually, password, username } from "../support/util" 6 | import { createClient } from "../support/fake_data" 7 | 8 | describe("publish a message and get confirmation", () => { 9 | const rabbit = new Rabbit(username, password) 10 | 
let client: Client 11 | let stream: string 12 | const publishResponses: { error: number | null; ids: bigint[] }[] = [] 13 | const publisherRef = "my publisher" 14 | 15 | beforeEach(async () => { 16 | client = await createClient(username, password) 17 | stream = `my-stream-${randomUUID()}` 18 | await rabbit.createStream(stream) 19 | publishResponses.splice(0) 20 | }) 21 | afterEach(async () => await client.close()) 22 | afterEach(() => rabbit.closeAllConnections()) 23 | 24 | it("after the server replies with a confirm, the confirm callback is invoked", async () => { 25 | const publisher = await client.declarePublisher({ stream, publisherRef }) 26 | const publishingId = 1n 27 | publisher.on("publish_confirm", (error, ids) => publishResponses.push({ error, ids })) 28 | 29 | await publisher.basicSend(publishingId, Buffer.from(`test${randomUUID()}`)) 30 | 31 | await eventually(async () => expect((await rabbit.getQueueInfo(stream)).messages).eql(1), 10000) 32 | expect(publishResponses).eql([{ error: null, ids: [publishingId] }]) 33 | }).timeout(10000) 34 | 35 | it("after the server replies with a confirm, the confirm callback is invoked with the publishingId as an argument", async () => { 36 | const publisher = await client.declarePublisher({ stream, publisherRef }) 37 | publisher.on("publish_confirm", (error, ids) => publishResponses.push({ error, ids })) 38 | 39 | await publisher.send(Buffer.from(`test${randomUUID()}`)) 40 | 41 | await eventually(async () => expect((await rabbit.getQueueInfo(stream)).messages).eql(1), 10000) 42 | const lastPublishingId = await publisher.getLastPublishingId() 43 | expect(publishResponses).eql([{ error: null, ids: [lastPublishingId] }]) 44 | }).timeout(12000) 45 | }) 46 | -------------------------------------------------------------------------------- /test/e2e/query_metadata.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Client } from "../../src" 3 | import { createClient, createStreamName } from "../support/fake_data" 4 | import { Rabbit } from "../support/rabbit" 5 | import { getTestNodesFromEnv, password, username } from "../support/util" 6 | 7 | describe("query metadata", () => { 8 | let streamName: string 9 | let nonExistingStreamName: string 10 | const rabbit = new Rabbit(username, password) 11 | let client: Client 12 | const nodes = getTestNodesFromEnv() 13 | beforeEach(async () => { 14 | client = await createClient(username, password) 15 | streamName = createStreamName() 16 | nonExistingStreamName = createStreamName() 17 | await rabbit.createStream(streamName) 18 | }) 19 | 20 | afterEach(async () => { 21 | try { 22 | await client.close() 23 | await rabbit.deleteStream(streamName) 24 | await rabbit.closeAllConnections() 25 | await rabbit.deleteAllQueues({ match: /my-stream-/ }) 26 | } catch (e) { 27 | console.error("Error on metadata query test teardown", e) 28 | } 29 | }) 30 | 31 | it("query the metadata - the response gets parsed correctly and no exception is thrown", async () => { 32 | await client.queryMetadata({ streams: [streamName] }) 33 | }) 34 | 35 | it("query the metadata - the server should return streamMetaData", async () => { 36 | const [streamInfo] = await client.queryMetadata({ streams: [streamName] }) 37 | 38 | expect(streamInfo).to.exist 39 | expect(streamInfo.streamName).to.eql(streamName) 40 | }) 41 | 42 | it("query the metadata - on a non-existing stream the leader or replicas should not be defined", async () => { 43 | const [streamInfo] = await 
client.queryMetadata({ streams: [nonExistingStreamName] }) 44 | 45 | expect(streamInfo.streamName).to.eql(nonExistingStreamName) 46 | expect(streamInfo.leader).not.to.exist 47 | expect(streamInfo.replicas).to.have.lengthOf(0) 48 | }) 49 | 50 | it("querying the metadata - on an existing stream on a single node", async () => { 51 | const [streamInfo] = await client.queryMetadata({ streams: [streamName] }) 52 | 53 | expect(streamInfo.streamName).to.eql(streamName) 54 | expect(streamInfo.responseCode).to.eql(1) 55 | expect({ host: streamInfo.leader?.host, port: streamInfo.leader?.port }).to.be.deep.oneOf(nodes) 56 | }) 57 | 58 | it("querying the metadata - query for multiple streams", async () => { 59 | const secondStreamName = createStreamName() 60 | await rabbit.createStream(secondStreamName) 61 | 62 | const res = await client.queryMetadata({ streams: [streamName, secondStreamName] }) 63 | await rabbit.deleteStream(secondStreamName) 64 | 65 | const firstStreamInfo = res.find((i) => i.streamName === streamName) 66 | const secondStreamInfo = res.find((i) => i.streamName === secondStreamName) 67 | expect(firstStreamInfo).to.exist 68 | expect(firstStreamInfo!.streamName).to.eql(streamName) 69 | expect(firstStreamInfo!.responseCode).to.eql(1) 70 | expect({ host: firstStreamInfo!.leader?.host, port: firstStreamInfo!.leader?.port }).to.be.deep.oneOf(nodes) 71 | expect(secondStreamInfo).to.exist 72 | expect(secondStreamInfo!.streamName).to.eql(secondStreamName) 73 | expect(secondStreamInfo!.responseCode).to.eql(1) 74 | expect({ host: secondStreamInfo!.leader?.host, port: secondStreamInfo!.leader?.port }).to.be.deep.oneOf(nodes) 75 | }) 76 | }) 77 | -------------------------------------------------------------------------------- /test/e2e/query_publisher_sequence.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { randomUUID } from "crypto" 3 | import { Client } from "../../src" 4 | import { createClient, createStreamName } from "../support/fake_data" 5 | import { Rabbit } from "../support/rabbit" 6 | import { username, password } from "../support/util" 7 | 8 | describe("query publisher sequence", () => { 9 | let streamName: string 10 | let client: Client 11 | let publisherRef: string 12 | const rabbit = new Rabbit(username, password) 13 | 14 | beforeEach(async () => { 15 | client = await createClient(username, password) 16 | streamName = createStreamName() 17 | publisherRef = randomUUID() 18 | await rabbit.createStream(streamName) 19 | }) 20 | 21 | afterEach(async () => { 22 | try { 23 | await client.close() 24 | await rabbit.deleteStream(streamName) 25 | await rabbit.closeAllConnections() 26 | await rabbit.deleteAllQueues({ match: /my-stream-/ }) 27 | } catch (e) {} 28 | }) 29 | 30 | it("asking for the last sequence read from a publisher returns the last sequence id", async () => { 31 | const publisher = await client.declarePublisher({ stream: streamName, publisherRef }) 32 | await publisher.basicSend(1n, Buffer.from(`test${randomUUID()}`)) 33 | await publisher.basicSend(2n, Buffer.from(`test${randomUUID()}`)) 34 | await publisher.basicSend(3n, Buffer.from(`test${randomUUID()}`)) 35 | await publisher.basicSend(4n, Buffer.from(`test${randomUUID()}`)) 36 | await publisher.flush() 37 | 38 | const lastPublishingId = await publisher.getLastPublishingId() 39 | 40 | expect(lastPublishingId).to.be.equal(4n) 41 | }).timeout(10000) 42 | 43 | it("asking for the last sequence read from a publisher that has never sent any message 
should return 0", async () => { 44 | const publisher = await client.declarePublisher({ stream: streamName, publisherRef }) 45 | 46 | const lastPublishingId = await publisher.getLastPublishingId() 47 | 48 | expect(lastPublishingId).to.be.equal(0n) 49 | await client.close() 50 | }).timeout(10000) 51 | }) 52 | -------------------------------------------------------------------------------- /test/e2e/route_query.test.ts: -------------------------------------------------------------------------------- 1 | import { Client } from "../../src" 2 | import { createClient } from "../support/fake_data" 3 | import { expect } from "chai" 4 | import { username, password, expectToThrowAsync } from "../support/util" 5 | import { randomUUID } from "crypto" 6 | import { Rabbit } from "../support/rabbit" 7 | 8 | describe("RouteQuery command", () => { 9 | let client: Client 10 | const rabbit = new Rabbit(username, password) 11 | const superStream = `super-stream-test` 12 | 13 | beforeEach(async () => { 14 | client = await createClient(username, password) 15 | }) 16 | 17 | afterEach(async () => { 18 | await client.close() 19 | await rabbit.deleteSuperStream(superStream) 20 | }) 21 | 22 | it("returns a list of stream names", async () => { 23 | await rabbit.createSuperStream(superStream) 24 | 25 | const route = await client.routeQuery({ routingKey: "0", superStream: superStream }) 26 | 27 | expect(route).contains(`${superStream}-0`) 28 | }).timeout(10000) 29 | 30 | it("throws when the super stream does not exist", async () => { 31 | const nonExistingStream = randomUUID() 32 | 33 | await expectToThrowAsync(() => client.routeQuery({ routingKey: "0", superStream: nonExistingStream }), Error) 34 | }) 35 | 36 | it("throws when the stream is not a super stream", async () => { 37 | const streamName = randomUUID() 38 | await rabbit.createStream(streamName) 39 | 40 | try { 41 | await expectToThrowAsync(() => client.routeQuery({ routingKey: "0", superStream: streamName }), Error) 42 | } finally { 43 | await rabbit.deleteStream(streamName) 44 | } 45 | }) 46 | }) 47 | -------------------------------------------------------------------------------- /test/e2e/shared_consumer_clients.test.ts: -------------------------------------------------------------------------------- 1 | import { expect, spy } from "chai" 2 | import { Client, Offset, Publisher } from "../../src" 3 | import { createClient, createStreamName } from "../support/fake_data" 4 | import { Rabbit } from "../support/rabbit" 5 | import { eventually, password, username } from "../support/util" 6 | import { Message } from "../../src/publisher" 7 | import { randomUUID } from "crypto" 8 | 9 | describe("consume messages through multiple consumers", () => { 10 | const rabbit = new Rabbit(username, password) 11 | const testStreamName = createStreamName() 12 | let client: Client 13 | let publisher: Publisher 14 | 15 | beforeEach(async () => { 16 | await rabbit.createStream(testStreamName) 17 | client = await createClient(username, password) 18 | publisher = await client.declarePublisher({ stream: testStreamName }) 19 | }) 20 | 21 | afterEach(async () => { 22 | await client.close() 23 | await rabbit.deleteStream(testStreamName) 24 | }) 25 | 26 | it("single publisher, all consumer callbacks are called", async () => { 27 | const howManyConsumers = 3 28 | const howManyPublished = 10 29 | const spiedCallbacks: ChaiSpies.SpyFunc1Proxy[] = [] 30 | for (let i = 0; i < howManyConsumers; i++) { 31 | const cb = (_msg: Message) => { 32 | return 33 | } 34 | const spied = spy(cb) 35 | 36 | 
await client.declareConsumer({ stream: testStreamName, offset: Offset.first() }, spied) 37 | spiedCallbacks.push(spied) 38 | } 39 | 40 | for (let index = 0; index < howManyPublished; index++) { 41 | await publisher.send(Buffer.from(`test${randomUUID()}`)) 42 | } 43 | 44 | await eventually(async () => { 45 | spiedCallbacks.forEach((cb) => { 46 | expect(cb).to.have.been.called.exactly(howManyPublished) 47 | }) 48 | }, 5000) 49 | }).timeout(6000) 50 | 51 | it("multiple publishers, all consumer callbacks are called", async () => { 52 | const howManyConsumers = 3 53 | const howManyPublished = 10 54 | const publishers = [ 55 | publisher, 56 | await client.declarePublisher({ stream: testStreamName }), 57 | await client.declarePublisher({ stream: testStreamName }), 58 | ] 59 | const spiedCallbacks: ChaiSpies.SpyFunc1Proxy[] = [] 60 | for (let i = 0; i < howManyConsumers; i++) { 61 | const cb = (_msg: Message) => { 62 | return 63 | } 64 | const spied = spy(cb) 65 | 66 | await client.declareConsumer({ stream: testStreamName, offset: Offset.first() }, spied) 67 | spiedCallbacks.push(spied) 68 | } 69 | 70 | for (let index = 0; index < howManyPublished; index++) { 71 | for (const p of publishers) { 72 | await p.send(Buffer.from(`test${randomUUID()}`)) 73 | } 74 | } 75 | 76 | await eventually(async () => { 77 | spiedCallbacks.forEach((cb) => { 78 | expect(cb).to.have.been.called.exactly(howManyPublished * publishers.length) 79 | }) 80 | }, 5000) 81 | }).timeout(6000) 82 | }) 83 | -------------------------------------------------------------------------------- /test/e2e/shared_publisher_clients.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { randomUUID } from "crypto" 3 | import { Client, Offset } from "../../src" 4 | import { Publisher } from "../../src/publisher" 5 | import { createClient, createPublisher, createStreamName } from "../support/fake_data" 6 | import { Rabbit } from "../support/rabbit" 7 | import { eventually, username, password } from "../support/util" 8 | 9 | describe("publish messages through multiple publishers", () => { 10 | const rabbit = new Rabbit(username, password) 11 | let client: Client 12 | let streamName: string 13 | let publisher: Publisher 14 | 15 | beforeEach(async () => { 16 | client = await createClient(username, password) 17 | streamName = createStreamName() 18 | await rabbit.createStream(streamName) 19 | publisher = await createPublisher(streamName, client) 20 | }) 21 | 22 | afterEach(async () => { 23 | try { 24 | await client.close() 25 | await rabbit.deleteStream(streamName) 26 | await rabbit.closeAllConnections() 27 | await rabbit.deleteAllQueues({ match: /my-stream-/ }) 28 | } catch (e) {} 29 | }) 30 | 31 | it("multiple publishers working on the same stream (no message is lost)", async () => { 32 | const publishers = [publisher, await client.declarePublisher({ stream: streamName })] 33 | const localPort = publisher.getConnectionInfo().localPort 34 | const howMany = 50 35 | 36 | for (let index = 0; index < howMany; index++) { 37 | for (const p of publishers) { 38 | await p.send(Buffer.from(`test${randomUUID()}`)) 39 | } 40 | } 41 | 42 | expect(localPort).not.undefined 43 | expect(publishers).satisfies((plist: Publisher[]) => 44 | plist.every((p) => { 45 | const connInfo = p.getConnectionInfo() 46 | return connInfo.localPort === localPort && connInfo.writable === true 47 | }) 48 | ) 49 | await eventually( 50 | async () => expect((await 
rabbit.getQueueInfo(streamName)).messages).eql(howMany * publishers.length), 51 | 10000 52 | ) 53 | }).timeout(15000) 54 | 55 | it("multiple publishers working on the same stream (the order is enforced when using flush)", async () => { 56 | const publishers = [publisher, await client.declarePublisher({ stream: streamName })] 57 | const howMany = 10 58 | const received: string[] = [] 59 | for (let index = 0; index < howMany; index++) { 60 | for (const k of publishers.keys()) { 61 | const p = publishers[k] 62 | await p.send(Buffer.from(`${k};${index}`)) 63 | await p.flush() 64 | } 65 | } 66 | 67 | await client.declareConsumer({ stream: streamName, offset: Offset.first() }, (msg) => { 68 | received.push(msg.content.toString("utf-8")) 69 | }) 70 | 71 | await eventually(async () => expect(received.length).eql(howMany * publishers.length), 10000) 72 | expect(received).satisfies((msgs: string[]) => { 73 | let check = true 74 | for (const receivedMsgKey of msgs.keys()) { 75 | const publisherKey = receivedMsgKey % publishers.length 76 | const msgKey = Math.floor(receivedMsgKey / publishers.length) 77 | const msg = msgs[receivedMsgKey] 78 | check = check && msg === `${publisherKey};${msgKey}` 79 | } 80 | return check 81 | }) 82 | }).timeout(15000) 83 | }) 84 | -------------------------------------------------------------------------------- /test/e2e/stream_cache.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import got from "got" 3 | import { Client } from "../../src" 4 | import { createClient, createStreamName } from "../support/fake_data" 5 | import { Rabbit, RabbitConnectionResponse } from "../support/rabbit" 6 | import { getTestNodesFromEnv, password, username } from "../support/util" 7 | 8 | async function createVhost(vhost: string): Promise<void> { 9 | const uriVhost = encodeURIComponent(vhost) 10 | const port = process.env.RABBIT_MQ_MANAGEMENT_PORT || 15672 11 | const firstNode = getTestNodesFromEnv().shift()! 12 | await got.put(`http://${firstNode.host}:${port}/api/vhosts/${uriVhost}`, { 13 | username: username, 14 | password: password, 15 | }) 16 | await got 17 | .put(`http://${firstNode.host}:${port}/api/permissions/${uriVhost}/${username}`, { 18 | json: { 19 | read: ".*", 20 | write: ".*", 21 | configure: ".*", 22 | }, 23 | username: username, 24 | password: password, 25 | }) 26 | .json() 27 | } 28 | 29 | async function deleteVhost(vhost: string): Promise<string> { 30 | const uriVhost = encodeURIComponent(vhost) 31 | const port = process.env.RABBIT_MQ_MANAGEMENT_PORT || 15672 32 | const firstNode = getTestNodesFromEnv().shift()! 
33 | const r = await got.delete(`http://${firstNode.host}:${port}/api/vhosts/${uriVhost}`, { 34 | username: username, 35 | password: password, 36 | }) 37 | 38 | return r.body 39 | } 40 | 41 | describe("cache", () => { 42 | const vhost1 = "vhost1" 43 | let streamName: string 44 | const rabbit = new Rabbit(username, password) 45 | let client: Client 46 | let client2: Client 47 | before(async () => { 48 | await createVhost(vhost1) 49 | }) 50 | beforeEach(async () => { 51 | client = await createClient(username, password) 52 | client2 = await createClient(username, password, undefined, undefined, undefined, undefined, undefined, vhost1) 53 | streamName = createStreamName() 54 | await client.createStream({ stream: streamName }) 55 | await client2.createStream({ stream: streamName }) 56 | }) 57 | afterEach(async () => { 58 | try { 59 | await client.close() 60 | await client2.close() 61 | await deleteVhost(vhost1) 62 | await rabbit.deleteStream(streamName) 63 | await rabbit.closeAllConnections() 64 | await rabbit.deleteAllQueues({ match: /my-stream-/ }) 65 | } catch (_e) {} 66 | }) 67 | 68 | it("should cache using the vhost as well as the stream name", async () => { 69 | const publisher1 = await client.declarePublisher({ 70 | stream: streamName, 71 | }) 72 | expect(publisher1.getConnectionInfo().vhost).eql("/") 73 | const publisher2 = await client2.declarePublisher({ 74 | stream: streamName, 75 | }) 76 | expect(publisher2.getConnectionInfo().vhost).eql(vhost1) 77 | }) 78 | }) 79 | -------------------------------------------------------------------------------- /test/e2e/sub_entry_publish.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Client, Publisher } from "../../src" 3 | import { createClient, createPublisher, createStreamName } from "../support/fake_data" 4 | import { Rabbit } from "../support/rabbit" 5 | import { eventually, username, password } from "../support/util" 6 | import { CompressionType } from "../../src/compression" 7 | 8 | describe("publish a batch of messages", () => { 9 | const rabbit = new Rabbit(username, password) 10 | let client: Client 11 | let streamName: string 12 | let publisher: Publisher 13 | 14 | beforeEach(async () => { 15 | client = await createClient(username, password) 16 | streamName = createStreamName() 17 | await rabbit.createStream(streamName) 18 | publisher = await createPublisher(streamName, client) 19 | }) 20 | 21 | afterEach(async () => { 22 | try { 23 | await client.close() 24 | await rabbit.deleteStream(streamName) 25 | await rabbit.closeAllConnections() 26 | await rabbit.deleteAllQueues({ match: /my-stream-/ }) 27 | } catch (e) {} 28 | }) 29 | 30 | it("publish a batch of messages - without compression", async () => { 31 | const messages = [ 32 | { content: Buffer.from("Ciao") }, 33 | { content: Buffer.from("Ciao1") }, 34 | { content: Buffer.from("Ciao2") }, 35 | { content: Buffer.from("Ciao3") }, 36 | ] 37 | 38 | await publisher.sendSubEntries(messages) 39 | 40 | await eventually(async () => { 41 | const info = await rabbit.getQueueInfo(streamName) 42 | expect(info.messages).eql(messages.length) 43 | }, 10000) 44 | }).timeout(10000) 45 | 46 | it("publish a batch of messages with compression", async () => { 47 | const messages = [ 48 | { content: Buffer.from("Ciao") }, 49 | { content: Buffer.from("Ciao1") }, 50 | { content: Buffer.from("Ciao2") }, 51 | { content: Buffer.from("Ciao3") }, 52 | ] 53 | 54 | await publisher.sendSubEntries(messages, CompressionType.Gzip) 
55 | 56 | await eventually(async () => { 57 | const info = await rabbit.getQueueInfo(streamName) 58 | expect(info.messages).eql(messages.length) 59 | }, 10000) 60 | }).timeout(10000) 61 | }) 62 | -------------------------------------------------------------------------------- /test/e2e/subscribe.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Client } from "../../src" 3 | import { Offset } from "../../src/requests/subscribe_request" 4 | import { createClient, createStreamName } from "../support/fake_data" 5 | import { Rabbit } from "../support/rabbit" 6 | import { eventually, password, username } from "../support/util" 7 | 8 | describe("subscribe", () => { 9 | const rabbit = new Rabbit(username, password) 10 | let streamName: string 11 | let client: Client 12 | 13 | beforeEach(async () => { 14 | client = await createClient(username, password) 15 | streamName = createStreamName() 16 | await rabbit.createStream(streamName) 17 | }) 18 | 19 | afterEach(async () => { 20 | try { 21 | await client.close() 22 | await rabbit.deleteStream(streamName) 23 | await rabbit.closeAllConnections() 24 | await rabbit.deleteAllQueues({ match: /my-stream-/ }) 25 | } catch (e) {} 26 | }) 27 | 28 | it("subscribe to next message", async () => { 29 | await eventually(async () => { 30 | const res = await client.subscribe({ 31 | subscriptionId: 1, 32 | stream: streamName, 33 | offset: Offset.next(), 34 | credit: 0, 35 | }) 36 | 37 | expect(res.ok).eql(true) 38 | }, 5000) 39 | }).timeout(10000) 40 | 41 | it("subscribe to first message", async () => { 42 | await eventually(async () => { 43 | const res = await client.subscribe({ 44 | subscriptionId: 2, 45 | stream: streamName, 46 | offset: Offset.first(), 47 | credit: 0, 48 | }) 49 | 50 | expect(res.ok).eql(true) 51 | }, 5000) 52 | }).timeout(10000) 53 | 54 | it("subscribe to last message", async () => { 55 | await eventually(async () => { 56 | const res = await client.subscribe({ 57 | subscriptionId: 3, 58 | stream: streamName, 59 | offset: Offset.last(), 60 | credit: 0, 61 | }) 62 | 63 | expect(res.ok).eql(true) 64 | }, 5000) 65 | }).timeout(10000) 66 | 67 | it("subscribe to offset message", async () => { 68 | await eventually(async () => { 69 | const res = await client.subscribe({ 70 | subscriptionId: 4, 71 | stream: streamName, 72 | offset: Offset.offset(BigInt(1)), 73 | credit: 0, 74 | }) 75 | 76 | expect(res.ok).eql(true) 77 | }, 5000) 78 | }).timeout(10000) 79 | 80 | it("subscribe to date message", async () => { 81 | await eventually(async () => { 82 | const res = await client.subscribe({ 83 | subscriptionId: 5, 84 | stream: streamName, 85 | offset: Offset.timestamp(new Date()), 86 | credit: 0, 87 | }) 88 | 89 | expect(res.ok).eql(true) 90 | }, 5000) 91 | }).timeout(10000) 92 | }) 93 | -------------------------------------------------------------------------------- /test/index.ts: -------------------------------------------------------------------------------- 1 | process.env.NODE_ENV = "test" 2 | -------------------------------------------------------------------------------- /test/setup.ts: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coders51/rabbitmq-stream-js-client/910b8c5490306982579a24da67b4b8477726d58a/test/setup.ts -------------------------------------------------------------------------------- /test/support/fake_data.ts: 
-------------------------------------------------------------------------------- 1 | import { randomUUID } from "crypto" 2 | import { Client, ClientListenersParams, connect } from "../../src/client" 3 | import { MessageProperties } from "../../src/publisher" 4 | import { BufferSizeSettings } from "../../src/requests/request" 5 | import { Offset } from "../../src/requests/subscribe_request" 6 | import { Consumer, Publisher } from "../../src" 7 | import { getTestNodesFromEnv } from "./util" 8 | import { createLogger, format, transports } from "winston" 9 | import { inspect } from "util" 10 | import { connect as amqpConnect, Connection } from "amqplib" 11 | 12 | export function createProperties(): MessageProperties { 13 | return { 14 | contentType: `contentType`, 15 | contentEncoding: `contentEncoding`, 16 | replyTo: `replyTo`, 17 | to: `to`, 18 | subject: `subject`, 19 | correlationId: `correlationIdAAA`, 20 | messageId: `messageId`, 21 | userId: Buffer.from(`userId`), 22 | absoluteExpiryTime: new Date(), 23 | creationTime: new Date(), 24 | groupId: `groupId`, 25 | groupSequence: 666, 26 | replyToGroupId: `replyToGroupId`, 27 | } 28 | } 29 | 30 | export function createStreamName(): string { 31 | return `my-stream-${randomUUID()}` 32 | } 33 | 34 | export function createConsumerRef(): string { 35 | return `my-consumer-${randomUUID()}` 36 | } 37 | 38 | export async function createPublisher( 39 | streamName: string, 40 | client: Client, 41 | deduplication: Boolean = false 42 | ): Promise<Publisher> { 43 | const publisher = await client.declarePublisher({ 44 | stream: streamName, 45 | ...(deduplication && { publisherRef: `my-publisher-${randomUUID()}` }), 46 | }) 47 | return publisher 48 | } 49 | 50 | export async function createConsumer(streamName: string, client: Client): Promise<Consumer> { 51 | const id = randomUUID() 52 | const consumer = await client.declareConsumer( 53 | { stream: streamName, offset: Offset.first(), consumerRef: `my-consumer-${id}` }, 54 | async () => { 55 | console.log(`Test consumer with id ${id} received a message`) 56 | } 57 | ) 58 | return consumer 59 | } 60 | 61 | export async function createClient( 62 | username: string, 63 | password: string, 64 | listeners?: ClientListenersParams, 65 | frameMax?: number, 66 | bufferSizeSettings?: BufferSizeSettings, 67 | port?: number, 68 | connectionName?: string, 69 | vhost?: string 70 | ): Promise<Client> { 71 | const [firstNode] = getTestNodesFromEnv() 72 | return connect( 73 | { 74 | hostname: firstNode.host, 75 | port: port ?? firstNode.port, 76 | username, 77 | password, 78 | vhost: vhost ?? "/", 79 | frameMax: frameMax ?? 0, 80 | heartbeat: 0, 81 | listeners: listeners, 82 | bufferSizeSettings: bufferSizeSettings, 83 | connectionName: connectionName, 84 | } 85 | // testLogger 86 | ) 87 | } 88 | 89 | export const testLogger = createLogger({ 90 | level: "debug", 91 | format: format.combine( 92 | format.colorize(), 93 | format.timestamp(), 94 | format.align(), 95 | format.splat(), 96 | format.label(), 97 | format.printf((info) => `${info.timestamp} ${info.level}: ${info.message} ${info.meta ? 
inspect(info.meta) : ""}`) 98 | ), 99 | transports: new transports.Console(), 100 | }) 101 | 102 | export async function createAmqpClient(username: string, password: string): Promise<Connection> { 103 | const [firstNode] = getTestNodesFromEnv() 104 | return await amqpConnect(`amqp://${username}:${password}@${firstNode.host}:5672/`) 105 | } 106 | -------------------------------------------------------------------------------- /test/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../tsconfig.json", 3 | "include": ["./**/*"] 4 | } 5 | -------------------------------------------------------------------------------- /test/unit/buffer_data_writer.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { DEFAULT_FRAME_MAX, DEFAULT_UNLIMITED_FRAME_MAX } from "../../src/util" 3 | import { BufferDataWriter } from "../../src/requests/buffer_data_writer" 4 | describe("Buffer Data Writer functionalities", () => { 5 | const bufferMaxSize = 1024 6 | const bufferInitialSize = 1 7 | const stringPayload = "a long string that requires the buffer to grow" 8 | 9 | it("allocate a functioning buffer data writer", () => { 10 | const bufferSizeParams = { maxSize: bufferMaxSize } 11 | const b = new BufferDataWriter(Buffer.alloc(bufferInitialSize), 0, bufferSizeParams) 12 | b.writeByte(1) 13 | 14 | const result = b.toBuffer() 15 | 16 | expect(result).eql(Buffer.from([1])) 17 | }) 18 | 19 | it("grow the buffer when needed", () => { 20 | const bufferSizeParams = { maxSize: bufferMaxSize } 21 | const b = new BufferDataWriter(Buffer.alloc(bufferInitialSize), 0, bufferSizeParams) 22 | 23 | b.writeString(stringPayload) 24 | 25 | const result = b.toBuffer() 26 | const header = result.subarray(0, 2) 27 | const pl = result.subarray(2) 28 | expect(header).eql(Buffer.from([0, 46])) 29 | expect(pl.length).eql(46) 30 | expect(pl.toString()).eql(stringPayload) 31 | }) 32 | 33 | it("the buffer max size is a hard limit", () => { 34 | const maxSize = 32 35 | const bufferSizeParams = { maxSize: maxSize } 36 | const b = new BufferDataWriter(Buffer.alloc(bufferInitialSize), 0, bufferSizeParams) 37 | 38 | b.writeString(stringPayload) 39 | 40 | const result = b.toBuffer() 41 | const pl = result.subarray(2) 42 | expect(pl.toString()).eql("a long string that requires th") 43 | }) 44 | 45 | it("when maxSize === DEFAULT_UNLIMITED_FRAME_MAX, the buffer can grow", () => { 46 | const bufferSizeParams = { maxSize: DEFAULT_UNLIMITED_FRAME_MAX } 47 | const b = new BufferDataWriter(Buffer.alloc(bufferInitialSize), 0, bufferSizeParams) 48 | const payload = Buffer.from( 49 | Array.from(Array(DEFAULT_FRAME_MAX + 1).keys()) 50 | .map((_k) => "") 51 | .join(",") 52 | ) 53 | 54 | b.writeData(payload) 55 | 56 | const result = b.toBuffer() 57 | expect(result).eql(payload) 58 | }) 59 | }) 60 | -------------------------------------------------------------------------------- /test/unit/create_stream.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { randomUUID } from "crypto" 3 | import { Client } from "../../src" 4 | import { createClient } from "../support/fake_data" 5 | import { Rabbit } from "../support/rabbit" 6 | import { expectToThrowAsync, password, username } from "../support/util" 7 | 8 | describe("Stream", () => { 9 | const rabbit = new Rabbit(username, password) 10 | const streamName = `test-stream-${randomUUID()}` 11 | const 
payload = { 12 | "queue-leader-locator": "random" as const, 13 | "max-age": "120s", 14 | "stream-max-segment-size-bytes": 1000, 15 | "initial-cluster-size": 5, 16 | "max-length-bytes": 20000, 17 | } 18 | let client: Client 19 | 20 | beforeEach(async () => { 21 | client = await createClient(username, password) 22 | }) 23 | 24 | afterEach(async () => { 25 | try { 26 | await rabbit.deleteQueue("%2F", streamName) 27 | } catch (error) {} 28 | }) 29 | afterEach(async () => { 30 | try { 31 | await client.close() 32 | } catch (error) {} 33 | }) 34 | 35 | after(() => rabbit.closeAllConnections()) 36 | 37 | describe("Create", () => { 38 | it("Should create a new Stream", async () => { 39 | const resp = await client.createStream({ stream: streamName, arguments: payload }) 40 | 41 | expect(resp).to.be.true 42 | const result = await rabbit.getQueue("%2F", streamName) 43 | expect(result.name).to.be.eql(streamName) 44 | }) 45 | 46 | it("Should create a new Stream with the given arguments", async () => { 47 | const resp = await client.createStream({ stream: streamName, arguments: payload }) 48 | 49 | expect(resp).to.be.true 50 | const result = await rabbit.getQueueInfo(streamName) 51 | expect(result.arguments).to.be.eql({ 52 | "x-queue-type": "stream", 53 | "x-queue-leader-locator": payload["queue-leader-locator"], 54 | "x-max-age": payload["max-age"], 55 | "x-stream-max-segment-size-bytes": payload["stream-max-segment-size-bytes"], 56 | "x-initial-cluster-size": payload["initial-cluster-size"], 57 | "x-max-length-bytes": payload["max-length-bytes"], 58 | }) 59 | }) 60 | 61 | it("Should be idempotent and ignore a duplicate Stream error", async () => { 62 | await client.createStream({ stream: streamName, arguments: payload }) 63 | const resp = await client.createStream({ stream: streamName, arguments: payload }) 64 | 65 | expect(resp).to.be.true 66 | }) 67 | 68 | it("Should raise an error if creation goes wrong", async () => { 69 | await expectToThrowAsync( 70 | () => client.createStream({ stream: "", arguments: payload }), 71 | Error, 72 | "Create Stream command returned error with code 17" 73 | ) 74 | }) 75 | }) 76 | }) 77 | -------------------------------------------------------------------------------- /test/unit/create_super_stream.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Client } from "../../src" 3 | import { randomUUID } from "crypto" 4 | import { createClient } from "../support/fake_data" 5 | import { Rabbit } from "../support/rabbit" 6 | import { expectToThrowAsync, password, username } from "../support/util" 7 | import { coerce, lt } from "semver" 8 | 9 | describe("Super Stream", () => { 10 | const rabbit = new Rabbit(username, password) 11 | const streamName = `test-stream-${randomUUID()}` 12 | const payload = { 13 | "queue-leader-locator": "random" as const, 14 | "max-age": "120s", 15 | "stream-max-segment-size-bytes": 1000, 16 | "initial-cluster-size": 5, 17 | "max-length-bytes": 20000, 18 | } 19 | let client: Client 20 | 21 | before(async function () { 22 | client = await createClient(username, password) 23 | // eslint-disable-next-line no-invalid-this 24 | if (lt(coerce(client.rabbitManagementVersion)!, "3.13.0")) this.skip() 25 | }) 26 | 27 | afterEach(async () => { 28 | try { 29 | await rabbit.deleteAllQueues({ match: /test-stream-/ }) 30 | await rabbit.deleteExchange(streamName) 31 | } catch (error) {} 32 | }) 33 | 34 | after(async () => { 35 | try { 36 | await client.close() 37 | await 
rabbit.closeAllConnections() 38 | } catch (error) {} 39 | }) 40 | 41 | describe("Create", () => { 42 | it("Should create a new Super Stream with 3 partitions by default", async () => { 43 | const resp = await client.createSuperStream({ streamName, arguments: payload }) 44 | 45 | expect(resp).to.be.true 46 | const result = await rabbit.getSuperStreamQueues("%2F", streamName) 47 | expect(result.map((r) => r.name)).to.have.members(Array.from(Array(3).keys()).map((n) => `${streamName}-${n}`)) 48 | }) 49 | 50 | it("Should create a new Super Stream with 3 partitions by default with the given arguments", async () => { 51 | const resp = await client.createSuperStream({ streamName, arguments: payload }) 52 | 53 | expect(resp).to.be.true 54 | const result = await rabbit.getSuperStreamQueues("%2F", streamName) 55 | expect(result.map((r) => r.name)).to.have.members(Array.from(Array(3).keys()).map((n) => `${streamName}-${n}`)) 56 | await Promise.all( 57 | Array.from(Array(3).keys()).map(async (n) => { 58 | const queue = await rabbit.getQueueInfo(`${streamName}-${n}`) 59 | expect(queue.arguments).to.be.eql({ 60 | "x-queue-type": "stream", 61 | "x-queue-leader-locator": payload["queue-leader-locator"], 62 | "x-max-age": payload["max-age"], 63 | "x-stream-max-segment-size-bytes": payload["stream-max-segment-size-bytes"], 64 | "x-initial-cluster-size": payload["initial-cluster-size"], 65 | "x-max-length-bytes": payload["max-length-bytes"], 66 | }) 67 | }) 68 | ) 69 | }) 70 | 71 | it("Should create a new Super Stream with 2 partitions", async () => { 72 | const resp = await client.createSuperStream({ streamName, arguments: payload }, undefined, 2) 73 | 74 | expect(resp).to.be.true 75 | const result = await rabbit.getSuperStreamQueues("%2F", streamName, 2) 76 | expect(result.map((r) => r.name)).to.have.members(Array.from(Array(2).keys()).map((n) => `${streamName}-${n}`)) 77 | }) 78 | 79 | it("Should create a new Super Stream with 2 partitions and with bindingKeys", async () => { 80 | const resp = await client.createSuperStream({ streamName, arguments: payload }, ["A", "B"], 2) 81 | 82 | expect(resp).to.be.true 83 | const result = await rabbit.getSuperStreamQueues("%2F", streamName, 2, ["A", "B"]) 84 | expect(result.map((r) => r.name)).to.have.members(["A", "B"].map((bk) => `${streamName}-${bk}`)) 85 | }) 86 | 87 | it("Should be idempotent and ignore a duplicate Stream error", async () => { 88 | await client.createSuperStream({ streamName, arguments: payload }) 89 | const resp = await client.createSuperStream({ streamName, arguments: payload }) 90 | 91 | expect(resp).to.be.true 92 | }) 93 | 94 | it("Should raise an error if creation goes wrong", async () => { 95 | await expectToThrowAsync( 96 | () => client.createSuperStream({ streamName: "", arguments: payload }), 97 | Error, 98 | "Create Super Stream command returned error with code 17" 99 | ) 100 | }) 101 | }) 102 | }) 103 | -------------------------------------------------------------------------------- /test/unit/delete_publisher.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { randomUUID } from "crypto" 3 | import { Client } from "../../src" 4 | import { computeExtendedPublisherId } from "../../src/publisher" 5 | import { createClient } from "../support/fake_data" 6 | import { Rabbit } from "../support/rabbit" 7 | import { expectToThrowAsync, password, username } from "../support/util" 8 | 9 | describe("DeletePublisher command", () => { 10 | const rabbit = new 
Rabbit(username, password) 11 | const testStreamName = "test-stream" 12 | let client: Client 13 | let publisherRef: string 14 | 15 | beforeEach(async () => { 16 | publisherRef = randomUUID() 17 | await rabbit.createStream(testStreamName) 18 | client = await createClient(username, password) 19 | }) 20 | 21 | afterEach(async () => { 22 | await client.close() 23 | await rabbit.deleteStream(testStreamName) 24 | }) 25 | 26 | it("can delete a publisher", async () => { 27 | const publisher = await client.declarePublisher({ stream: testStreamName, publisherRef }) 28 | await publisher.send(Buffer.from(`test${randomUUID()}`)) 29 | 30 | const deletePublisher = await client.deletePublisher(publisher.extendedId) 31 | expect(deletePublisher).eql(true) 32 | }).timeout(10000) 33 | 34 | it("errors when deleting a publisher that does not exist", async () => { 35 | const nonExistentPublisherId = computeExtendedPublisherId(42, randomUUID()) 36 | 37 | await expectToThrowAsync( 38 | () => client.deletePublisher(nonExistentPublisherId), 39 | Error, 40 | "Delete Publisher command returned error with code 18 - Publisher does not exist" 41 | ) 42 | }).timeout(10000) 43 | }) 44 | -------------------------------------------------------------------------------- /test/unit/delete_stream.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Client } from "../../src" 3 | import { createClient } from "../support/fake_data" 4 | import { Rabbit } from "../support/rabbit" 5 | import { expectToThrowAsync, password, username } from "../support/util" 6 | 7 | describe("Delete command", () => { 8 | const rabbit: Rabbit = new Rabbit(username, password) 9 | let client: Client 10 | const queue_name = `queue_${(Math.random() * 10) | 0}` 11 | 12 | beforeEach(async () => { 13 | client = await createClient(username, password) 14 | }) 15 | 16 | afterEach(async () => { 17 | await rabbit.deleteAllQueues({ match: /queue/ }) 18 | }) 19 | 20 | afterEach(async () => { 21 | try { 22 | await client.close() 23 | } catch (error) {} 24 | }) 25 | 26 | after(() => rabbit.closeAllConnections()) 27 | 28 | it("delete a nonexisting stream (raises error)", async () => { 29 | await expectToThrowAsync( 30 | () => client?.deleteStream({ stream: "AAA" }), 31 | Error, 32 | "Delete Stream command returned error with code 2" 33 | ) 34 | }) 35 | 36 | it("delete an existing stream", async () => { 37 | await rabbit.createQueue("%2F", queue_name) 38 | await rabbit.getQueue("%2F", queue_name) 39 | let errorOnRetrieveAfterDeletion = null 40 | 41 | const result = await client?.deleteStream({ stream: queue_name }) 42 | 43 | try { 44 | await rabbit.getQueue("%2F", queue_name) 45 | } catch (e) { 46 | errorOnRetrieveAfterDeletion = e 47 | } 48 | expect(result).to.be.true 49 | expect(errorOnRetrieveAfterDeletion).is.not.null 50 | }) 51 | }) 52 | -------------------------------------------------------------------------------- /test/unit/delete_super_stream.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Client } from "../../src" 3 | import { createClient } from "../support/fake_data" 4 | import { Rabbit } from "../support/rabbit" 5 | import { expectToThrowAsync, password, username } from "../support/util" 6 | import { coerce, lt } from "semver" 7 | 8 | describe("Delete Super Stream command", () => { 9 | const rabbit: Rabbit = new Rabbit(username, password) 10 | let client: Client 11 | const streamName = 
`stream_${(Math.random() * 10) | 0}` 12 | 13 | before(async function () { 14 | client = await createClient(username, password) 15 | // eslint-disable-next-line no-invalid-this 16 | if (lt(coerce(client.rabbitManagementVersion)!, "3.13.0")) this.skip() 17 | }) 18 | 19 | afterEach(async () => { 20 | try { 21 | await rabbit.deleteAllQueues({ match: /stream_/ }) 22 | await rabbit.deleteExchange(streamName) 23 | } catch (error) {} 24 | }) 25 | 26 | after(async () => { 27 | try { 28 | await client.close() 29 | await rabbit.closeAllConnections() 30 | } catch (error) {} 31 | }) 32 | 33 | it("delete a nonexisting super stream (raises error)", async () => { 34 | await expectToThrowAsync( 35 | () => client.deleteSuperStream({ streamName: "AAA" }), 36 | Error, 37 | "Delete Super Stream command returned error with code 2" 38 | ) 39 | }) 40 | 41 | it("delete an existing super stream", async () => { 42 | await client.createSuperStream({ streamName, arguments: {} }) 43 | let errorOnRetrieveAfterDeletion = null 44 | 45 | const result = await client.deleteSuperStream({ streamName }) 46 | 47 | try { 48 | await rabbit.getSuperStreamQueues("%2F", streamName) 49 | } catch (e) { 50 | errorOnRetrieveAfterDeletion = e 51 | } 52 | expect(result).to.be.true 53 | expect(errorOnRetrieveAfterDeletion).is.not.null 54 | }) 55 | }) 56 | -------------------------------------------------------------------------------- /test/unit/heartbeat.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { Heartbeat, HeartbeatConnection } from "../../src/heartbeat" 3 | import { NullLogger } from "../../src/logger" 4 | import { Request } from "../../src/requests/request" 5 | import { eventually, expectToThrowAsync, wait } from "../support/util" 6 | 7 | class ConnectionMock implements HeartbeatConnection { 8 | private sendCount = 0 9 | 10 | close(): Promise<void> { 11 | throw new Error("Method not implemented.") 12 | } 13 | 14 | send(_cmd: Request): Promise<void> { 15 | this.sendCount++ 16 | return Promise.resolve() 17 | } 18 | 19 | getSendCount() { 20 | return this.sendCount 21 | } 22 | } 23 | 24 | describe("heartbeat", () => { 25 | const logger = new NullLogger() 26 | 27 | it("sends a heartbeat every second", async () => { 28 | const connectionMock = new ConnectionMock() 29 | const hb = new Heartbeat(connectionMock, logger) 30 | 31 | hb.start(1) 32 | 33 | await eventually(async () => expect(connectionMock.getSendCount()).eq(4), 6000) 34 | hb.stop() 35 | }).timeout(10000) 36 | 37 | it("stop check", async () => { 38 | const connectionMock = new ConnectionMock() 39 | const hb = new Heartbeat(connectionMock, logger) 40 | hb.start(1) 41 | hb.stop() 42 | 43 | await wait(4000) 44 | expect(connectionMock.getSendCount()).lessThanOrEqual(1) 45 | }).timeout(10000) 46 | 47 | it("stops the current timeout so we can exit immediately", () => { 48 | const connectionMock = new ConnectionMock() 49 | const hb = new Heartbeat(connectionMock, logger) 50 | hb.start(200) 51 | 52 | hb.stop() 53 | }) 54 | 55 | it("starting the same object twice raises an exception", async () => { 56 | const connectionMock = new ConnectionMock() 57 | const hb = new Heartbeat(connectionMock, logger) 58 | hb.start(1) 59 | 60 | await expectToThrowAsync(async () => hb.start(1), Error, "HeartBeat already started") 61 | hb.stop() 62 | }) 63 | }) 64 | -------------------------------------------------------------------------------- /test/unit/murmur32.test.ts: 
-------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { murmur32 } from "../../src/hash/murmur32" 3 | 4 | describe("Murmur32x86 hashing algorithm", () => { 5 | it("the hashing function should produce results coherent with the implementation on other clients", () => { 6 | expect(murmur32("rabbit")).to.be.eql(3591948756) 7 | expect(murmur32("coders51")).to.be.eql(1856831182) 8 | expect(murmur32("d4c39ae6-2fc3-41a2-8771-161251e57d1a")).to.be.eql(1772425613) 9 | }) 10 | }) 11 | -------------------------------------------------------------------------------- /test/unit/publish_request.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { randomUUID } from "crypto" 3 | import { PublishRequest } from "../../src/requests/publish_request" 4 | 5 | describe("PublishRequest", () => { 6 | it("Produce a buffer for a long list of messages", () => { 7 | const publisherId = 1 8 | const maxFrameSize = 1024 9 | const messages = [...Array(100).keys()].map((idx) => { 10 | return { publishingId: BigInt(idx), message: { content: Buffer.from(randomUUID()) } } 11 | }) 12 | const pr = new PublishRequest({ publisherId, messages }) 13 | 14 | const written = pr.toBuffer({ maxSize: maxFrameSize }) 15 | 16 | expect(written.byteLength).eql(5313) 17 | }) 18 | }) 19 | -------------------------------------------------------------------------------- /test/unit/response_decoder.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { NoneCompression } from "../../src/compression" 3 | import { DecoderListenerFunc } from "../../src/decoder_listener" 4 | import { ResponseDecoder } from "../../src/response_decoder" 5 | import { PeerPropertiesResponse } from "../../src/responses/peer_properties_response" 6 | import { Response } from "../../src/responses/response" 7 | import { createConsoleLog } from "../support/util" 8 | import { BufferDataWriter } from "../../src/requests/buffer_data_writer" 9 | 10 | class MockDecoderListener { 11 | readonly responses: Response[] = [] 12 | 13 | reset() { 14 | this.responses.splice(0) 15 | } 16 | 17 | responseReceived(data: Response) { 18 | this.responses.push(data) 19 | } 20 | 21 | buildListener(): DecoderListenerFunc { 22 | this.reset() 23 | return (...args) => this.responseReceived(...args) 24 | } 25 | } 26 | 27 | describe("ResponseDecoder", () => { 28 | let decoder: ResponseDecoder 29 | const mockListener = new MockDecoderListener() 30 | const getCompressionBy = () => NoneCompression.create() 31 | 32 | beforeEach(() => { 33 | decoder = new ResponseDecoder(mockListener.buildListener(), createConsoleLog()) 34 | }) 35 | 36 | it("decode a buffer that contains a single response", () => { 37 | const data = createResponse({ key: PeerPropertiesResponse.key }) 38 | 39 | decoder.add(data, getCompressionBy) 40 | 41 | expect(mockListener.responses).lengthOf(1) 42 | }) 43 | 44 | it("decode a buffer that contains multiple responses", () => { 45 | const data = [ 46 | createResponse({ key: PeerPropertiesResponse.key }), 47 | createResponse({ key: PeerPropertiesResponse.key }), 48 | ] 49 | 50 | decoder.add(Buffer.concat(data), getCompressionBy) 51 | 52 | expect(mockListener.responses).lengthOf(2) 53 | }) 54 | }) 55 | 56 | function createResponse(params: { key: number; correlationId?: number; responseCode?: number }): Buffer { 57 | const bufferSize = 1024 58 | const 
bufferSizeParams = { maxSize: bufferSize } 59 | const dataWriter = new BufferDataWriter(Buffer.alloc(bufferSize), 4, bufferSizeParams) 60 | dataWriter.writeUInt16(params.key) 61 | dataWriter.writeUInt16(1) 62 | dataWriter.writeUInt32(params.correlationId || 101) 63 | dataWriter.writeUInt16(params.responseCode || 1) 64 | 65 | switch (params.key) { 66 | case PeerPropertiesResponse.key: 67 | dataWriter.writeInt32(0) 68 | break 69 | 70 | default: 71 | break 72 | } 73 | 74 | dataWriter.writePrefixSize() 75 | return dataWriter.toBuffer() 76 | } 77 | -------------------------------------------------------------------------------- /test/unit/stream_stats.test.ts: -------------------------------------------------------------------------------- 1 | import { Client } from "../../src" 2 | import { expect } from "chai" 3 | import { Rabbit } from "../support/rabbit" 4 | import { randomUUID } from "crypto" 5 | import { expectToThrowAsync, username, password } from "../support/util" 6 | import { createClient } from "../support/fake_data" 7 | 8 | describe("StreamStats", () => { 9 | const rabbit = new Rabbit(username, password) 10 | const testStreamName = "test-stream" 11 | let client: Client 12 | let publisherRef: string 13 | 14 | beforeEach(async () => { 15 | publisherRef = randomUUID() 16 | await rabbit.createStream(testStreamName) 17 | client = await createClient(username, password) 18 | }) 19 | 20 | afterEach(async () => { 21 | await client.close() 22 | await rabbit.deleteStream(testStreamName) 23 | }) 24 | 25 | it("gets statistics for a stream", async () => { 26 | const publisher = await client.declarePublisher({ stream: testStreamName, publisherRef }) 27 | for (let i = 0; i < 5; i++) { 28 | await publisher.send(Buffer.from(`test${randomUUID()}`)) 29 | } 30 | 31 | const stats = await client.streamStatsRequest(testStreamName) 32 | 33 | expect(stats.committedChunkId).to.be.a("BigInt") 34 | expect(stats.firstChunkId).to.be.a("BigInt") 35 | expect(stats.lastChunkId).to.be.a("BigInt") 36 | }).timeout(10000) 37 | 38 | it("returns an error when the stream does not exist", async () => { 39 | await expectToThrowAsync( 40 | () => client.streamStatsRequest("stream-does-not-exist"), 41 | Error, 42 | "Stream Stats command returned error with code 2 - Stream does not exist" 43 | ) 44 | }).timeout(10000) 45 | }) 46 | -------------------------------------------------------------------------------- /test/unit/util.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { isString } from "../../src/util" 3 | 4 | describe("Util tests", () => { 5 | describe("isString", () => { 6 | it("return false with a number", () => { 7 | const value = 1 8 | 9 | expect(isString(value)).false 10 | }) 11 | 12 | it("return false with a boolean", () => { 13 | const value = false 14 | 15 | expect(isString(value)).false 16 | }) 17 | 18 | it("return true with a string", () => { 19 | const value = "abc" 20 | 21 | expect(isString(value)).true 22 | }) 23 | 24 | it("return true with an empty string", () => { 25 | const value = "" 26 | 27 | expect(isString(value)).true 28 | }) 29 | }) 30 | }) 31 | -------------------------------------------------------------------------------- /test/unit/versions.test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai" 2 | import { checkServerDeclaredVersions, getClientSupportedVersions } from "../../src/versions" 3 | import { NullLogger } from "../../src/logger" 4 | 5 
| describe("Versions", () => { 6 | const serverVersion = "3.13.0-rc.4" 7 | const logger = new NullLogger() 8 | it("client-side version declaration", () => { 9 | expect(getClientSupportedVersions(serverVersion).sort()).eql([ 10 | { key: 22, maxVersion: 1, minVersion: 1 }, 11 | { key: 13, maxVersion: 1, minVersion: 1 }, 12 | { key: 29, maxVersion: 1, minVersion: 1 }, 13 | { key: 9, maxVersion: 1, minVersion: 1 }, 14 | { key: 1, maxVersion: 1, minVersion: 1 }, 15 | { key: 6, maxVersion: 1, minVersion: 1 }, 16 | { key: 14, maxVersion: 1, minVersion: 1 }, 17 | { key: 30, maxVersion: 1, minVersion: 1 }, 18 | { key: 27, maxVersion: 1, minVersion: 1 }, 19 | { key: 23, maxVersion: 1, minVersion: 1 }, 20 | { key: 15, maxVersion: 1, minVersion: 1 }, 21 | { key: 16, maxVersion: 1, minVersion: 1 }, 22 | { key: 21, maxVersion: 1, minVersion: 1 }, 23 | { key: 17, maxVersion: 1, minVersion: 1 }, 24 | { key: 2, maxVersion: 2, minVersion: 1 }, 25 | { key: 11, maxVersion: 1, minVersion: 1 }, 26 | { key: 5, maxVersion: 1, minVersion: 1 }, 27 | { key: 19, maxVersion: 1, minVersion: 1 }, 28 | { key: 18, maxVersion: 1, minVersion: 1 }, 29 | { key: 10, maxVersion: 1, minVersion: 1 }, 30 | { key: 28, maxVersion: 1, minVersion: 1 }, 31 | { key: 7, maxVersion: 1, minVersion: 1 }, 32 | { key: 20, maxVersion: 1, minVersion: 1 }, 33 | { key: 12, maxVersion: 1, minVersion: 1 }, 34 | { key: 24, maxVersion: 1, minVersion: 1 }, 35 | { key: 25, maxVersion: 1, minVersion: 1 }, 36 | { key: 8, maxVersion: 2, minVersion: 1 }, 37 | { key: 3, maxVersion: 1, minVersion: 1 }, 38 | { key: 4, maxVersion: 1, minVersion: 1 }, 39 | { key: 26, maxVersion: 1, minVersion: 1 }, 40 | ]) 41 | }) 42 | 43 | it("client-side version declaration with an older version of the server", () => { 44 | expect(getClientSupportedVersions("3.12.12").sort()).eql([ 45 | { key: 22, maxVersion: 1, minVersion: 1 }, 46 | { key: 13, maxVersion: 1, minVersion: 1 }, 47 | { key: 9, maxVersion: 1, minVersion: 1 }, 48 | { key: 1, maxVersion: 1, minVersion: 1 }, 49 | { key: 6, maxVersion: 1, minVersion: 1 }, 50 | { key: 14, maxVersion: 1, minVersion: 1 }, 51 | { key: 27, maxVersion: 1, minVersion: 1 }, 52 | { key: 23, maxVersion: 1, minVersion: 1 }, 53 | { key: 15, maxVersion: 1, minVersion: 1 }, 54 | { key: 16, maxVersion: 1, minVersion: 1 }, 55 | { key: 21, maxVersion: 1, minVersion: 1 }, 56 | { key: 17, maxVersion: 1, minVersion: 1 }, 57 | { key: 2, maxVersion: 1, minVersion: 1 }, 58 | { key: 11, maxVersion: 1, minVersion: 1 }, 59 | { key: 5, maxVersion: 1, minVersion: 1 }, 60 | { key: 19, maxVersion: 1, minVersion: 1 }, 61 | { key: 18, maxVersion: 1, minVersion: 1 }, 62 | { key: 10, maxVersion: 1, minVersion: 1 }, 63 | { key: 28, maxVersion: 1, minVersion: 1 }, 64 | { key: 7, maxVersion: 1, minVersion: 1 }, 65 | { key: 20, maxVersion: 1, minVersion: 1 }, 66 | { key: 12, maxVersion: 1, minVersion: 1 }, 67 | { key: 24, maxVersion: 1, minVersion: 1 }, 68 | { key: 25, maxVersion: 1, minVersion: 1 }, 69 | { key: 8, maxVersion: 1, minVersion: 1 }, 70 | { key: 3, maxVersion: 1, minVersion: 1 }, 71 | { key: 4, maxVersion: 1, minVersion: 1 }, 72 | { key: 26, maxVersion: 1, minVersion: 1 }, 73 | ]) 74 | }) 75 | 76 | it("compare versions, server-side all defaults, ok", () => { 77 | expect(checkServerDeclaredVersions([], logger)).to.eql(true) 78 | }) 79 | 80 | it("compare versions, server-side specifies greater max version, ok", () => { 81 | expect(checkServerDeclaredVersions([{ key: 20, maxVersion: 99, minVersion: 1 }], logger)).to.eql(true) 82 | }) 83 | 84 | it("compare 
versions, server-side specifies message, fallback on client side, ok", () => { 85 | expect(checkServerDeclaredVersions([{ key: -99, maxVersion: 1, minVersion: 1 }], logger)).to.eql(true) 86 | }) 87 | 88 | it("compare versions, server-side specifies greater min version, ko", () => { 89 | expect(checkServerDeclaredVersions([{ key: 20, maxVersion: 10, minVersion: 2 }], logger)).to.eql(false) 90 | }) 91 | 92 | it("compare versions, server-side specifies smaller max version, ko", () => { 93 | expect(checkServerDeclaredVersions([{ key: 20, maxVersion: -1, minVersion: -2 }], logger)).to.eql(false) 94 | }) 95 | }) 96 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@tsconfig/node-lts/tsconfig.json", 3 | "compilerOptions": { 4 | "preserveConstEnums": true, 5 | "noImplicitReturns": true, 6 | "noFallthroughCasesInSwitch": true, 7 | "noImplicitThis": true, 8 | "strictNullChecks": true, 9 | "noUnusedLocals": true, 10 | "noUnusedParameters": true, 11 | "sourceMap": true, 12 | "outDir": "dist", 13 | "declaration": true 14 | }, 15 | "include": ["src/**/*"], 16 | "exclude": ["node_modules", "**/*.spec.ts"] 17 | } 18 | --------------------------------------------------------------------------------