├── .github ├── ISSUE_TEMPLATE │ └── bug_report.md ├── PULL_REQUEST_TEMPLATE.md └── workflows │ └── tests.yml ├── .gitignore ├── .gitlab-ci.yml ├── .rspec ├── .rubocop.yml ├── Appraisals ├── CHANGELOG.md ├── Gemfile ├── LICENSE ├── README.md ├── Rakefile ├── bin ├── console ├── setup └── test ├── config.ru ├── dip.yml ├── docker-compose.yml ├── exe └── kafka_consumer ├── lefthook-local.dip_example.yml ├── lefthook.yml ├── lib ├── generators │ └── kafka_consumer │ │ ├── concerns │ │ └── configuration.rb │ │ ├── consumer │ │ ├── USAGE │ │ ├── consumer_generator.rb │ │ └── templates │ │ │ ├── consumer.rb.erb │ │ │ └── consumer_group.yml.erb │ │ ├── inbox_consumer │ │ ├── USAGE │ │ ├── inbox_consumer_generator.rb │ │ └── templates │ │ │ └── consumer_group.yml.erb │ │ └── install │ │ ├── USAGE │ │ ├── install_generator.rb │ │ └── templates │ │ ├── Kafkafile │ │ └── kafka_consumer.yml └── sbmt │ ├── kafka_consumer.rb │ └── kafka_consumer │ ├── app_initializer.rb │ ├── base_consumer.rb │ ├── cli.rb │ ├── client_configurer.rb │ ├── config.rb │ ├── config │ ├── auth.rb │ ├── consumer.rb │ ├── consumer_group.rb │ ├── deserializer.rb │ ├── kafka.rb │ ├── metrics.rb │ ├── probes.rb │ ├── probes │ │ ├── endpoints.rb │ │ ├── liveness_probe.rb │ │ └── readiness_probe.rb │ └── topic.rb │ ├── inbox_consumer.rb │ ├── instrumentation │ ├── base_monitor.rb │ ├── chainable_monitor.rb │ ├── listener_helper.rb │ ├── liveness_listener.rb │ ├── logger_listener.rb │ ├── open_telemetry_loader.rb │ ├── open_telemetry_tracer.rb │ ├── readiness_listener.rb │ ├── sentry_tracer.rb │ ├── tracer.rb │ ├── tracing_monitor.rb │ └── yabeda_metrics_listener.rb │ ├── probes │ ├── host.rb │ └── probe.rb │ ├── railtie.rb │ ├── routing │ ├── consumer_mapper │ │ └── base.rb │ ├── karafka_v1_consumer_mapper.rb │ └── karafka_v2_consumer_mapper.rb │ ├── serialization │ ├── base_deserializer.rb │ ├── json_deserializer.rb │ ├── null_deserializer.rb │ └── protobuf_deserializer.rb │ ├── server.rb │ ├── simple_logging_consumer.rb │ ├── testing.rb │ ├── testing │ └── shared_contexts │ │ └── with_sbmt_karafka_consumer.rb │ ├── types.rb │ ├── version.rb │ └── yabeda_configurer.rb ├── rubocop └── rspec.yml ├── sbmt-kafka_consumer.gemspec └── spec ├── factories ├── inbox_item.rb └── karafka │ ├── batch_metadata.rb │ ├── message.rb │ └── metadata.rb ├── internal ├── Kafkafile ├── app │ ├── interactors │ │ └── test_inbox_item_transport.rb │ └── models │ │ ├── application_record.rb │ │ └── test_inbox_item.rb ├── config │ ├── database.yml │ ├── initializers │ │ ├── open_telemetry.rb │ │ ├── outbox.rb │ │ ├── protobuf.rb │ │ └── sentry.rb │ ├── kafka_consumer.yml │ └── outbox.yml ├── db │ └── schema.rb ├── deps │ └── services │ │ └── sso │ │ └── events │ │ └── shopper_registration.proto └── pkg │ └── client │ └── sso │ └── events │ └── shopper_registration_pb.rb ├── rails_helper.rb ├── sbmt ├── kafka_consumer │ ├── base_consumer_spec.rb │ ├── client_configurer_spec.rb │ ├── config │ │ ├── auth_config_spec.rb │ │ ├── kafka_config_spec.rb │ │ ├── metrics_spec.rb │ │ └── probes_spec.rb │ ├── config_spec.rb │ ├── inbox_consumer_spec.rb │ ├── instrumentation │ │ ├── listener_helper_spec.rb │ │ ├── liveness_listener_spec.rb │ │ ├── logger_listener_spec.rb │ │ ├── open_telemetry_tracer_spec.rb │ │ ├── readiness_listener_spec.rb │ │ ├── sentry_tracer_spec.rb │ │ ├── tracing_monitor_spec.rb │ │ └── yabeda_metrics_listener_spec.rb │ ├── probes │ │ ├── host_spec.rb │ │ └── probe_spec.rb │ ├── routing │ │ └── karafka_v1_consumer_mapper_spec.rb │ └── 
serialization │ │ └── null_deserializer_spec.rb └── kafka_consumer_spec.rb └── spec_helper.rb /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Provide kafka_consumer.yml config 16 | 2. Run command 17 | 3. See error 18 | 19 | **Expected behavior** 20 | A clear and concise description of what you expected to happen. 21 | 22 | **Screenshots** 23 | If applicable, add screenshots to help explain your problem. 24 | 25 | **Context (please complete the following information):** 26 | - Ruby version 27 | - Rails version 28 | - Gem version 29 | 30 | **Additional context** 31 | Add any other context about the problem here. 32 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | # Context 2 | 3 | 6 | - 7 | 8 | ## Related tickets 9 | 10 | - 11 | 12 | # What's inside 13 | 14 | 19 | - [x] A 20 | 21 | # Checklist: 22 | 23 | - [ ] I have added tests 24 | - [ ] I have made corresponding changes to the documentation 25 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Ruby 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ '**' ] 8 | 9 | jobs: 10 | lint: 11 | runs-on: ubuntu-latest 12 | env: 13 | RUBY_VERSION: "3.3" 14 | name: Rubocop 15 | steps: 16 | - name: Checkout code 17 | uses: actions/checkout@v3 18 | - name: Setup Ruby w/ same version as image 19 | uses: ruby/setup-ruby@v1 20 | with: 21 | ruby-version: "3.3" 22 | - name: Install dependencies 23 | run: | 24 | gem install dip 25 | dip bundle install 26 | - name: Run linter 27 | run: dip rubocop 28 | 29 | test: 30 | runs-on: ubuntu-latest 31 | strategy: 32 | fail-fast: false 33 | matrix: 34 | ruby: [ '3.0', '3.1', '3.2', '3.3' ] 35 | env: 36 | RUBY_VERSION: ${{ matrix.ruby }} 37 | name: Ruby ${{ matrix.ruby }} 38 | steps: 39 | - name: Checkout code 40 | uses: actions/checkout@v3 41 | - name: Setup Ruby w/ same version as image 42 | uses: ruby/setup-ruby@v1 43 | with: 44 | ruby-version: ${{ matrix.ruby }} 45 | - name: Install dependencies 46 | run: | 47 | gem install dip 48 | dip provision 49 | - name: Run tests 50 | run: dip appraisal rspec --format RspecJunitFormatter --out test-results/rspec_${{ matrix.ruby }}.xml --format documentation 51 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | /.bundle/ 3 | /.yardoc 4 | /_yardoc/ 5 | /coverage/ 6 | /doc/ 7 | /pkg/ 8 | /spec/reports/ 9 | /tmp/ 10 | /log/ 11 | /test-results/ 12 | .rspec_status 13 | lefthook-local.yml 14 | /Gemfile.lock 15 | /gemfiles/*gemfile* 16 | /spec/internal/log/*.log 17 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | include: 2 | - project: "nstmrt/rubygems/templates" 3 | ref: master 4 | file: "build-rubygems.yml" 5 | 6 | lint:
7 | stage: test 8 | image: ${BUILD_CONF_HARBOR_REGISTRY}/dhub/library/ruby:3.3 9 | tags: 10 | - paas-tests 11 | script: 12 | - bundle install 13 | - bundle exec rubocop 14 | 15 | tests: 16 | stage: test 17 | image: ${BUILD_CONF_HARBOR_REGISTRY}/dhub/library/ruby:$RUBY_VERSION 18 | tags: 19 | - paas-tests 20 | parallel: 21 | matrix: 22 | - RUBY_VERSION: ['3.0', '3.1', '3.2', '3.3'] 23 | services: 24 | - name: ${BUILD_CONF_HARBOR_REGISTRY}/dhub/library/postgres:13 25 | alias: postgres 26 | variables: 27 | POSTGRES_HOST_AUTH_METHOD: trust 28 | DATABASE_URL: postgres://postgres:secret@postgres:5432 29 | before_script: 30 | - gem sources --remove https://rubygems.org/ 31 | - gem sources --add ${RUBYGEMS_PUBLIC_SOURCE} 32 | - gem install bundler -v 2.3.26 33 | - bin/setup 34 | script: 35 | - bundle exec appraisal rspec --format RspecJunitFormatter --out test-results/rspec_$RUBY_VERSION.xml --format documentation 36 | artifacts: 37 | reports: 38 | junit: test-results/rspec*.xml 39 | -------------------------------------------------------------------------------- /.rspec: -------------------------------------------------------------------------------- 1 | --color 2 | --require spec_helper 3 | --require rails_helper 4 | -------------------------------------------------------------------------------- /.rubocop.yml: -------------------------------------------------------------------------------- 1 | inherit_mode: 2 | merge: 3 | - Exclude 4 | 5 | plugins: 6 | - rubocop-performance 7 | - rubocop-rails 8 | - rubocop-rspec 9 | 10 | require: 11 | - standard 12 | 13 | inherit_gem: 14 | standard: config/base.yml 15 | 16 | inherit_from: 17 | - rubocop/rspec.yml 18 | 19 | AllCops: 20 | NewCops: enable 21 | SuggestExtensions: false 22 | TargetRubyVersion: 2.7 23 | TargetRailsVersion: 6.0 24 | Exclude: 25 | - spec/internal/pkg/**/* 26 | 27 | RSpec/VerifiedDoubles: 28 | Exclude: 29 | - spec/**/*_spec.rb 30 | 31 | Style/SingleLineMethods: 32 | Enabled: false 33 | 34 | Style/EmptyMethod: 35 | Enabled: false 36 | -------------------------------------------------------------------------------- /Appraisals: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # See compatibility table at https://www.fastruby.io/blog/ruby/rails/versions/compatibility-table.html 4 | 5 | versions_map = { 6 | "6.1" => %w[2.7 3.0], 7 | "7.0" => %w[3.1], 8 | "7.1" => %w[3.2], 9 | "7.2" => %w[3.3], 10 | "8.0" => %w[3.3] 11 | } 12 | 13 | current_ruby_version = RUBY_VERSION.split(".").first(2).join(".") 14 | 15 | versions_map.each do |rails_version, ruby_versions| 16 | ruby_versions.each do |ruby_version| 17 | next if ruby_version != current_ruby_version 18 | 19 | appraise "rails-#{rails_version}" do 20 | gem "rails", "~> #{rails_version}.0" 21 | end 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](http://keepachangelog.com/) 6 | and this project adheres to [Semantic Versioning](http://semver.org/). 
7 | 8 | ## [Unreleased] - yyyy-mm-dd 9 | 10 | ### Added 11 | 12 | ### Changed 13 | 14 | ### Fixed 15 | 16 | ## [3.5.1] - 2025-04-18 17 | 18 | ### Fixed 19 | 20 | - Rename `yabeda_kafka_consumer_consumer_group_rebalances` gauge to `yabeda_kafka_consumer_group_rebalances` 21 | - Change type of `yabeda_kafka_consumer_group_rebalances` from `Counter` to `Gauge` 22 | 23 | ## [3.5.0] - 2025-04-17 24 | 25 | ### Added 26 | 27 | - Increase default timeout to 300 seconds to better accommodate temporary delays. 28 | - Make liveness probe listener thread-safe using a mutex. 29 | - Use monotonic clock for more accurate time measurements in liveness checks. 30 | - Simplify liveness status checking logic. 31 | - Support Rack v3 32 | 33 | ## [3.4.2] - 2025-04-02 34 | 35 | ### Fixed 36 | - Properly resolve constant `YabedaConfigurer` 37 | 38 | ## [3.4.1] - 2025-03-06 39 | 40 | ### Fixed 41 | - reset consumer offset lag metric to zero after consumer group rebalance to avoid reporting stale metrics 42 | 43 | ## [3.4.0] - 2025-01-27 44 | 45 | ### Added 46 | 47 | - Added support for warning log level in LoggerListener error handling 48 | - Consider duplicate messages as warnings in logs 49 | 50 | ### Changed 51 | 52 | ### Fixed 53 | 54 | ## [3.3.2] - 2024-12-27 55 | 56 | ### Fixed 57 | 58 | - Fix gemspec URL 59 | 60 | ## [3.3.1] - 2024-12-26 61 | 62 | ### Fixed 63 | 64 | - Fix yabeda metrics initialization 65 | 66 | ## [3.3.0] - 2024-09-30 67 | 68 | ### Added 69 | 70 | - inheritance of kafka settings for topic 71 | 72 | ### Fixed 73 | 74 | - support `karafka 2.4.12` 75 | - properly report metrics for consumer offset lag when partition fetch assignment was lost 76 | 77 | ## [3.2.2] - 2024-09-23 78 | 79 | ### Fixed 80 | 81 | - log OTEL `trace_id` 82 | 83 | ## [3.2.1] - 2024-09-20 84 | 85 | ### Fixed 86 | 87 | - Limit Karafka version to less than 2.4.12 since `bootstrap.servers` has been made required 88 | 89 | ## [3.2.0] - 2024-09-17 90 | 91 | ### Added 92 | 93 | - Wrap logs for synchronous messages and errors in logger tags 94 | 95 | ## [3.1.0] - 2024-09-09 96 | 97 | ### Fixed 98 | 99 | - Refactor consumer class initialization 100 | 101 | ## [3.0.0] - 2024-09-04 102 | 103 | ### BREAKING 104 | 105 | - Drop support for Ruby 2.7 106 | - Drop support for Rails 6.0 107 | - Add support for Karafka 2.4 108 | 109 | ### Fixed 110 | 111 | - Support consumer group mappers to preserve backward compatibility of consumer group naming 112 | 113 | ## [2.8.0] - 2024-09-09 114 | 115 | ### Fixed 116 | 117 | - Refactor consumer class initialization 118 | 119 | ## [2.7.1] - 2024-08-01 120 | 121 | ### Fixed 122 | 123 | - Add mock `mark_as_consumed` for tests 124 | 125 | ## [2.7.0] - 2024-07-30 126 | 127 | ### Changed 128 | 129 | - When using strategy `cooperative-sticky`, method `mark_as_consumed` is used ([more details](https://github.com/karafka/karafka/wiki/FAQ#why-when-using-cooperative-sticky-rebalance-strategy-all-topics-get-revoked-on-rebalance)) 130 | 131 | ## [2.6.1] - 2024-07-05 132 | 133 | ### Fixed 134 | 135 | - Use the current `trace_id` instead of creating a new one 136 | 137 | ## [2.6.0] - 2024-07-01 138 | 139 | ### Added 140 | 141 | - Added instrumentation for methods `process_message` and `mark_as_consumed!` 142 | 143 | ### Fixed 144 | 145 | - Switch from `do_consume(message)` to `yield` 146 | 147 | ## [2.5.0] - 2024-06-24 148 | 149 | ### Added 150 | 151 | - Added option `max_error_count` for liveness probes, which is triggered when `librdkafka.error` occurs 152 | 153 | ## [2.4.1] - 2024-06-15 154 | 155 | ### Fixed 156 | 157 | - Fixed display 
of metrics `kafka_api_calls` and `kafka_api_errors` 158 | 159 | ## [2.4.0] - 2024-06-06 160 | 161 | ### Added 162 | 163 | - Added option `middlewares` to add middleware before message processing 164 | 165 | ## [2.3.1] - 2024-06-05 166 | 167 | ### Fixed 168 | 169 | - Rename `export_batch` to `process_batch` 170 | 171 | ## [2.3.0] - 2024-05-30 172 | 173 | ### Added 174 | 175 | - New config option `partition_assignment_strategy` 176 | 177 | ### Changed 178 | 179 | - Raise an exception when using the `partition.assignment.strategy` option within `kafka_options` for topics. 180 | 181 | ## [2.2.0] - 2024-05-13 182 | 183 | ### Changed 184 | 185 | - Drop support for Rails 5.2 186 | - Add support for Ruby 3.3 187 | 188 | ## [2.1.0] - 2024-05-13 189 | 190 | ### Added 191 | 192 | - Implemented method `export_batch` for processing messages in batches 193 | 194 | ## [2.0.1] - 2024-05-08 195 | 196 | ### Fixed 197 | 198 | - Limit the Karafka version to less than `2.4` because it dropped the consumer group mapping 199 | 200 | ## [2.0.0] - 2024-01-30 201 | 202 | ### Changed 203 | 204 | - Remove `sbmt-dev` 205 | 206 | ## [1.0.0] - 2024-01-12 207 | 208 | ### Added 209 | 210 | - Use mainstream karafka instead of custom fork 211 | 212 | ## [0.23.0] - 2024-01-12 213 | 214 | ### Added 215 | 216 | - ability to override `kafka_options` for topic 217 | 218 | ## [0.22.0] - 2024-01-09 219 | 220 | ### Removed 221 | 222 | - useless `outbox_producer` param for `InboxConsumer` class 223 | - useless log messages from `InboxConsumer` class 224 | 225 | ## [0.21.0] - 2024-01-09 226 | 227 | ### Fixed 228 | 229 | - initialization of proxy consumer classes 230 | - consumer class name in sentry's transaction name 231 | 232 | ## [0.20.0] - 2024-01-09 233 | 234 | ### Added 235 | 236 | - New config options `metrics` 237 | - `metrics.port` for a metrics port that is different from the probes port 238 | - `metrics.path` for a metrics path 239 | 240 | ## [0.19.2] - 2023-10-18 241 | 242 | ### Fixed 243 | 244 | - Stub kafka_client to prevent calls to librdkafka: fixes SEGFAULT in parallel tests 245 | 246 | ## [0.19.1] - 2023-10-05 247 | 248 | ### Fixed 249 | 250 | - disable karafka's `config.strict_topics_namespacing` 251 | 252 | ## [0.19.0] - 2023-09-29 253 | 254 | ### Added 255 | 256 | - `outbox_producer` configuration flag 257 | 258 | ## [0.18.4] - 2023-09-26 259 | 260 | ### Fixed 261 | 262 | - Use `Rails.application.executor.wrap` instead of manual AR connection clearing 263 | 264 | ## [0.18.3] - 2023-09-15 265 | 266 | ### Fixed 267 | 268 | - Fix broken outbox item generator call in the `kafka_consumer:inbox_consumer` generator 269 | 270 | ## [0.18.2] - 2023-09-14 271 | 272 | ### Fixed 273 | 274 | - Properly extract opentelemetry context from kafka message headers 275 | 276 | ## [0.18.1] - 2023-09-13 277 | 278 | ### Fixed 279 | 280 | - Port `v0.17.5` (properly clear `ActiveRecord` connections in case `skip_on_error` option is used) to master (v0.18) 281 | 282 | ## [0.18.0] - 2023-09-11 283 | 284 | ### Added 285 | 286 | - OpenTelemetry tracing 287 | 288 | ## [0.17.5] - 2023-09-13 289 | 290 | ### Fixed 291 | 292 | - Properly clear `ActiveRecord` connections in case `skip_on_error` option is used 293 | 294 | ## [0.17.4] - 2023-09-05 295 | 296 | ### Fixed 297 | 298 | - Latency metrics in seconds instead of ms 299 | 300 | ## [0.17.3] - 2023-08-31 301 | 302 | ### Fixed 303 | 304 | - Decreased sleep time on db error in a consumer 305 | 306 | ## [0.17.2] - 2023-08-16 307 | 308 | ### Fixed 309 | 310 | - Fix 
`message.metadata.key` validation if key is an empty string 311 | 312 | ## [0.17.1] - 2023-08-08 313 | 314 | ### Fixed 315 | 316 | - Check Idempotency-Key for an empty string 317 | 318 | ## [0.17.0] - 2023-08-07 319 | 320 | ### Added 321 | 322 | - ability to configure consumer group mapper in `kafka_consumer.yml` (needed for proper migration from existing karafka v2 based consumers) 323 | - ability to define/override inbox-item attributes in InboxConsumer 324 | 325 | ### Fixed 326 | - report `kafka_consumer_inbox_consumes` metric with tag `status = skipped` (instead of `failure`) if skip_on_error is enabled on InboxConsumer 327 | 328 | ## [0.16.0] - 2023-07-27 329 | 330 | ### Added 331 | 332 | - additional tags (client, group_id, partition, topic) for metric `kafka_consumer_inbox_consumes` 333 | 334 | ## [0.15.0] - 2023-07-21 335 | 336 | ### Added 337 | 338 | - `kafka_consumer:install` generator 339 | - `kafka_consumer:consumer_group` generator 340 | - `kafka_consumer:consumer` generator 341 | 342 | ## [0.14.2] - 2023-07-19 343 | 344 | ### Changed 345 | - `.clear_all_connections!` is now called for all DB roles 346 | 347 | ## [0.14.1] - yyyy-mm-dd 348 | 349 | ### Added 350 | - add label `api` for group `kafka_api` 351 | 352 | ### Changed 353 | - README improvements 354 | 355 | ## [0.14.0] - 2023-07-06 356 | 357 | ### Added 358 | - report message payload and headers to Sentry if consumer detailed logging is enabled 359 | 360 | ## [0.13.1] - 2023-07-05 361 | 362 | ### Added 363 | - `event_key` callback added to `Sbmt::KafkaConsumer::InboxConsumer` 364 | 365 | ## [0.13.0] - 2023-06-20 366 | 367 | ### Changed 368 | - logging / instrumentation improvements 369 | 370 | ## [0.12.0] - 2023-06-20 371 | 372 | ### Changed 373 | - README improvements 374 | - update sbmt-waterdrop (via sbmt-karafka) to fix karafka-rdkafka 0.13 compatibility issue 375 | 376 | ## [0.11.0] - 2023-06-13 377 | 378 | ### Added 379 | - `skip_on_error` consumer option to skip message processing (and commit offsets) if an exception was raised 380 | 381 | ## [0.10.0] - 2023-06-07 382 | 383 | ### Added 384 | - `SimpleLoggingConsumer`, which just consumes/logs messages, can be used for debugging purposes 385 | 386 | ## [0.9.0] - 2023-06-06 387 | 388 | ### Changed 389 | - add custom `ConsumerMapper` to be consistent with KarafkaV1 consumer-group naming conventions (e.g. 
karafka v1 uses underscored client-id in consumer group name) 390 | - reuse with_db_retry: release the ActiveRecord connection every time after message processing, in case there's a connection-pool degradation 391 | 392 | ## [0.8.0] - 2023-06-01 393 | 394 | ### Changed 395 | - update sbmt-karafka to 2.1.3 396 | - remove db retry logic as `ActiveRecord::Base::clear_active_connections!` is already handled by karafka v2 after processing a batch 397 | - async metrics reporting for `statistics.emitted` event to prevent rdkafka's main thread hanging, see https://github.com/karafka/karafka/pull/1420/files 398 | - use Rails logger by default 399 | - use `$stdout.sync = true` in consumer server process to avoid STDOUT buffering issues in docker/k8s 400 | 401 | ## [0.7.1] - 2023-05-31 402 | 403 | ### Fixed 404 | - db error logging in base consumer 405 | 406 | ## [0.7.0] - 2023-05-30 407 | 408 | ### Added 409 | - add `Sbmt::KafkaConsumer::Instrumentation::LivenessListener` and `Sbmt::KafkaConsumer::Instrumentation::ReadinessListener` listeners 410 | - add `probes` option 411 | - add `HttpHealthCheck` server with probes' endpoints 412 | 413 | ## [0.6.1] - 2023-05-30 414 | 415 | ### Added 416 | - set default `source: "KAFKA"` option when creating `inbox_item` in `InboxConsumer` 417 | 418 | ## [0.6.0] - 2023-05-29 419 | 420 | ### Added 421 | - add `manual_offset_management` topic option (defaults to true) 422 | - add consumer `group_id` to inbox-item metadata (InboxConsumer) 423 | 424 | ## [0.5.1] - 2023-05-25 425 | 426 | ### Fixed 427 | - sentry tracing when instrumentation event is not an exception 428 | - payload deserialization if skip_decoding_error is enabled 429 | 430 | ## [0.5.0] - 2023-05-23 431 | 432 | ### Changed 433 | - add default deserializer (NullDeserializer) to config 434 | - refactor logging 435 | 436 | ## [0.4.0] - 2023-05-19 437 | 438 | ### Changed 439 | - refactor consumer groups config 440 | 441 | ## [0.3.0] - 2023-05-19 442 | 443 | ### Added 444 | - add timeout aliases to kafka config 445 | - README updates 446 | 447 | ## [0.2.0] - 2023-05-16 448 | 449 | ### Added 450 | - implement consumer metrics 451 | 452 | ## [Unreleased] - 2023-05-03 453 | 454 | ### Added 455 | - base config loader via AnywayConfig 456 | 457 | ### Changed 458 | 459 | ### Fixed 460 | 461 | ## [Unreleased] - 2023-04-26 462 | 463 | ### Added 464 | - BaseConsumer 465 | - InboxConsumer 466 | - Instrumentation listeners: sentry, logger, yabeda 467 | 468 | ### Changed 469 | 470 | ### Fixed 471 | 472 | ## [Unreleased] 473 | 474 | ## [0.1.0] - 2023-04-19 475 | 476 | - Initial release 477 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source ENV.fetch("RUBYGEMS_PUBLIC_SOURCE", "https://rubygems.org/") 4 | 5 | gemspec 6 | 7 | # FIXME: remove this after dropping support for Ruby 2.7 8 | gem "ffi", "< 1.17" 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Kuper Tech 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | 
copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "bundler/gem_tasks" 4 | require "rspec/core/rake_task" 5 | 6 | RSpec::Core::RakeTask.new(:spec) 7 | 8 | require "rubocop/rake_task" 9 | 10 | RuboCop::RakeTask.new 11 | 12 | task default: %i[spec rubocop] 13 | -------------------------------------------------------------------------------- /bin/console: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | require "bundler/setup" 5 | require "sbmt/kafka_consumer" 6 | 7 | # You can add fixtures and/or initialization code here to make experimenting 8 | # with your gem easier. You can also use a different console, if you like. 9 | 10 | # (If you use this, don't forget to add pry to your Gemfile!) 11 | # require "pry" 12 | # Pry.start 13 | 14 | require "irb" 15 | IRB.start(__FILE__) 16 | -------------------------------------------------------------------------------- /bin/setup: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euxo pipefail 4 | 5 | bundle install 6 | bundle exec appraisal install 7 | -------------------------------------------------------------------------------- /bin/test: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euxo pipefail 4 | 5 | bundle exec rubocop 6 | bundle exec appraisal rspec 7 | -------------------------------------------------------------------------------- /config.ru: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rubygems" 4 | require "bundler" 5 | 6 | Bundler.require :default, :development 7 | 8 | Combustion.initialize! 
:all 9 | run Combustion::Application 10 | -------------------------------------------------------------------------------- /dip.yml: -------------------------------------------------------------------------------- 1 | version: '7' 2 | 3 | environment: 4 | RUBY_VERSION: '3.3' 5 | 6 | compose: 7 | files: 8 | - docker-compose.yml 9 | 10 | interaction: 11 | bash: 12 | description: Open the Bash shell in app's container 13 | service: ruby 14 | command: /bin/bash 15 | 16 | bundle: 17 | description: Run Bundler commands 18 | service: ruby 19 | command: bundle 20 | 21 | rails: 22 | description: Run RoR commands 23 | service: ruby 24 | command: bundle exec rails 25 | 26 | appraisal: 27 | description: Run Appraisal commands 28 | service: ruby 29 | command: bundle exec appraisal 30 | 31 | rspec: 32 | description: Run Rspec commands 33 | service: ruby 34 | command: bundle exec rspec 35 | subcommands: 36 | all: 37 | command: bundle exec appraisal rspec 38 | rails-6.1: 39 | command: bundle exec appraisal rails-6.1 rspec 40 | rails-7.0: 41 | command: bundle exec appraisal rails-7.0 rspec 42 | rails-7.1: 43 | command: bundle exec appraisal rails-7.1 rspec 44 | rails-7.2: 45 | command: bundle exec appraisal rails-7.2 rspec 46 | 47 | rubocop: 48 | description: Run Ruby linter 49 | service: ruby 50 | command: bundle exec rubocop 51 | 52 | setup: 53 | description: Install deps 54 | service: ruby 55 | command: bin/setup 56 | 57 | test: 58 | description: Run linters, run all tests 59 | service: ruby 60 | command: bin/test 61 | 62 | kafka-consumer: 63 | description: Run kafka consumer 64 | service: ruby 65 | command: bundle exec kafka_consumer 66 | 67 | kafka-producer: 68 | description: Run kafka producer commands 69 | service: kafka 70 | command: kafka-console-producer.sh --bootstrap-server kafka:9092 71 | subcommands: 72 | inbox: 73 | command: kafka-console-producer.sh --bootstrap-server kafka:9092 --topic topic_with_inbox_items 74 | json: 75 | command: kafka-console-producer.sh --bootstrap-server kafka:9092 --topic topic_with_json_data 76 | protobuf: 77 | command: kafka-console-producer.sh --bootstrap-server kafka:9092 --topic topic_with_protobuf_data 78 | 79 | provision: 80 | - dip compose down --volumes 81 | - cp -f lefthook-local.dip_example.yml lefthook-local.yml 82 | - rm -f Gemfile.lock 83 | - rm -f gemfiles/*gemfile* 84 | - dip setup 85 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | ruby: 3 | image: ruby:${RUBY_VERSION:-3.2} 4 | environment: 5 | HISTFILE: /app/tmp/.bash_history 6 | BUNDLE_PATH: /usr/local/bundle 7 | BUNDLE_CONFIG: /app/.bundle/config 8 | DATABASE_URL: postgres://postgres:@postgres:5432 9 | KAFKAFILE: spec/internal/Kafkafile 10 | depends_on: 11 | kafka: 12 | condition: service_started 13 | postgres: 14 | condition: service_started 15 | command: bash 16 | working_dir: /app 17 | volumes: 18 | - .:/app:cached 19 | - bundler_data:/usr/local/bundle 20 | 21 | postgres: 22 | image: postgres:13 23 | environment: 24 | POSTGRES_HOST_AUTH_METHOD: trust 25 | ports: 26 | - 5432 27 | healthcheck: 28 | test: pg_isready -U postgres -h 127.0.0.1 29 | interval: 10s 30 | 31 | kafka: 32 | image: bitnami/kafka:2.7.0 33 | ports: 34 | - '9092:9092' 35 | environment: 36 | - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 37 | - ALLOW_PLAINTEXT_LISTENER=yes 38 | - KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true 39 | - 
KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CLIENT:PLAINTEXT,INTERNAL:PLAINTEXT 40 | - KAFKA_CFG_LISTENERS=CLIENT://:9092,INTERNAL://:9091 41 | - KAFKA_CFG_ADVERTISED_LISTENERS=CLIENT://kafka:9092,INTERNAL://kafka:9091 42 | - KAFKA_INTER_BROKER_LISTENER_NAME=INTERNAL 43 | depends_on: 44 | - zookeeper 45 | healthcheck: 46 | # we don't have `nc` installed in kafka image :( 47 | test: 48 | - CMD-SHELL 49 | - echo 'exit' | curl --silent -f telnet://0.0.0.0:9092 50 | interval: 15s 51 | timeout: 5s 52 | retries: 15 53 | 54 | zookeeper: 55 | image: bitnami/zookeeper:3.5 56 | ports: 57 | - '2181:2181' 58 | environment: 59 | - ALLOW_ANONYMOUS_LOGIN=yes 60 | healthcheck: 61 | test: ["CMD-SHELL", "echo ruok | nc localhost 2181"] 62 | interval: 2s 63 | timeout: 2s 64 | retries: 15 65 | 66 | volumes: 67 | bundler_data: 68 | kafka: 69 | -------------------------------------------------------------------------------- /exe/kafka_consumer: -------------------------------------------------------------------------------- 1 | #!/usr/local/bin/ruby 2 | # frozen_string_literal: true 3 | 4 | require "bundler/setup" 5 | require "sbmt/kafka_consumer" 6 | 7 | # rubocop:disable Lint/RescueException 8 | begin 9 | Sbmt::KafkaConsumer::CLI.start(ARGV) 10 | rescue Exception => e 11 | warn "KafkaConsumer exited with error" 12 | warn(e.message) if e.respond_to?(:message) 13 | warn(e.backtrace.join("\n")) if e.respond_to?(:backtrace) && e.backtrace.respond_to?(:join) 14 | exit 1 15 | end 16 | # rubocop:enable Lint/RescueException 17 | -------------------------------------------------------------------------------- /lefthook-local.dip_example.yml: -------------------------------------------------------------------------------- 1 | pre-commit: 2 | commands: 3 | rubocop: 4 | run: dip {cmd} 5 | -------------------------------------------------------------------------------- /lefthook.yml: -------------------------------------------------------------------------------- 1 | pre-commit: 2 | commands: 3 | rubocop: 4 | tags: backend 5 | glob: "{*.rb,**/*.rb,Gemfile,Rakefile}" 6 | run: bundle exec rubocop -A --force-exclusion {staged_files} && git add {staged_files} 7 | -------------------------------------------------------------------------------- /lib/generators/kafka_consumer/concerns/configuration.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module KafkaConsumer 4 | module Generators 5 | module Concerns 6 | module Configuration 7 | extend ActiveSupport::Concern 8 | 9 | CONFIG_PATH = "config/kafka_consumer.yml" 10 | 11 | def check_config_file! 12 | config_path = File.expand_path(CONFIG_PATH) 13 | return if File.exist?(config_path) 14 | 15 | generate = ask "The file #{config_path} does not appear to exist. " \ 16 | "Would you like to generate it? [Yn]" 17 | 18 | generator_name = "kafka_consumer:install" 19 | if (generate.presence || "y").casecmp("y").zero? 20 | generate generator_name 21 | else 22 | raise Rails::Generators::Error, "Please generate #{config_path} " \ 23 | "by running `bin/rails g #{generator_name}` " \ 24 | "or add this file manually." 25 | end 26 | end 27 | end 28 | end 29 | end 30 | end 31 | -------------------------------------------------------------------------------- /lib/generators/kafka_consumer/consumer/USAGE: -------------------------------------------------------------------------------- 1 | Description: 2 | Stubs out a new non-inbox consumer. Pass the consumer name, either 3 | CamelCased or under_scored. 
4 | 5 | Example: 6 | bin/rails generate kafka_consumer:consumer Test 7 | 8 | This will create: 9 | app/consumers/test_consumer.rb 10 | 11 | This will optionally insert: 12 | 'group_key': 13 | name: <%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_NAME'){ 'group.name' } %><%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_SUFFIX'){ '' } %> 14 | topics: 15 | - name: 'topic.name' 16 | consumer: 17 | klass: "TestConsumer" 18 | # init_attrs: 19 | # skip_on_error: false # This is the default value 20 | deserializer: 21 | klass: "Sbmt::KafkaConsumer::Serialization::ProtobufDeserializer" 22 | init_attrs: 23 | message_decoder_klass: "YourMessageDecoderClassName" 24 | # skip_decoding_error: false # This is the default value 25 | -------------------------------------------------------------------------------- /lib/generators/kafka_consumer/consumer/consumer_generator.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails/generators/named_base" 4 | require "generators/kafka_consumer/concerns/configuration" 5 | 6 | module KafkaConsumer 7 | module Generators 8 | class ConsumerGenerator < Rails::Generators::NamedBase 9 | include Concerns::Configuration 10 | 11 | source_root File.expand_path("templates", __dir__) 12 | 13 | def insert_consumer_class 14 | @consumer_name = "#{name.classify}Consumer" 15 | template "consumer.rb.erb", "app/consumers/#{file_path}_consumer.rb" 16 | end 17 | 18 | def configure_consumer_group 19 | @group_key = ask "Would you also configure a consumer group?" \ 20 | " Type the group's key (e.g. my_consumer_group) or press Enter to skip this action" 21 | return if @group_key.blank? 22 | 23 | check_config_file! 24 | 25 | @group_name = ask "Type the group's name (e.g. my.consumer.group)" 26 | @topic = ask "Type the group topic's name" 27 | insert_into_file CONFIG_PATH, group_template.result(binding), after: "consumer_groups:\n" 28 | end 29 | 30 | private 31 | 32 | def group_template_path 33 | File.join(ConsumerGenerator.source_root, "consumer_group.yml.erb") 34 | end 35 | 36 | def group_template 37 | ERB.new(File.read(group_template_path), trim_mode: "%-") 38 | end 39 | end 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /lib/generators/kafka_consumer/consumer/templates/consumer.rb.erb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | <%- module_namespacing do -%> 4 | class <%= @consumer_name %> < Sbmt::KafkaConsumer::BaseConsumer 5 | def process_message(_message) 6 | # Add message processing here 7 | end 8 | end 9 | <%- end -%> 10 | -------------------------------------------------------------------------------- /lib/generators/kafka_consumer/consumer/templates/consumer_group.yml.erb: -------------------------------------------------------------------------------- 1 | '<%= @group_key %>': 2 | name: <%%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_NAME'){ "<%= @group_name %>" } %><%%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_SUFFIX'){ "" } %> 3 | topics: 4 | - name: "<%= @topic.presence || "insert-your-topic-name-here" %>" 5 | consumer: 6 | klass: "<%= @consumer_name %>" 7 | # init_attrs: 8 | # skip_on_error: false # This is the default value 9 | deserializer: 10 | klass: "Sbmt::KafkaConsumer::Serialization::ProtobufDeserializer" 11 | init_attrs: 12 | message_decoder_klass: "YourMessageDecoderClassName" 13 | # skip_decoding_error: false # This is the default value 14 | 
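As an illustrative sketch (assuming a hypothetical consumer named OrderCreated; neither the name nor the file is part of this repo): running `bin/rails generate kafka_consumer:consumer OrderCreated` renders the consumer.rb.erb template above into app/consumers/order_created_consumer.rb roughly as follows:

# frozen_string_literal: true

class OrderCreatedConsumer < Sbmt::KafkaConsumer::BaseConsumer
  def process_message(_message)
    # Add message processing here
  end
end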
-------------------------------------------------------------------------------- /lib/generators/kafka_consumer/inbox_consumer/USAGE: -------------------------------------------------------------------------------- 1 | Description: 2 | Inserts a consumer group's default configuration. 3 | It accepts a group key, a group name and an optional array of topics as arguments. 4 | 5 | 6 | Example: 7 | bin/rails generate kafka_consumer:inbox_consumer group_key group.name topic.name 8 | 9 | This will insert: 10 | 'group_key': 11 | name: <%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_NAME'){ 'group.name' } %><%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_SUFFIX'){ '' } %> 12 | topics: 13 | - name: 'topic.name' 14 | consumer: 15 | # Change the line below to the desired consumer 16 | # if InboxConsumer doesn't suit your needs 17 | klass: "Sbmt::KafkaConsumer::InboxConsumer" 18 | init_attrs: 19 | name: "test_items" 20 | inbox_item: "SomeModelInboxItem" # Change this to your item class name 21 | # deserializer: # This deserializer is used by default 22 | # klass: "Sbmt::KafkaConsumer::Serialization::NullDeserializer" 23 | -------------------------------------------------------------------------------- /lib/generators/kafka_consumer/inbox_consumer/inbox_consumer_generator.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails/generators/named_base" 4 | require "generators/kafka_consumer/concerns/configuration" 5 | 6 | module KafkaConsumer 7 | module Generators 8 | class InboxConsumerGenerator < Rails::Generators::NamedBase 9 | include Concerns::Configuration 10 | 11 | source_root File.expand_path("templates", __dir__) 12 | 13 | argument :group_name, type: :string, banner: "group.name" 14 | argument :topics, type: :array, default: [], banner: "topic topic" 15 | 16 | def process_topics 17 | check_config_file! 18 | 19 | @items = {} 20 | topics.each do |topic| 21 | inbox_item = ask "Would you also add an InboxItem class for topic '#{topic}'?" \ 22 | " Type item's name in the form of SomeModel::InboxItem or press Enter" \ 23 | " to skip creating item's class" 24 | @items[topic] = if inbox_item.blank? 25 | nil 26 | else 27 | generate "outbox:item", inbox_item, "--kind inbox" 28 | inbox_item.classify 29 | end 30 | end 31 | end 32 | 33 | def insert_consumer_group 34 | insert_into_file CONFIG_PATH, group_template.result(binding), after: "consumer_groups:\n" 35 | end 36 | 37 | private 38 | 39 | def group_template_path 40 | File.join(InboxConsumerGenerator.source_root, "consumer_group.yml.erb") 41 | end 42 | 43 | def group_template 44 | ERB.new(File.read(group_template_path), trim_mode: "%-") 45 | end 46 | end 47 | end 48 | end 49 | -------------------------------------------------------------------------------- /lib/generators/kafka_consumer/inbox_consumer/templates/consumer_group.yml.erb: -------------------------------------------------------------------------------- 1 | '<%= file_name %>': 2 | name: <%%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_NAME'){ '<%= group_name %>' } %><%%= ENV.fetch('CONSUMER_GROUP_SUFFIX'){ '' } %> 3 | <%- if @items.empty? -%> 4 | topics: [] 5 | <%- else -%> 6 | topics: 7 | <%- @items.each do |topic, item_name| -%> 8 | <%- next if topic.blank? 
-%> 9 | <%- inbox_item = item_name.presence || "YourModelName::InboxItem" -%> 10 | <%- consumer_name = inbox_item.split('::').first.presence || "#{topic}_item" -%> 11 | - name: "<%= topic %>" 12 | consumer: 13 | # Change the line below to the desired consumer 14 | # if InboxConsumer doesn't suit your needs 15 | klass: "Sbmt::KafkaConsumer::InboxConsumer" 16 | init_attrs: 17 | name: "<%= consumer_name.underscore.pluralize %>" 18 | inbox_item: "<%= inbox_item %>" 19 | # deserializer: # This deserializer is used by default 20 | # klass: "Sbmt::KafkaConsumer::Serialization::NullDeserializer" 21 | <%- end -%> 22 | <%- end -%> 23 | -------------------------------------------------------------------------------- /lib/generators/kafka_consumer/install/USAGE: -------------------------------------------------------------------------------- 1 | Description: 2 | Generates the Kafka consumer's initial setup 3 | 4 | Example: 5 | bin/rails generate kafka_consumer:install 6 | 7 | This will create: 8 | Kafkafile 9 | config/kafka_consumer.yml 10 | -------------------------------------------------------------------------------- /lib/generators/kafka_consumer/install/install_generator.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails/generators/base" 4 | require "generators/kafka_consumer/concerns/configuration" 5 | 6 | module KafkaConsumer 7 | module Generators 8 | class InstallGenerator < Rails::Generators::Base 9 | include Concerns::Configuration 10 | 11 | source_root File.expand_path("templates", __dir__) 12 | 13 | def create_kafkafile 14 | copy_file "Kafkafile", "./Kafkafile" 15 | end 16 | 17 | def create_kafka_consumer_yml 18 | copy_file "kafka_consumer.yml", CONFIG_PATH 19 | end 20 | end 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /lib/generators/kafka_consumer/install/templates/Kafkafile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "config/environment" 4 | -------------------------------------------------------------------------------- /lib/generators/kafka_consumer/install/templates/kafka_consumer.yml: -------------------------------------------------------------------------------- 1 | default: &default 2 | client_id: 'some-name' 3 | max_wait_time: 1 4 | shutdown_timeout: 60 5 | concurrency: 4 6 | pause_timeout: 1 7 | pause_max_timeout: 30 8 | pause_with_exponential_backoff: true 9 | ## available strategies: range, roundrobin, cooperative-sticky 10 | # partition_assignment_strategy: "range,roundrobin" 11 | auth: 12 | kind: plaintext 13 | kafka: 14 | servers: "kafka:9092" 15 | heartbeat_timeout: 5 16 | session_timeout: 30 17 | reconnect_timeout: 3 18 | connect_timeout: 5 19 | socket_timeout: 30 20 | kafka_options: 21 | allow.auto.create.topics: true 22 | consumer_groups: 23 | # group_ref_id_1: 24 | # name: cg_with_single_topic 25 | # topics: 26 | # - name: topic_with_inbox_items 27 | # consumer: 28 | # klass: "Sbmt::KafkaConsumer::InboxConsumer" 29 | # init_attrs: 30 | # name: "test_items" 31 | # inbox_item: "TestInboxItem" 32 | # deserializer: 33 | # klass: "Sbmt::KafkaConsumer::Serialization::NullDeserializer" 34 | # group_ref_id_2: 35 | # name: cg_with_multiple_topics 36 | # topics: 37 | # - name: topic_with_json_data 38 | # consumer: 39 | # klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer" 40 | # deserializer: 41 | # klass: 
"Sbmt::KafkaConsumer::Serialization::JsonDeserializer" 42 | # - name: topic_with_protobuf_data 43 | # consumer: 44 | # klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer" 45 | # deserializer: 46 | # klass: "Sbmt::KafkaConsumer::Serialization::ProtobufDeserializer" 47 | # init_attrs: 48 | # message_decoder_klass: "Sso::UserRegistration" 49 | # skip_decoding_error: true 50 | probes: 51 | port: 9394 52 | 53 | development: 54 | <<: *default 55 | test: 56 | <<: *default 57 | deliver: false 58 | staging: &staging 59 | <<: *default 60 | production: 61 | <<: *staging 62 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "zeitwerk" 4 | require "karafka" 5 | require "active_record" 6 | require "yabeda" 7 | require "anyway_config" 8 | require "thor" 9 | require "dry/types" 10 | require "dry-struct" 11 | 12 | begin 13 | require "sbmt/outbox" 14 | rescue LoadError 15 | # sbmt-outbox is an optional dependency 16 | end 17 | 18 | require "anyway/rails" if defined?(Rails) 19 | require_relative "kafka_consumer/railtie" if defined?(Rails::Railtie) 20 | 21 | module Sbmt 22 | module KafkaConsumer 23 | class << self 24 | delegate :monitor, to: Karafka 25 | 26 | def logger 27 | @logger ||= Rails.logger 28 | end 29 | end 30 | 31 | class Error < StandardError; end 32 | 33 | class SkipUndeserializableMessage < Error; end 34 | end 35 | end 36 | 37 | loader = Zeitwerk::Loader.new 38 | # we need to set parent dir as gem autoloading root 39 | # see https://github.com/fxn/zeitwerk/issues/138#issuecomment-709640940 for details 40 | loader.push_dir(File.join(__dir__, "..")) 41 | loader.tag = "sbmt-kafka_consumer" 42 | 43 | # protobuf is an optional dependency 44 | loader.do_not_eager_load("#{__dir__}/kafka_consumer/serialization/protobuf_deserializer.rb") 45 | loader.do_not_eager_load("#{__dir__}/kafka_consumer/instrumentation/open_telemetry_loader.rb") 46 | loader.do_not_eager_load("#{__dir__}/kafka_consumer/instrumentation/open_telemetry_tracer.rb") 47 | loader.do_not_eager_load("#{__dir__}/kafka_consumer/instrumentation/sentry_tracer.rb") 48 | 49 | # completely ignore testing helpers 50 | # because testing.rb just requires some files and does not contain any constants (e.g. Testing) which Zeitwerk expects 51 | loader.ignore("#{__dir__}/kafka_consumer/testing.rb") 52 | loader.ignore("#{__dir__}/kafka_consumer/testing") 53 | loader.ignore("#{File.expand_path("../", __dir__)}/generators") 54 | 55 | loader.inflector.inflect("cli" => "CLI") 56 | loader.inflector.inflect("version" => "VERSION") 57 | 58 | loader.setup 59 | loader.eager_load 60 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/app_initializer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module AppInitializer 6 | extend self 7 | 8 | def initialize! 9 | ClientConfigurer.configure! 
10 | end 11 | end 12 | end 13 | end 14 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/base_consumer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | class BaseConsumer < Karafka::BaseConsumer 6 | class_attribute :skip_on_error, instance_writer: false, default: false 7 | class_attribute :middlewares, instance_writer: false, default: [] 8 | 9 | def self.consumer_klass(skip_on_error: nil, middlewares: nil) 10 | klass = Class.new(self) do 11 | def self.name 12 | superclass.name 13 | end 14 | end 15 | 16 | # defaults are set in class_attribute definition 17 | klass.skip_on_error = skip_on_error if skip_on_error 18 | klass.middlewares = middlewares.map(&:constantize) if middlewares 19 | klass 20 | end 21 | 22 | def consume 23 | ::Rails.application.executor.wrap do 24 | if process_batch? 25 | with_batch_instrumentation(messages) do 26 | process_batch(messages) 27 | mark_message(messages.last) 28 | end 29 | else 30 | messages.each do |message| 31 | with_instrumentation(message) { do_consume(message) } 32 | end 33 | end 34 | end 35 | end 36 | 37 | def process_batch? 38 | if @process_batch_memoized.nil? 39 | @process_batch_memoized = respond_to?(:process_batch) 40 | end 41 | @process_batch_memoized 42 | end 43 | 44 | private 45 | 46 | def with_instrumentation(message) 47 | logger.tagged( 48 | trace_id: trace_id 49 | ) do 50 | ::Sbmt::KafkaConsumer.monitor.instrument( 51 | "consumer.consumed_one", 52 | caller: self, message: message, trace_id: trace_id 53 | ) do 54 | yield 55 | rescue SkipUndeserializableMessage => ex 56 | instrument_error(ex, message) 57 | logger.warn("skipping undeserializable message: #{ex.message}") 58 | rescue => ex 59 | instrument_error(ex, message) 60 | 61 | if skip_on_error 62 | logger.warn("skipping unprocessable message: #{ex.message}, message: #{message_payload(message).inspect}") 63 | else 64 | raise ex 65 | end 66 | end 67 | end 68 | end 69 | 70 | def with_batch_instrumentation(messages) 71 | logger.tagged( 72 | trace_id: trace_id, 73 | first_offset: messages.first.metadata.offset, 74 | last_offset: messages.last.metadata.offset 75 | ) do 76 | ::Sbmt::KafkaConsumer.monitor.instrument( 77 | "consumer.consumed_batch", 78 | caller: self, 79 | messages: messages, 80 | trace_id: trace_id 81 | ) do 82 | yield 83 | end 84 | end 85 | end 86 | 87 | def with_common_instrumentation(name, message) 88 | logger.tagged( 89 | trace_id: trace_id 90 | ) do 91 | ::Sbmt::KafkaConsumer.monitor.instrument( 92 | "consumer.#{name}", 93 | caller: self, 94 | message: message, 95 | trace_id: trace_id 96 | ) do 97 | yield 98 | end 99 | end 100 | end 101 | 102 | def do_consume(message) 103 | log_message(message) if log_payload? 104 | 105 | # deserialization process is lazy (and cached) 106 | # so we trigger it explicitly to catch undeserializable message early 107 | message.payload 108 | 109 | with_common_instrumentation("process_message", message) do 110 | call_middlewares(message, middlewares) { process_message(message) } 111 | end 112 | 113 | with_common_instrumentation("mark_as_consumed", message) do 114 | mark_message(message) 115 | end 116 | end 117 | 118 | # can be overridden in consumer to enable message logging 119 | def log_payload? 
120 | false 121 | end 122 | 123 | def logger 124 | ::Sbmt::KafkaConsumer.logger 125 | end 126 | 127 | def process_message(_message) 128 | raise NotImplementedError, "Implement this in a subclass" 129 | end 130 | 131 | def log_message(message) 132 | logger.info("#{message_payload(message).inspect}, message_key: #{message.metadata.key}, message_headers: #{message.metadata.headers}") 133 | end 134 | 135 | def instrument_error(error, message) 136 | ::Sbmt::KafkaConsumer.monitor.instrument( 137 | "error.occurred", 138 | error: error, 139 | caller: self, 140 | message: message, 141 | type: "consumer.base.consume_one", 142 | log_level: :error 143 | ) 144 | end 145 | 146 | def message_payload(message) 147 | message.payload || message.raw_payload 148 | end 149 | 150 | def call_middlewares(message, middlewares) 151 | return yield if middlewares.empty? 152 | 153 | chain = middlewares.map { |middleware_class| middleware_class.new } 154 | 155 | traverse_chain = proc do 156 | if chain.empty? 157 | yield 158 | else 159 | chain.shift.call(message, &traverse_chain) 160 | end 161 | end 162 | traverse_chain.call 163 | end 164 | 165 | def trace_id 166 | return nil unless defined?(::OpenTelemetry) 167 | 168 | context = ::OpenTelemetry::Trace.current_span.context 169 | 170 | context.valid? ? context.hex_trace_id : nil 171 | end 172 | 173 | def config 174 | @config ||= Sbmt::KafkaConsumer::Config.new 175 | end 176 | 177 | def cooperative_sticky? 178 | config.partition_assignment_strategy == "cooperative-sticky" 179 | end 180 | 181 | def mark_message(message) 182 | if cooperative_sticky? 183 | mark_as_consumed(message) 184 | else 185 | mark_as_consumed!(message) 186 | end 187 | end 188 | end 189 | end 190 | end 191 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/cli.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | class CLI < Thor 6 | def self.exit_on_failure? 
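# Thor hook: returning true here makes the CLI exit with a non-zero status when a command fails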
7 | true 8 | end 9 | 10 | default_command :start 11 | 12 | desc "start", "Start kafka_consumer worker" 13 | option :consumer_group_id, 14 | aliases: "-g", 15 | desc: "Consumer group id to start", 16 | repeatable: true 17 | option :concurrency, 18 | aliases: "-c", 19 | type: :numeric, 20 | default: 5, 21 | desc: "Number of threads, overrides global kafka.concurrency config" 22 | def start 23 | $stdout.puts "Initializing KafkaConsumer" 24 | $stdout.puts "Version: #{VERSION}" 25 | 26 | load_environment 27 | 28 | $stdout.sync = true 29 | 30 | $stdout.puts "Configuring client" 31 | ClientConfigurer.configure!( 32 | consumer_groups: options[:consumer_group_id], 33 | concurrency: options[:concurrency] 34 | ) 35 | $stdout.puts "Client configured routes: #{ClientConfigurer.routes.inspect}" 36 | 37 | $stdout.puts "Starting probes/metrics http-server" 38 | Sbmt::KafkaConsumer::Probes::Host.run_async 39 | 40 | Sbmt::KafkaConsumer::Server.run 41 | end 42 | 43 | private 44 | 45 | def load_environment 46 | env_file_path = ENV["KAFKAFILE"] || "#{Dir.pwd}/Kafkafile" 47 | 48 | if File.exist?(env_file_path) 49 | $stdout.puts "Loading env from Kafkafile: #{env_file_path}" 50 | load(env_file_path) 51 | end 52 | end 53 | end 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/client_configurer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::ClientConfigurer 4 | def self.configure!(**opts) 5 | config = Sbmt::KafkaConsumer::Config.new 6 | Karafka::App.setup do |karafka_config| 7 | karafka_config.monitor = config.monitor_class.classify.constantize.new 8 | karafka_config.logger = Sbmt::KafkaConsumer.logger 9 | 10 | karafka_config.client_id = config.client_id 11 | karafka_config.kafka = config.to_kafka_options 12 | 13 | karafka_config.pause_timeout = config.pause_timeout * 1_000 if config.pause_timeout.present? 14 | karafka_config.pause_max_timeout = config.pause_max_timeout * 1_000 if config.pause_max_timeout.present? 15 | karafka_config.max_wait_time = config.max_wait_time * 1_000 if config.max_wait_time.present? 16 | karafka_config.shutdown_timeout = config.shutdown_timeout * 1_000 if config.shutdown_timeout.present? 17 | 18 | karafka_config.pause_with_exponential_backoff = config.pause_with_exponential_backoff if config.pause_with_exponential_backoff.present? 19 | 20 | karafka_config.concurrency = opts[:concurrency] || config.concurrency 21 | 22 | # Do not validate topics naming consistency 23 | # see https://github.com/karafka/karafka/wiki/FAQ#why-am-i-seeing-a-needs-to-be-consistent-namespacing-style-error 24 | karafka_config.strict_topics_namespacing = false 25 | 26 | # Recreate consumers with each batch. This will allow Rails code reload to work in the 27 | # development mode. Otherwise Karafka process would not be aware of code changes 28 | karafka_config.consumer_persistence = !Rails.env.development? 29 | end 30 | 31 | Karafka.monitor.subscribe(config.logger_listener_class.classify.constantize.new) 32 | Karafka.monitor.subscribe(config.metrics_listener_class.classify.constantize.new) 33 | 34 | target_consumer_groups = if opts[:consumer_groups].blank? 35 | config.consumer_groups 36 | else 37 | config.consumer_groups.select do |group| 38 | opts[:consumer_groups].include?(group.id) 39 | end 40 | end 41 | 42 | raise "No configured consumer groups found, exiting" if target_consumer_groups.blank? 
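# the consumer mapper translates logical group names from kafka_consumer.yml into actual Kafka group ids; the default KarafkaV1ConsumerMapper keeps karafka v1 naming (group name prefixed with the underscored client_id)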
43 | 44 | consumer_mapper = config.consumer_mapper_class.classify.constantize.new 45 | 46 | # clear routes in case CLI runner tries to reconfigure them 47 | # but railtie initializer had already executed and did the same 48 | # otherwise we'll get duplicate routes error from sbmt-karafka internal config validation process 49 | Karafka::App.routes.clear 50 | Karafka::App.routes.draw do 51 | target_consumer_groups.each do |cg| 52 | group_id = consumer_mapper.call(cg.name) 53 | consumer_group group_id do 54 | cg.topics.each do |t| 55 | topic t.name do 56 | active t.active 57 | manual_offset_management t.manual_offset_management 58 | consumer t.consumer.consumer_klass 59 | deserializer t.deserializer.instantiate if t.deserializer.klass.present? 60 | kafka t.kafka_options.merge(inherit: true) if t.kafka_options.present? 61 | end 62 | end 63 | end 64 | end 65 | end 66 | end 67 | 68 | def self.routes 69 | Karafka::App.routes.map do |cg| 70 | topics = cg.topics.map { |t| {name: t.name, deserializer: t.deserializers.payload} } 71 | {group: cg.id, topics: topics} 72 | end 73 | end 74 | end 75 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config < Anyway::Config 4 | config_name :kafka_consumer 5 | 6 | class << self 7 | def coerce_to(struct) 8 | lambda do |raw_attrs| 9 | struct.new(**raw_attrs) 10 | rescue Dry::Types::SchemaError => e 11 | raise_validation_error "cannot parse #{struct}: #{e.message}" 12 | end 13 | end 14 | 15 | def coerce_to_array_of(struct) 16 | lambda do |raw_attrs| 17 | raw_attrs.keys.map do |obj_title| 18 | coerce_to(struct) 19 | .call(**raw_attrs.fetch(obj_title) 20 | .merge(id: obj_title)) 21 | end 22 | end 23 | end 24 | end 25 | 26 | attr_config :client_id, 27 | :pause_timeout, :pause_max_timeout, :pause_with_exponential_backoff, 28 | :max_wait_time, :shutdown_timeout, :partition_assignment_strategy, 29 | concurrency: 4, auth: {}, kafka: {}, consumer_groups: {}, probes: {}, metrics: {}, 30 | deserializer_class: "::Sbmt::KafkaConsumer::Serialization::NullDeserializer", 31 | monitor_class: "::Sbmt::KafkaConsumer::Instrumentation::TracingMonitor", 32 | logger_class: "::Sbmt::KafkaConsumer::Logger", 33 | logger_listener_class: "::Sbmt::KafkaConsumer::Instrumentation::LoggerListener", 34 | metrics_listener_class: "::Sbmt::KafkaConsumer::Instrumentation::YabedaMetricsListener", 35 | consumer_mapper_class: "::Sbmt::KafkaConsumer::Routing::KarafkaV1ConsumerMapper" 36 | 37 | required :client_id 38 | 39 | on_load :validate_consumer_groups 40 | on_load :set_default_metrics_port 41 | 42 | coerce_types client_id: :string, 43 | pause_timeout: :integer, 44 | pause_max_timeout: :integer, 45 | pause_with_exponential_backoff: :boolean, 46 | max_wait_time: :integer, 47 | shutdown_timeout: :integer, 48 | partition_assignment_strategy: :string, 49 | concurrency: :integer 50 | 51 | coerce_types kafka: coerce_to(Kafka) 52 | coerce_types auth: coerce_to(Auth) 53 | coerce_types probes: coerce_to(Probes) 54 | coerce_types metrics: coerce_to(Metrics) 55 | coerce_types consumer_groups: coerce_to_array_of(ConsumerGroup) 56 | 57 | def to_kafka_options 58 | { 59 | "partition.assignment.strategy": partition_assignment_strategy 60 | }.compact 61 | .merge(kafka.to_kafka_options) 62 | .merge(auth.to_kafka_options) 63 | end 64 | 65 | private 66 | 67 | def validate_consumer_groups 68 | 
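# every group must define at least one topic, each topic's consumer/deserializer classes must resolve, and partition.assignment.strategy may only be set via the global partition_assignment_strategy option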
consumer_groups.each do |cg| 69 | raise_validation_error "consumer group #{cg.id} must have at least one topic defined" if cg.topics.blank? 70 | cg.topics.each do |t| 71 | if t.kafka_options.key?(:"partition.assignment.strategy") 72 | raise_validation_error "Using the partition.assignment.strategy option for individual topics is not supported due to consuming issues. Use the global option `partition_assignment_strategy` instead" 73 | end 74 | raise_validation_error "topic #{cg.id}.topics.name[#{t.name}] contains invalid consumer class: no const #{t.consumer.klass} defined" unless t.consumer.klass.safe_constantize 75 | raise_validation_error "topic #{cg.id}.topics.name[#{t.name}] contains invalid deserializer class: no const #{t.deserializer.klass} defined" unless t.deserializer&.klass&.safe_constantize 76 | end 77 | end 78 | end 79 | 80 | def set_default_metrics_port 81 | self.metrics = metrics.new(port: probes.port) unless metrics.port 82 | end 83 | end 84 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config/auth.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config::Auth < Dry::Struct 4 | transform_keys(&:to_sym) 5 | 6 | AVAILABLE_AUTH_KINDS = %w[plaintext sasl_plaintext].freeze 7 | DEFAULT_AUTH_KIND = "plaintext" 8 | 9 | AVAILABLE_SASL_MECHANISMS = %w[PLAIN SCRAM-SHA-256 SCRAM-SHA-512].freeze 10 | DEFAULT_SASL_MECHANISM = "SCRAM-SHA-512" 11 | 12 | attribute :kind, Sbmt::KafkaConsumer::Types::Strict::String 13 | .default(DEFAULT_AUTH_KIND) 14 | .enum(*AVAILABLE_AUTH_KINDS) 15 | attribute? :sasl_mechanism, Sbmt::KafkaConsumer::Types::Strict::String 16 | .default(DEFAULT_SASL_MECHANISM) 17 | .enum(*AVAILABLE_SASL_MECHANISMS) 18 | attribute? :sasl_username, Sbmt::KafkaConsumer::Types::Strict::String 19 | attribute? :sasl_password, Sbmt::KafkaConsumer::Types::Strict::String 20 | 21 | def to_kafka_options 22 | ensure_options_are_valid 23 | 24 | opts = {} 25 | 26 | case kind 27 | when "sasl_plaintext" 28 | opts.merge!( 29 | "security.protocol": kind, 30 | "sasl.mechanism": sasl_mechanism, 31 | "sasl.username": sasl_username, 32 | "sasl.password": sasl_password 33 | ) 34 | when "plaintext" 35 | opts[:"security.protocol"] = kind 36 | else 37 | raise Anyway::Config::ValidationError, "unknown auth kind: #{kind}" 38 | end 39 | 40 | opts.symbolize_keys 41 | end 42 | 43 | private 44 | 45 | def ensure_options_are_valid 46 | raise Anyway::Config::ValidationError, "unknown auth kind: #{kind}" unless AVAILABLE_AUTH_KINDS.include?(kind) 47 | 48 | case kind 49 | when "sasl_plaintext" 50 | raise Anyway::Config::ValidationError, "sasl_username is required for #{kind} auth kind" if sasl_username.blank? 51 | raise Anyway::Config::ValidationError, "sasl_password is required for #{kind} auth kind" if sasl_password.blank? 52 | raise Anyway::Config::ValidationError, "sasl_mechanism is required for #{kind} auth kind" if sasl_mechanism.blank? 
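# A sketch of the matching kafka_consumer.yml auth section (the credential
# values below are placeholders, not real settings):
#
#   auth:
#     kind: sasl_plaintext
#     sasl_mechanism: SCRAM-SHA-512
#     sasl_username: username
#     sasl_password: password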
53 | raise Anyway::Config::ValidationError, "invalid sasl_mechanism for #{kind} auth kind, available options are: [#{AVAILABLE_SASL_MECHANISMS.join(",")}]" unless AVAILABLE_SASL_MECHANISMS.include?(sasl_mechanism) 54 | end 55 | end 56 | end 57 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config/consumer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config::Consumer < Dry::Struct 4 | transform_keys(&:to_sym) 5 | 6 | attribute :klass, Sbmt::KafkaConsumer::Types::Strict::String 7 | attribute :init_attrs, Sbmt::KafkaConsumer::Types::ConfigAttrs.optional.default({}.freeze) 8 | 9 | def consumer_klass 10 | target_klass = klass.constantize 11 | 12 | return target_klass.consumer_klass if init_attrs.blank? 13 | 14 | target_klass.consumer_klass(**init_attrs) 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config/consumer_group.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config::ConsumerGroup < Dry::Struct 4 | transform_keys(&:to_sym) 5 | 6 | attribute :id, Sbmt::KafkaConsumer::Types::Strict::String 7 | attribute :name, Sbmt::KafkaConsumer::Types::Strict::String 8 | attribute :topics, Sbmt::KafkaConsumer::Types.Array(Sbmt::KafkaConsumer::Types::ConfigTopic) 9 | end 10 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config/deserializer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config::Deserializer < Dry::Struct 4 | transform_keys(&:to_sym) 5 | 6 | attribute :klass, Sbmt::KafkaConsumer::Types::Strict::String 7 | .optional 8 | .default(Sbmt::KafkaConsumer::Serialization::NullDeserializer.to_s.freeze) 9 | attribute :init_attrs, Sbmt::KafkaConsumer::Types::ConfigAttrs.optional.default({}.freeze) 10 | 11 | def instantiate 12 | return klass.constantize.new if init_attrs.blank? 13 | klass.constantize.new(**init_attrs) 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config/kafka.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config::Kafka < Dry::Struct 4 | transform_keys(&:to_sym) 5 | 6 | # srv1:port1,srv2:port2,... 
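# e.g. "kafka-1:9092,kafka-2:9092" (hypothetical broker addresses)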
7 | SERVERS_REGEXP = /^[a-z\d.\-:]+(,[a-z\d.\-:]+)*$/.freeze 8 | 9 | attribute :servers, Sbmt::KafkaConsumer::Types::String.constrained(format: SERVERS_REGEXP) 10 | 11 | # defaults are rdkafka's 12 | # see https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md 13 | attribute :heartbeat_timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(5) 14 | attribute :session_timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(30) 15 | attribute :reconnect_timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(3) 16 | attribute :connect_timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(5) 17 | attribute :socket_timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(30) 18 | 19 | attribute :kafka_options, Sbmt::KafkaConsumer::Types::ConfigAttrs.optional.default({}.freeze) 20 | 21 | def to_kafka_options 22 | # root options take precedence over kafka_options' ones 23 | kafka_options.merge( 24 | "bootstrap.servers": servers, 25 | "heartbeat.interval.ms": heartbeat_timeout * 1_000, 26 | "session.timeout.ms": session_timeout * 1_000, 27 | "reconnect.backoff.max.ms": reconnect_timeout * 1_000, 28 | "socket.connection.setup.timeout.ms": connect_timeout * 1_000, 29 | "socket.timeout.ms": socket_timeout * 1_000 30 | ).symbolize_keys 31 | end 32 | end 33 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config/metrics.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config::Metrics < Dry::Struct 4 | transform_keys(&:to_sym) 5 | 6 | attribute? :port, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional 7 | attribute :path, Sbmt::KafkaConsumer::Types::Strict::String 8 | .optional 9 | .default("/metrics") 10 | end 11 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config/probes.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config::Probes < Dry::Struct 4 | transform_keys(&:to_sym) 5 | 6 | attribute :port, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(9394) 7 | attribute :endpoints, Endpoints.optional.default(Endpoints.new.freeze) 8 | end 9 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config/probes/endpoints.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config::Probes::Endpoints < Dry::Struct 4 | transform_keys(&:to_sym) 5 | 6 | attribute :liveness, Sbmt::KafkaConsumer::Config::Probes::LivenessProbe.optional.default( 7 | Sbmt::KafkaConsumer::Config::Probes::LivenessProbe.new.freeze 8 | ) 9 | 10 | attribute :readiness, Sbmt::KafkaConsumer::Config::Probes::ReadinessProbe.optional.default( 11 | Sbmt::KafkaConsumer::Config::Probes::ReadinessProbe.new.freeze 12 | ) 13 | end 14 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config/probes/liveness_probe.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config::Probes::LivenessProbe < Dry::Struct 4 | transform_keys(&:to_sym) 5 | 6 | attribute :enabled, 
Sbmt::KafkaConsumer::Types::Bool.optional.default(true) 7 | attribute :path, Sbmt::KafkaConsumer::Types::Strict::String 8 | .optional 9 | .default("/liveness") 10 | attribute :timeout, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(300) 11 | attribute :max_error_count, Sbmt::KafkaConsumer::Types::Coercible::Integer.optional.default(10) 12 | end 13 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config/probes/readiness_probe.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config::Probes::ReadinessProbe < Dry::Struct 4 | transform_keys(&:to_sym) 5 | 6 | attribute :enabled, Sbmt::KafkaConsumer::Types::Bool.optional.default(true) 7 | attribute :path, Sbmt::KafkaConsumer::Types::Strict::String 8 | .optional 9 | .default("/readiness/kafka_consumer") 10 | end 11 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/config/topic.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::Config::Topic < Dry::Struct 4 | transform_keys(&:to_sym) 5 | 6 | attribute :name, Sbmt::KafkaConsumer::Types::Strict::String 7 | attribute :consumer, Sbmt::KafkaConsumer::Types::ConfigConsumer 8 | attribute :deserializer, Sbmt::KafkaConsumer::Types::ConfigDeserializer 9 | .optional 10 | .default(Sbmt::KafkaConsumer::Config::Deserializer.new.freeze) 11 | attribute :active, Sbmt::KafkaConsumer::Types::Bool.optional.default(true) 12 | attribute :manual_offset_management, Sbmt::KafkaConsumer::Types::Bool.optional.default(true) 13 | attribute? :kafka_options, Sbmt::KafkaConsumer::Types::ConfigAttrs.optional.default({}.freeze) 14 | end 15 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/inbox_consumer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | class InboxConsumer < BaseConsumer 6 | IDEMPOTENCY_HEADER_NAME = "Idempotency-Key" 7 | DEFAULT_SOURCE = "KAFKA" 8 | 9 | class_attribute :inbox_item_class, instance_writer: false, default: nil 10 | class_attribute :event_name, instance_writer: false, default: nil 11 | 12 | def self.consumer_klass(inbox_item:, event_name: nil, skip_on_error: nil, name: nil, middlewares: nil) 13 | # defaults are set in class_attribute definition 14 | klass = super(skip_on_error: skip_on_error, middlewares: middlewares) 15 | klass.inbox_item_class = inbox_item.constantize 16 | klass.event_name = event_name if event_name 17 | klass 18 | end 19 | 20 | def initialize 21 | raise Sbmt::KafkaConsumer::Error, "inbox_item param is not set" if inbox_item_class.blank? 
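# inbox_item_class is normally set via .consumer_klass; a minimal usage
# sketch with hypothetical class/event names:
#
#   Sbmt::KafkaConsumer::InboxConsumer.consumer_klass(
#     inbox_item: "OrderInboxItem",
#     event_name: "order_created"
#   )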
22 | super 23 | end 24 | 25 | def extra_message_attrs(_message) 26 | {} 27 | end 28 | 29 | private 30 | 31 | def process_message(message) 32 | logger.tagged(inbox_name: inbox_name, event_name: event_name) do 33 | ::Sbmt::KafkaConsumer.monitor.instrument( 34 | "consumer.inbox.consumed_one", caller: self, 35 | message: message, 36 | message_uuid: message_uuid(message), 37 | inbox_name: inbox_name, 38 | event_name: event_name, 39 | status: "success" 40 | ) do 41 | process_inbox_item(message) 42 | end 43 | end 44 | end 45 | 46 | def process_inbox_item(message) 47 | result = Sbmt::Outbox::CreateInboxItem.call( 48 | inbox_item_class, 49 | attributes: message_attrs(message) 50 | ) 51 | 52 | if result.failure? 53 | raise "Failed consuming message for #{inbox_name}, message_uuid: #{message_uuid(message)}: #{result}" 54 | end 55 | 56 | item = result.success 57 | item.track_metrics_after_consume if item.respond_to?(:track_metrics_after_consume) 58 | rescue ActiveRecord::RecordNotUnique 59 | instrument_warn("Skipped duplicate message for #{inbox_name}, message_uuid: #{message_uuid(message)}", message, "duplicate") 60 | rescue => ex 61 | if skip_on_error 62 | logger.warn("skipping unprocessable message for #{inbox_name}, message_uuid: #{message_uuid(message)}") 63 | instrument_error(ex, message, "skipped") 64 | else 65 | instrument_error(ex, message) 66 | end 67 | raise ex 68 | end 69 | 70 | def message_attrs(message) 71 | attrs = { 72 | proto_payload: message.raw_payload, 73 | options: { 74 | headers: message.metadata.headers.dup, 75 | group_id: topic.consumer_group.id, 76 | topic: message.metadata.topic, 77 | partition: message.metadata.partition, 78 | source: DEFAULT_SOURCE 79 | } 80 | } 81 | 82 | if message_uuid(message) 83 | attrs[:uuid] = message_uuid(message) 84 | end 85 | 86 | # if message has no uuid, it will be generated later in Sbmt::Outbox::CreateInboxItem 87 | 88 | attrs[:event_key] = if message.metadata.key.present? 
89 | message.metadata.key 90 | elsif inbox_item_class.respond_to?(:event_key) 91 | inbox_item_class.event_key(message) 92 | else 93 | # if message has no partitioning key 94 | # set it to something unique and monotonically increasing, like the offset 95 | message.offset 96 | end 97 | 98 | attrs[:event_name] = event_name if inbox_item_class.has_attribute?(:event_name) 99 | 100 | attrs.merge(extra_message_attrs(message)) 101 | end 102 | 103 | def message_uuid(message) 104 | message.metadata.headers.fetch(IDEMPOTENCY_HEADER_NAME, nil).presence 105 | end 106 | 107 | def inbox_name 108 | inbox_item_class.box_name 109 | end 110 | 111 | def instrument_error(error, message, status = "failure", log_level: :error) 112 | ::Sbmt::KafkaConsumer.monitor.instrument( 113 | "error.occurred", 114 | error: error, 115 | caller: self, 116 | message: message, 117 | inbox_name: inbox_name, 118 | event_name: event_name, 119 | status: status, 120 | type: "consumer.inbox.consume_one", 121 | log_level: log_level 122 | ) 123 | end 124 | 125 | def instrument_warn(*args, **kwargs) 126 | instrument_error(*args, **kwargs, log_level: :warn) 127 | end 128 | end 129 | end 130 | end 131 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/base_monitor.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Instrumentation 6 | class BaseMonitor < Karafka::Instrumentation::Monitor 7 | # karafka consuming is based around batch-processing 8 | # so we need these per-message custom events 9 | SBMT_KAFKA_CONSUMER_EVENTS = %w[ 10 | consumer.consumed_one 11 | consumer.inbox.consumed_one 12 | consumer.consumed_batch 13 | consumer.process_message 14 | consumer.mark_as_consumed 15 | ].freeze 16 | 17 | def initialize 18 | super 19 | SBMT_KAFKA_CONSUMER_EVENTS.each { |event_id| notifications_bus.register_event(event_id) } 20 | end 21 | 22 | def instrument(_event_id, _payload = EMPTY_HASH, &block) 23 | super 24 | end 25 | end 26 | end 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/chainable_monitor.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Instrumentation 6 | class ChainableMonitor < BaseMonitor 7 | attr_reader :monitors 8 | 9 | def initialize(monitors = []) 10 | super() 11 | 12 | @monitors = monitors 13 | end 14 | 15 | def instrument(event_id, payload = EMPTY_HASH, &block) 16 | return super if monitors.empty? 17 | 18 | chain = monitors.map { |monitor| monitor.new(event_id, payload) } 19 | traverse_chain = proc do 20 | if chain.empty?
21 | super 22 | else 23 | chain.shift.trace(&traverse_chain) 24 | end 25 | end 26 | traverse_chain.call 27 | end 28 | end 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/listener_helper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Instrumentation 6 | module ListenerHelper 7 | delegate :logger, to: ::Sbmt::KafkaConsumer 8 | 9 | private 10 | 11 | def consumer_tags(event) 12 | message = event[:message] 13 | { 14 | topic: message.metadata.topic, 15 | partition: message.metadata.partition 16 | } 17 | end 18 | 19 | def inbox_tags(event) 20 | { 21 | inbox_name: event[:inbox_name], 22 | event_name: event[:event_name], 23 | status: event[:status] 24 | } 25 | end 26 | 27 | def error_message(error) 28 | if error.respond_to?(:message) 29 | error.message 30 | elsif error.respond_to?(:failure) 31 | error.failure 32 | else 33 | error.to_s 34 | end 35 | end 36 | 37 | def log_backtrace(error) 38 | if error.respond_to?(:backtrace) 39 | logger.error(error.backtrace.join("\n")) 40 | elsif error.respond_to?(:trace) 41 | logger.error(error.trace) 42 | end 43 | end 44 | end 45 | end 46 | end 47 | end 48 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/liveness_listener.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Instrumentation 6 | class LivenessListener 7 | include ListenerHelper 8 | include KafkaConsumer::Probes::Probe 9 | 10 | ERROR_TYPE = "Liveness probe error" 11 | 12 | def initialize(timeout_sec: 300, max_error_count: 10) 13 | @timeout_sec = timeout_sec * 1000 14 | @max_error_count = max_error_count 15 | @error_count = 0 16 | @error_backtrace = nil 17 | @polls = {} 18 | @mutex = Mutex.new 19 | setup_subscription 20 | end 21 | 22 | def probe(_env) 23 | now = monotonic_now 24 | has_timed_out_polls = polls.values.any? 
{ |tick| (now - tick) > timeout_sec } 25 | 26 | if !has_timed_out_polls && @error_count < @max_error_count 27 | probe_ok timed_out_polls: false, errors_count: @error_count 28 | elsif @error_count >= @max_error_count 29 | probe_error error_type: ERROR_TYPE, timed_out_polls: false, errors_count: @error_count, error_backtrace: @error_backtrace 30 | else 31 | probe_error error_type: ERROR_TYPE, timed_out_polls: true, errors_count: @error_count, polls: polls 32 | end 33 | end 34 | 35 | def on_connection_listener_fetch_loop(event) 36 | now = monotonic_now 37 | KafkaConsumer.logger.debug("on_connection_listener_fetch_loop: now=#{now}, thread_id=#{thread_id}") 38 | mutex.synchronize do 39 | polls[thread_id] = monotonic_now 40 | end 41 | end 42 | 43 | def on_error_occurred(event) 44 | type = event[:type] 45 | 46 | return unless type == "librdkafka.error" 47 | error = event[:error] 48 | 49 | @error_backtrace ||= (error.backtrace || []).join("\n") 50 | @error_count += 1 51 | end 52 | 53 | private 54 | 55 | attr_reader :polls, :timeout_sec, :mutex 56 | 57 | def monotonic_now 58 | ::Process.clock_gettime(::Process::CLOCK_MONOTONIC, :float_millisecond) 59 | end 60 | 61 | def thread_id 62 | Thread.current.object_id 63 | end 64 | 65 | def setup_subscription 66 | Karafka::App.monitor.subscribe(self) 67 | end 68 | end 69 | end 70 | end 71 | end 72 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/logger_listener.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Instrumentation 6 | class LoggerListener < Karafka::Instrumentation::LoggerListener 7 | include ListenerHelper 8 | CUSTOM_ERROR_TYPES = %w[consumer.base.consume_one consumer.inbox.consume_one].freeze 9 | VALID_LOG_LEVELS = %i[error warn].freeze 10 | 11 | def on_error_occurred(event) 12 | type = event[:type] 13 | error = event[:error] 14 | 15 | # catch only consumer-specific errors here 16 | # and let the default handler process the rest 17 | return super unless CUSTOM_ERROR_TYPES.include?(type) 18 | 19 | tags = {} 20 | tags[:status] = event[:status] if type == "consumer.inbox.consume_one" 21 | 22 | logger.tagged( 23 | type: type, 24 | stacktrace: log_backtrace(error), 25 | **tags 26 | ) do 27 | log_level = event[:log_level] || :error 28 | if VALID_LOG_LEVELS.include?(log_level) 29 | logger.public_send(log_level, error_message(error)) 30 | else 31 | raise "Invalid log level #{log_level}" 32 | end 33 | end 34 | end 35 | 36 | # BaseConsumer events 37 | def on_consumer_consumed_one(event) 38 | log_with_tags(log_tags(event), "Successfully consumed message") 39 | end 40 | 41 | def on_consumer_mark_as_consumed(event) 42 | log_with_tags(log_tags(event), "Commit offset") 43 | end 44 | 45 | def on_consumer_process_message(event) 46 | log_with_tags(log_tags(event), "Processing message") 47 | end 48 | 49 | # InboxConsumer events 50 | def on_consumer_inbox_consumed_one(event) 51 | log_tags = log_tags(event).merge!(status: event[:status]) 52 | msg = "Successfully consumed message with uuid: #{event[:message_uuid]}" 53 | 54 | log_with_tags(log_tags, msg) 55 | end 56 | 57 | private 58 | 59 | def log_tags(event) 60 | metadata = event.payload[:message].metadata 61 | 62 | { 63 | kafka: { 64 | topic: metadata.topic, 65 | partition: metadata.partition, 66 | key: metadata.key, 67 | offset: metadata.offset, 68 | consumer_group: event.payload[:caller].topic.consumer_group.id, 69 |
consume_duration_ms: event.payload[:time] 70 | } 71 | } 72 | end 73 | 74 | def log_with_tags(log_tags, msg) 75 | return unless logger.respond_to?(:tagged) 76 | 77 | logger.tagged(log_tags) do 78 | logger.send(:info, msg) 79 | end 80 | end 81 | end 82 | end 83 | end 84 | end 85 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/open_telemetry_loader.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "opentelemetry" 4 | require "opentelemetry-common" 5 | require "opentelemetry-instrumentation-base" 6 | 7 | require_relative "open_telemetry_tracer" 8 | 9 | module Sbmt 10 | module KafkaConsumer 11 | module Instrumentation 12 | class OpenTelemetryLoader < OpenTelemetry::Instrumentation::Base 13 | install do |_config| 14 | OpenTelemetryTracer.enabled = true 15 | end 16 | 17 | present do 18 | defined?(OpenTelemetryTracer) 19 | end 20 | end 21 | end 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "tracer" 4 | 5 | module Sbmt 6 | module KafkaConsumer 7 | module Instrumentation 8 | class OpenTelemetryTracer < ::Sbmt::KafkaConsumer::Instrumentation::Tracer 9 | CONSUMED_EVENTS = %w[ 10 | consumer.process_message 11 | consumer.mark_as_consumed 12 | ].freeze 13 | 14 | delegate :enabled?, to: :class 15 | 16 | class << self 17 | def enabled? 18 | !!@enabled 19 | end 20 | 21 | attr_writer :enabled 22 | end 23 | 24 | def trace(&block) 25 | return handle_consumed_one(&block) if @event_id == "consumer.consumed_one" 26 | return handle_consumed_batch(&block) if @event_id == "consumer.consumed_batch" 27 | return handle_inbox_consumed_one(&block) if @event_id == "consumer.inbox.consumed_one" 28 | return handle_common_event(&block) if CONSUMED_EVENTS.include?(@event_id) 29 | return handle_error(&block) if @event_id == "error.occurred" 30 | 31 | yield 32 | end 33 | 34 | def handle_consumed_one 35 | return yield unless enabled? 36 | 37 | consumer = @payload[:caller] 38 | message = @payload[:message] 39 | 40 | parent_context = ::OpenTelemetry.propagation.extract(message.headers, getter: ::OpenTelemetry::Context::Propagation.text_map_getter) 41 | span_context = ::OpenTelemetry::Trace.current_span(parent_context).context 42 | links = [::OpenTelemetry::Trace::Link.new(span_context)] if span_context.valid? 43 | 44 | ::OpenTelemetry::Context.with_current(parent_context) do 45 | tracer.in_span("consume #{message.topic}", links: links, attributes: consumer_attrs(consumer, message), kind: :consumer) do 46 | yield 47 | end 48 | end 49 | end 50 | 51 | def handle_consumed_batch 52 | return yield unless enabled? 53 | 54 | consumer = @payload[:caller] 55 | messages = @payload[:messages] 56 | 57 | links = messages.filter_map do |m| 58 | parent_context = ::OpenTelemetry.propagation.extract(m.headers, getter: ::OpenTelemetry::Context::Propagation.text_map_getter) 59 | span_context = ::OpenTelemetry::Trace.current_span(parent_context).context 60 | ::OpenTelemetry::Trace::Link.new(span_context) if span_context.valid? 
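# filter_map drops the nil produced here for messages without a valid
# upstream span context, so only valid links reach the batch span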
61 | end 62 | 63 | tracer.in_span("consume batch", links: links, attributes: batch_attrs(consumer, messages), kind: :consumer) do 64 | yield 65 | end 66 | end 67 | 68 | def handle_inbox_consumed_one 69 | return yield unless enabled? 70 | 71 | inbox_name = @payload[:inbox_name] 72 | event_name = @payload[:event_name] 73 | status = @payload[:status] 74 | 75 | inbox_attributes = { 76 | "inbox.inbox_name" => inbox_name, 77 | "inbox.event_name" => event_name, 78 | "inbox.status" => status 79 | }.compact 80 | 81 | tracer.in_span("inbox #{inbox_name} process", attributes: inbox_attributes, kind: :consumer) do 82 | yield 83 | end 84 | end 85 | 86 | def handle_common_event(&block) 87 | return yield unless enabled? 88 | 89 | if @payload[:inbox_name].present? 90 | handle_inbox_consumed_one(&block) 91 | else 92 | handle_consumed_one(&block) 93 | end 94 | end 95 | 96 | def handle_error 97 | return yield unless enabled? 98 | 99 | current_span = OpenTelemetry::Trace.current_span 100 | current_span&.status = OpenTelemetry::Trace::Status.error 101 | 102 | yield 103 | end 104 | 105 | private 106 | 107 | def tracer 108 | ::Sbmt::KafkaConsumer::Instrumentation::OpenTelemetryLoader.instance.tracer 109 | end 110 | 111 | def consumer_attrs(consumer, message) 112 | attributes = { 113 | "messaging.system" => "kafka", 114 | "messaging.destination" => message.topic, 115 | "messaging.destination_kind" => "topic", 116 | "messaging.kafka.consumer_group" => consumer.topic.consumer_group.id, 117 | "messaging.kafka.partition" => message.partition, 118 | "messaging.kafka.offset" => message.offset 119 | } 120 | 121 | message_key = extract_message_key(message.key) 122 | attributes["messaging.kafka.message_key"] = message_key if message_key 123 | 124 | attributes.compact 125 | end 126 | 127 | def batch_attrs(consumer, messages) 128 | message = messages.first 129 | { 130 | "messaging.system" => "kafka", 131 | "messaging.destination" => message.topic, 132 | "messaging.destination_kind" => "topic", 133 | "messaging.kafka.consumer_group" => consumer.topic.consumer_group.id, 134 | "messaging.batch_size" => messages.count, 135 | "messaging.first_offset" => messages.first.offset, 136 | "messaging.last_offset" => messages.last.offset 137 | }.compact 138 | end 139 | 140 | def extract_message_key(key) 141 | # skip encode if already valid utf8 142 | return key if key.nil? || (key.encoding == Encoding::UTF_8 && key.valid_encoding?) 143 | 144 | key.encode(Encoding::UTF_8) 145 | rescue Encoding::UndefinedConversionError 146 | nil 147 | end 148 | end 149 | end 150 | end 151 | end 152 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/readiness_listener.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Instrumentation 6 | class ReadinessListener 7 | include ListenerHelper 8 | include KafkaConsumer::Probes::Probe 9 | 10 | def initialize 11 | setup_subscription 12 | end 13 | 14 | def on_app_running(_event) 15 | @ready = true 16 | end 17 | 18 | def on_app_stopping(_event) 19 | @ready = false 20 | end 21 | 22 | def probe(_env) 23 | ready? ? probe_ok(ready: true) : probe_error(ready: false) 24 | end 25 | 26 | private 27 | 28 | def ready? 
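# @ready stays nil (falsy) until Karafka emits app.running, so the probe
# reports an error while the process is still booting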
29 | @ready 30 | end 31 | 32 | def setup_subscription 33 | Karafka::App.monitor.subscribe(self) 34 | end 35 | end 36 | end 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/sentry_tracer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "sentry-ruby" 4 | require_relative "tracer" 5 | 6 | module Sbmt 7 | module KafkaConsumer 8 | module Instrumentation 9 | class SentryTracer < ::Sbmt::KafkaConsumer::Instrumentation::Tracer 10 | CONSUMER_ERROR_TYPES = %w[ 11 | consumer.base.consume_one 12 | consumer.base.consumed_batch 13 | consumer.inbox.consume_one 14 | ].freeze 15 | 16 | EVENTS = %w[ 17 | consumer.consumed_one 18 | consumer.process_message 19 | consumer.mark_as_consumed 20 | ].freeze 21 | 22 | def trace(&block) 23 | return handle_consumed_one(&block) if EVENTS.include?(@event_id) 24 | return handle_consumed_batch(&block) if @event_id == "consumer.consumed_batch" 25 | return handle_error(&block) if @event_id == "error.occurred" 26 | 27 | yield 28 | end 29 | 30 | def handle_consumed_one 31 | message = { 32 | trace_id: @payload[:trace_id], 33 | topic: @payload[:message].topic, 34 | offset: @payload[:message].offset 35 | } 36 | 37 | with_sentry_transaction( 38 | @payload[:caller], 39 | message 40 | ) do 41 | yield 42 | end 43 | end 44 | 45 | def handle_consumed_batch 46 | message_first = @payload[:messages].first 47 | message = { 48 | trace_id: @payload[:trace_id], 49 | topic: message_first.topic, 50 | first_offset: message_first.offset, 51 | last_offset: @payload[:messages].last.offset 52 | } 53 | 54 | with_sentry_transaction( 55 | @payload[:caller], 56 | message 57 | ) do 58 | yield 59 | end 60 | end 61 | 62 | def handle_error 63 | return yield unless ::Sentry.initialized? 64 | 65 | exception = @payload[:error] 66 | return yield unless exception.respond_to?(:message) 67 | 68 | ::Sentry.with_scope do |scope| 69 | if detailed_logging_enabled? 70 | message = @payload[:message] 71 | if message.present? 72 | contexts = { 73 | payload: message_payload(message), 74 | metadata: message.metadata 75 | } 76 | scope.set_contexts(contexts: contexts) 77 | end 78 | end 79 | ::Sentry.capture_exception(exception) 80 | end 81 | 82 | yield 83 | end 84 | 85 | private 86 | 87 | def start_transaction(consumer, message) 88 | scope = ::Sentry.get_current_scope 89 | scope.set_tags(message) 90 | scope.set_transaction_name("Sbmt/KafkaConsumer/#{consumer.class.name}") 91 | 92 | transaction = ::Sentry.start_transaction(name: scope.transaction_name, op: "kafka-consumer") 93 | 94 | scope.set_span(transaction) if transaction 95 | 96 | [scope, transaction] 97 | end 98 | 99 | def finish_transaction(transaction, status) 100 | return unless transaction 101 | 102 | transaction.set_http_status(status) 103 | transaction.finish 104 | end 105 | 106 | def detailed_logging_enabled? 107 | consumer = @payload[:caller] 108 | event_type = @payload[:type] 109 | 110 | CONSUMER_ERROR_TYPES.include?(event_type) && consumer.send(:log_payload?) 111 | end 112 | 113 | def message_payload(message) 114 | message.payload 115 | rescue => _ex 116 | # payload triggers deserialization error 117 | # so in that case we return raw_payload 118 | message.raw_payload 119 | end 120 | 121 | def with_sentry_transaction(consumer, message) 122 | return yield unless ::Sentry.initialized? 
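# the transaction started below is finished with status 200 on success;
# on error it is finished with 500 and the exception is re-raised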
123 | 124 | scope, transaction = start_transaction(consumer, message) 125 | 126 | begin 127 | yield 128 | rescue 129 | finish_transaction(transaction, 500) 130 | raise 131 | end 132 | 133 | finish_transaction(transaction, 200) 134 | scope.clear 135 | end 136 | end 137 | end 138 | end 139 | end 140 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/tracer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Instrumentation 6 | class Tracer 7 | def initialize(event_id, payload) 8 | @event_id = event_id 9 | @payload = payload 10 | end 11 | 12 | def trace(&block) 13 | yield 14 | end 15 | end 16 | end 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/tracing_monitor.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Instrumentation 6 | class TracingMonitor < ChainableMonitor 7 | def initialize 8 | tracers = [] 9 | tracers << OpenTelemetryTracer if defined?(OpenTelemetryTracer) 10 | tracers << SentryTracer if defined?(SentryTracer) 11 | 12 | super(tracers) 13 | end 14 | end 15 | end 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/instrumentation/yabeda_metrics_listener.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Instrumentation 6 | class YabedaMetricsListener 7 | include ListenerHelper 8 | 9 | delegate :logger, to: ::Sbmt::KafkaConsumer 10 | 11 | def on_statistics_emitted(event) 12 | # statistics.emitted is being executed in the main rdkafka thread 13 | # so we have to do it in async way to prevent thread's hang issues 14 | report_rdkafka_stats(event) 15 | end 16 | 17 | def on_consumer_consumed(event) 18 | # batch processed 19 | consumer = event[:caller] 20 | 21 | Yabeda.kafka_consumer.batch_size 22 | .measure( 23 | consumer_base_tags(consumer), 24 | consumer.messages.count 25 | ) 26 | 27 | Yabeda.kafka_consumer.process_batch_latency 28 | .measure( 29 | consumer_base_tags(consumer), 30 | time_elapsed_sec(event) 31 | ) 32 | 33 | Yabeda.kafka_consumer.time_lag 34 | .set( 35 | consumer_base_tags(consumer), 36 | consumer.messages.metadata.consumption_lag 37 | ) 38 | end 39 | 40 | def on_consumer_consumed_one(event) 41 | # one message processed by any consumer 42 | 43 | consumer = event[:caller] 44 | Yabeda.kafka_consumer.process_messages 45 | .increment(consumer_base_tags(consumer)) 46 | Yabeda.kafka_consumer.process_message_latency 47 | .measure( 48 | consumer_base_tags(consumer), 49 | time_elapsed_sec(event) 50 | ) 51 | end 52 | 53 | def on_consumer_inbox_consumed_one(event) 54 | # one message processed by InboxConsumer 55 | Yabeda 56 | .kafka_consumer 57 | .inbox_consumes 58 | .increment(consumer_inbox_tags(event)) 59 | end 60 | 61 | def on_error_occurred(event) 62 | caller = event[:caller] 63 | 64 | return unless caller.respond_to?(:messages) 65 | 66 | # caller is a BaseConsumer subclass 67 | case event[:type] 68 | when "consumer.revoked.error" 69 | Yabeda.kafka_consumer.leave_group_errors 70 | .increment(consumer_base_tags(caller)) 71 | when "consumer.consume.error" 72 | 
Yabeda.kafka_consumer.process_batch_errors 73 | .increment(consumer_base_tags(caller)) 74 | when "consumer.base.consume_one" 75 | Yabeda.kafka_consumer.process_message_errors 76 | .increment(consumer_base_tags(caller)) 77 | when "consumer.inbox.consume_one" 78 | Yabeda.kafka_consumer.inbox_consumes 79 | .increment(consumer_inbox_tags(event)) 80 | end 81 | end 82 | 83 | private 84 | 85 | def consumer_base_tags(consumer) 86 | { 87 | client: Karafka::App.config.client_id, 88 | group_id: consumer.topic.consumer_group.id, 89 | topic: consumer.messages.metadata.topic, 90 | partition: consumer.messages.metadata.partition 91 | } 92 | end 93 | 94 | def consumer_inbox_tags(event) 95 | caller = event[:caller] 96 | 97 | consumer_base_tags(caller) 98 | .merge(inbox_tags(event)) 99 | end 100 | 101 | def report_rdkafka_stats(event, async: true) 102 | thread = Thread.new do 103 | # https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md 104 | stats = event.payload[:statistics] 105 | consumer_group_id = event.payload[:consumer_group_id] 106 | consumer_group_stats = stats["cgrp"] 107 | broker_stats = stats["brokers"] 108 | topic_stats = stats["topics"] 109 | 110 | report_broker_stats(broker_stats) 111 | report_consumer_group_stats(consumer_group_id, consumer_group_stats) 112 | report_topic_stats(consumer_group_id, topic_stats) 113 | rescue => e 114 | logger.error("exception happened while reporting rdkafka metrics: #{e.message}") 115 | logger.error(e.backtrace&.join("\n")) 116 | end 117 | 118 | thread.join unless async 119 | end 120 | 121 | def report_broker_stats(brokers) 122 | brokers.each_value do |broker_statistics| 123 | # Skip bootstrap nodes 124 | next if broker_statistics["nodeid"] == -1 125 | 126 | broker_tags = { 127 | client: Karafka::App.config.client_id, 128 | broker: broker_statistics["nodename"] 129 | } 130 | 131 | Yabeda.kafka_api.calls 132 | .increment(broker_tags, by: broker_statistics["tx_d"]) 133 | Yabeda.kafka_api.latency 134 | .measure(broker_tags, broker_statistics["rtt"]["avg"]) 135 | Yabeda.kafka_api.request_size 136 | .measure(broker_tags, broker_statistics["txbytes"]) 137 | Yabeda.kafka_api.response_size 138 | .measure(broker_tags, broker_statistics["rxbytes"]) 139 | Yabeda.kafka_api.errors 140 | .increment(broker_tags, by: broker_statistics["txerrs_d"] + broker_statistics["rxerrs_d"]) 141 | end 142 | end 143 | 144 | def report_consumer_group_stats(group_id, group_stats) 145 | return if group_stats.blank? 146 | 147 | cg_tags = { 148 | client: Karafka::App.config.client_id, 149 | group_id: group_id, 150 | state: group_stats["state"] 151 | } 152 | 153 | Yabeda.kafka_consumer.group_rebalances 154 | .set(cg_tags, group_stats["rebalance_cnt"]) 155 | end 156 | 157 | def report_topic_stats(group_id, topic_stats) 158 | return if topic_stats.blank? 
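# topic_stats mirrors librdkafka's statistics payload; an abridged,
# hypothetical sketch of its shape:
#
#   {"orders" => {"partitions" => {"0" => {"consumer_lag" => 5, "fetch_state" => "active"}}}}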
159 | 160 | topic_stats.each do |topic_name, topic_values| 161 | topic_values["partitions"].each do |partition_name, partition_statistics| 162 | next if partition_name == "-1" 163 | 164 | # Skip until lag info is available 165 | offset_lag = partition_statistics["consumer_lag"] 166 | next if offset_lag == -1 167 | 168 | unless partition_owned?(partition_statistics) 169 | # reset offset lag after cg rebalance 170 | offset_lag = 0 171 | end 172 | 173 | Yabeda.kafka_consumer.offset_lag 174 | .set({ 175 | client: Karafka::App.config.client_id, 176 | group_id: group_id, 177 | topic: topic_name, 178 | partition: partition_name 179 | }, 180 | offset_lag) 181 | end 182 | end 183 | end 184 | 185 | def time_elapsed_sec(event) 186 | (event.payload[:time] || 0) / 1000.0 187 | end 188 | 189 | def partition_owned?(partition_statistics) 190 | return false if partition_statistics["fetch_state"] == "stopped" 191 | return false if partition_statistics["fetch_state"] == "none" 192 | 193 | true 194 | end 195 | end 196 | end 197 | end 198 | end 199 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/probes/host.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rack" 4 | require "rackup/handler/webrick" if Gem::Version.new(::Rack.release) >= Gem::Version.new("3") 5 | 6 | module Sbmt 7 | module KafkaConsumer 8 | module Probes 9 | class Host 10 | class << self 11 | def run_async 12 | config = Sbmt::KafkaConsumer::Config.new 13 | if config.probes[:port] == config.metrics[:port] 14 | start_on_single_port(config) 15 | else 16 | start_on_different_ports(config) 17 | end 18 | end 19 | 20 | def webrick 21 | return ::Rack::Handler::WEBrick if Gem::Version.new(::Rack.release) < Gem::Version.new("3") 22 | 23 | ::Rackup::Handler::WEBrick 24 | end 25 | 26 | private 27 | 28 | def health_check_app(config) 29 | ::HttpHealthCheck::RackApp.configure do |c| 30 | c.logger Rails.logger unless Rails.env.production? 31 | 32 | liveness = config[:liveness] 33 | if liveness[:enabled] 34 | c.probe liveness[:path], Sbmt::KafkaConsumer::Instrumentation::LivenessListener.new( 35 | timeout_sec: liveness[:timeout], max_error_count: liveness[:max_error_count] 36 | ) 37 | end 38 | 39 | readiness = config[:readiness] 40 | if readiness[:enabled] 41 | c.probe readiness[:path], Sbmt::KafkaConsumer::Instrumentation::ReadinessListener.new 42 | end 43 | end 44 | end 45 | 46 | def start_on_single_port(config) 47 | app = health_check_app(config.probes[:endpoints]) 48 | middlewares = defined?(Yabeda) ? 
{::Yabeda::Prometheus::Exporter => {path: config.metrics[:path]}} : {} 49 | start_webrick(app, middlewares: middlewares, port: config.probes[:port]) 50 | end 51 | 52 | def start_on_different_ports(config) 53 | ::HttpHealthCheck.run_server_async( 54 | port: config.probes[:port], 55 | rack_app: health_check_app(config.probes[:endpoints]) 56 | ) 57 | if defined?(Yabeda) 58 | start_webrick( 59 | Yabeda::Prometheus::Mmap::Exporter::NOT_FOUND_HANDLER, 60 | middlewares: {::Yabeda::Prometheus::Exporter => {path: config.metrics[:path]}}, 61 | port: config.metrics[:port] 62 | ) 63 | end 64 | end 65 | 66 | def start_webrick(app, middlewares:, port:) 67 | Thread.new do 68 | webrick.run( 69 | ::Rack::Builder.new do 70 | middlewares.each do |middleware, options| 71 | use middleware, **options 72 | end 73 | run app 74 | end, 75 | Host: "0.0.0.0", 76 | Port: port 77 | ) 78 | end 79 | end 80 | end 81 | end 82 | end 83 | end 84 | end 85 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/probes/probe.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Probes 6 | module Probe 7 | HEADERS = {"Content-Type" => "application/json"}.freeze 8 | 9 | def call(env) 10 | with_error_handler { probe(env) } 11 | end 12 | 13 | def meta 14 | {} 15 | end 16 | 17 | def probe_ok(extra_meta = {}) 18 | [200, HEADERS, [meta.merge(extra_meta).to_json]] 19 | end 20 | 21 | def probe_error(extra_meta = {}) 22 | KafkaConsumer.logger.error("probe error meta: #{meta.merge(extra_meta).inspect}") 23 | [500, HEADERS, [meta.merge(extra_meta).to_json]] 24 | end 25 | 26 | def with_error_handler 27 | yield 28 | rescue => error 29 | probe_error(error_class: error.class.name, error_message: error.message) 30 | end 31 | end 32 | end 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/railtie.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails/railtie" 4 | 5 | module Sbmt 6 | module KafkaConsumer 7 | class Railtie < Rails::Railtie 8 | config.before_initialize do 9 | require "sbmt/kafka_consumer/yabeda_configurer" 10 | ::Sbmt::KafkaConsumer::YabedaConfigurer.configure 11 | end 12 | 13 | # it must be consistent with sbmt_karafka initializers' name 14 | initializer "sbmt_kafka_consumer_karafka_init.configure_rails_initialization", 15 | before: "karafka.require_karafka_boot_file" do 16 | # skip loading native karafka.rb, because we want custom init process 17 | Karafka.instance_eval do 18 | def boot_file; false; end 19 | end 20 | end 21 | 22 | initializer "sbmt_kafka_consumer_opentelemetry_init.configure_rails_initialization", 23 | after: "opentelemetry.configure" do 24 | require "sbmt/kafka_consumer/instrumentation/open_telemetry_loader" if defined?(::OpenTelemetry) 25 | end 26 | 27 | config.after_initialize do 28 | require "sbmt/kafka_consumer/instrumentation/sentry_tracer" if defined?(::Sentry) 29 | end 30 | end 31 | end 32 | end 33 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/routing/consumer_mapper/base.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Routing 6 | module ConsumerMapper 7 | class Base 8 | # @param 
raw_consumer_group_name [String, Symbol] string or symbolized consumer group name 9 | # @return [String] remapped final consumer group name 10 | def call(raw_consumer_group_name) 11 | raise "Implement #call in a subclass" 12 | end 13 | end 14 | end 15 | end 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/routing/karafka_v1_consumer_mapper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "consumer_mapper/base" 4 | 5 | module Sbmt 6 | module KafkaConsumer 7 | module Routing 8 | class KarafkaV1ConsumerMapper < ConsumerMapper::Base 9 | # karafka v1 consumer group name mapper 10 | def call(raw_consumer_group_name) 11 | client_id = ActiveSupport::Inflector.underscore(Karafka::App.config.client_id).tr("/", "_") 12 | "#{client_id}_#{raw_consumer_group_name}" 13 | end 14 | end 15 | end 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/routing/karafka_v2_consumer_mapper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "consumer_mapper/base" 4 | 5 | module Sbmt 6 | module KafkaConsumer 7 | module Routing 8 | # karafka v2 (before 2.4) consumer group name mapper 9 | class KarafkaV2ConsumerMapper < ConsumerMapper::Base 10 | def call(raw_consumer_group_name) 11 | "#{Karafka::App.config.client_id}_#{raw_consumer_group_name}" 12 | end 13 | end 14 | end 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/serialization/base_deserializer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Serialization 6 | class BaseDeserializer 7 | attr_reader :skip_decoding_error 8 | 9 | def initialize(skip_decoding_error: false) 10 | @skip_decoding_error = skip_decoding_error 11 | end 12 | 13 | def call(_message) 14 | raise NotImplementedError, "Implement this in a subclass" 15 | end 16 | end 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/serialization/json_deserializer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Serialization 6 | class JsonDeserializer < BaseDeserializer 7 | def call(message) 8 | # nil payload can be present for example for tombstone messages 9 | message.raw_payload.nil? ? 
nil : ::JSON.parse(message.raw_payload) 10 | rescue JSON::ParserError => e 11 | raise Sbmt::KafkaConsumer::SkipUndeserializableMessage, "cannot decode message: #{e.message}, payload: #{message.raw_payload}" if skip_decoding_error 12 | 13 | raise 14 | end 15 | end 16 | end 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/serialization/null_deserializer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Serialization 6 | class NullDeserializer < BaseDeserializer 7 | def call(message) 8 | message.raw_payload 9 | end 10 | end 11 | end 12 | end 13 | end 14 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/serialization/protobuf_deserializer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "google/protobuf" 4 | 5 | module Sbmt 6 | module KafkaConsumer 7 | module Serialization 8 | class ProtobufDeserializer < BaseDeserializer 9 | attr_reader :message_decoder 10 | 11 | def initialize(message_decoder_klass:, skip_decoding_error: false) 12 | super(skip_decoding_error: skip_decoding_error) 13 | 14 | @message_decoder = message_decoder_klass.constantize 15 | end 16 | 17 | def call(message) 18 | message_decoder.decode(message.raw_payload) 19 | rescue Google::Protobuf::ParseError, ArgumentError => e 20 | raise Sbmt::KafkaConsumer::SkipUndeserializableMessage, "cannot decode message: #{e.message}, payload: #{message.raw_payload}" if skip_decoding_error 21 | 22 | raise 23 | end 24 | end 25 | end 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/server.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | class Server < Karafka::Server 6 | class << self 7 | # original klass tries to validate karafka-specific server cli-options which we override 8 | # see Karafka::Server for details 9 | def run 10 | Karafka::Server.listeners = [] 11 | Karafka::Server.workers = [] 12 | 13 | process.on_sigint { Karafka::Server.stop } 14 | process.on_sigquit { Karafka::Server.stop } 15 | process.on_sigterm { Karafka::Server.stop } 16 | process.on_sigtstp { Karafka::Server.quiet } 17 | process.supervise 18 | 19 | $stdout.puts "Starting server" 20 | Karafka::Server.start 21 | 22 | sleep(0.1) until Karafka::App.terminated? 23 | # rubocop:disable Lint/RescueException 24 | rescue Exception => e 25 | $stdout.puts "Cannot start server: #{e.message}" 26 | 27 | # rubocop:enable Lint/RescueException 28 | Karafka::Server.stop 29 | 30 | raise e 31 | end 32 | end 33 | end 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/simple_logging_consumer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sbmt::KafkaConsumer::SimpleLoggingConsumer < Sbmt::KafkaConsumer::BaseConsumer 4 | private 5 | 6 | def log_payload? 
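# when enabled, tracers (e.g. SentryTracer) may attach the message payload
# to error reports for this consumer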
7 | true 8 | end 9 | 10 | def process_message(_message); end 11 | end 12 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/testing.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rspec" 4 | 5 | Dir["#{__dir__}/testing/shared_contexts/*.rb"].sort.each { |f| require f } 6 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/testing/shared_contexts/with_sbmt_karafka_consumer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | RSpec.shared_context "with sbmt karafka consumer" do 4 | subject(:consume_with_sbmt_karafka) do 5 | coordinator.increment(:consume) 6 | consumer.on_consume 7 | end 8 | 9 | let(:coordinator) { 10 | instance = Karafka::Processing::Coordinator.new(test_topic, 0, instance_double(Karafka::TimeTrackers::Pause)) 11 | instance.instance_variable_set(:@seek_offset, -1) 12 | instance 13 | } 14 | let(:test_consumer_group) { Karafka::Routing::ConsumerGroup.new(:test_group) } 15 | let(:test_topic) { Karafka::Routing::Topic.new(:test_topic, test_consumer_group) } 16 | let(:kafka_client) { instance_double(Karafka::Connection::Client) } 17 | let(:null_deserializer) { Sbmt::KafkaConsumer::Serialization::NullDeserializer.new } 18 | 19 | let(:consumer_class) { described_class.consumer_klass } 20 | let(:consumer) { build_consumer(consumer_class.new) } 21 | 22 | before { 23 | Sbmt::KafkaConsumer::ClientConfigurer.configure! 24 | allow(kafka_client).to receive(:assignment_lost?).and_return(false) 25 | allow(kafka_client).to receive(:mark_as_consumed!).and_return(true) 26 | allow(kafka_client).to receive(:mark_as_consumed).and_return(true) 27 | } 28 | 29 | def publish_to_sbmt_karafka(raw_payload, opts = {}) 30 | message = Karafka::Messages::Message.new(raw_payload, Karafka::Messages::Metadata.new(build_metadata_hash(opts))) 31 | consumer.messages = consumer_messages([message]) 32 | end 33 | 34 | def publish_to_sbmt_karafka_batch(raw_payloads, opts = {}) 35 | messages = raw_payloads.map do |p| 36 | Karafka::Messages::Message.new(p, Karafka::Messages::Metadata.new(build_metadata_hash(opts))) 37 | end 38 | consumer.messages = consumer_messages(messages) 39 | end 40 | 41 | # @return [Hash] message default options 42 | def build_metadata_hash(opts) 43 | { 44 | deserializers: test_topic.deserializers(payload: opts[:deserializer] || null_deserializer), 45 | raw_headers: opts[:headers] || {}, 46 | raw_key: opts[:key], 47 | offset: opts[:offset] || 0, 48 | partition: opts[:partition] || 0, 49 | received_at: opts[:received_at] || Time.current, 50 | topic: opts[:topic] || test_topic.name 51 | } 52 | end 53 | 54 | def build_consumer(instance) 55 | instance.coordinator = coordinator 56 | instance.client = kafka_client 57 | instance.singleton_class.include Karafka::Processing::Strategies::Default 58 | instance 59 | end 60 | 61 | private 62 | 63 | def consumer_messages(messages) 64 | Karafka::Messages::Messages.new( 65 | messages, 66 | Karafka::Messages::BatchMetadata.new( 67 | topic: test_topic.name, 68 | partition: 0, 69 | processed_at: Time.zone.now, 70 | created_at: Time.zone.now 71 | ) 72 | ) 73 | end 74 | end 75 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/types.rb: -------------------------------------------------------------------------------- 1 | # 
frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | module Types 6 | include Dry.Types 7 | 8 | ConfigAttrs = Dry::Types["hash"].constructor { |hsh| hsh.deep_symbolize_keys } 9 | 10 | ConfigConsumer = Types.Constructor(Config::Consumer) 11 | ConfigDeserializer = Types.Constructor(Config::Deserializer) 12 | ConfigTopic = Types.Constructor(Config::Topic) 13 | end 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/version.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | VERSION = "3.5.1" 6 | end 7 | end 8 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_consumer/yabeda_configurer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaConsumer 5 | class YabedaConfigurer 6 | SIZE_BUCKETS = [1, 10, 100, 1000, 10_000, 100_000, 1_000_000].freeze 7 | LATENCY_BUCKETS = [0.0001, 0.001, 0.01, 0.1, 1.0, 10, 100, 1000].freeze 8 | DELAY_BUCKETS = [1, 3, 10, 30, 100, 300, 1000, 3000, 10_000, 30_000].freeze 9 | def self.configure 10 | Yabeda.configure do 11 | group :kafka_api do 12 | counter :calls, 13 | tags: %i[client broker api], 14 | comment: "API calls" 15 | histogram :latency, 16 | tags: %i[client broker api], 17 | buckets: LATENCY_BUCKETS, 18 | comment: "API latency" 19 | histogram :request_size, 20 | tags: %i[client broker api], 21 | buckets: SIZE_BUCKETS, 22 | comment: "API request size" 23 | histogram :response_size, 24 | tags: %i[client broker api], 25 | buckets: SIZE_BUCKETS, 26 | comment: "API response size" 27 | counter :errors, 28 | tags: %i[client broker api], 29 | comment: "API errors" 30 | end 31 | 32 | group :kafka_consumer do 33 | gauge :group_rebalances, 34 | tags: %i[client group_id state], 35 | comment: "Consumer group rebalances" 36 | 37 | counter :process_messages, 38 | tags: %i[client group_id topic partition], 39 | comment: "Messages consumed" 40 | 41 | counter :process_message_errors, 42 | tags: %i[client group_id topic partition], 43 | comment: "Messages failed to process" 44 | 45 | histogram :process_message_latency, 46 | tags: %i[client group_id topic partition], 47 | buckets: LATENCY_BUCKETS, 48 | comment: "Consumer latency" 49 | 50 | gauge :offset_lag, 51 | tags: %i[client group_id topic partition], 52 | comment: "Consumer offset lag" 53 | 54 | gauge :time_lag, 55 | tags: %i[client group_id topic partition], 56 | comment: "Consumer time lag" 57 | 58 | counter :process_batch_errors, 59 | tags: %i[client group_id topic partition], 60 | comment: "Messages failed to process" 61 | 62 | histogram :process_batch_latency, 63 | tags: %i[client group_id topic partition], 64 | buckets: LATENCY_BUCKETS, 65 | comment: "Consumer batch latency" 66 | 67 | histogram :batch_size, 68 | tags: %i[client group_id topic partition], 69 | buckets: SIZE_BUCKETS, 70 | comment: "Consumer batch size" 71 | 72 | counter :leave_group_errors, 73 | tags: %i[client group_id], 74 | comment: "Consumer group leave errors" 75 | 76 | gauge :pause_duration, 77 | tags: %i[client group_id topic partition], 78 | comment: "Consumer pause duration" 79 | 80 | counter :inbox_consumes, 81 | tags: %i[ 82 | client group_id topic partition 83 | inbox_name event_name status 84 | ], 85 | comment: "Inbox item consumes" 86 | end 87 | end 88 | end 89 | end 90 | end 
91 | end 92 | -------------------------------------------------------------------------------- /rubocop/rspec.yml: -------------------------------------------------------------------------------- 1 | RSpec/AnyInstance: 2 | Enabled: false 3 | 4 | RSpec/MultipleExpectations: 5 | Enabled: false 6 | 7 | RSpec/LetSetup: 8 | Enabled: false 9 | 10 | RSpec/StubbedMock: 11 | Enabled: false 12 | 13 | RSpec/MessageSpies: 14 | Enabled: false 15 | 16 | RSpec/NestedGroups: 17 | Enabled: false 18 | 19 | RSpec/EmptyExampleGroup: 20 | Enabled: false 21 | 22 | RSpec/ExampleLength: 23 | Enabled: false 24 | 25 | RSpec/MultipleMemoizedHelpers: 26 | Enabled: false 27 | 28 | RSpec/VariableName: 29 | Enabled: false 30 | -------------------------------------------------------------------------------- /sbmt-kafka_consumer.gemspec: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "lib/sbmt/kafka_consumer/version" 4 | 5 | Gem::Specification.new do |spec| 6 | spec.name = "sbmt-kafka_consumer" 7 | spec.license = "MIT" 8 | spec.version = Sbmt::KafkaConsumer::VERSION 9 | spec.authors = ["Kuper Ruby-Platform Team"] 10 | 11 | spec.summary = "Ruby gem for consuming Kafka messages" 12 | spec.description = "This gem is used for consuming Kafka messages. It is a wrapper over the Karafka gem and is recommended for use as a transport with sbmt-outbox." 13 | spec.homepage = "https://github.com/Kuper-Tech/sbmt-kafka_consumer" 14 | spec.required_ruby_version = ">= 3.0.0" 15 | 16 | spec.metadata["allowed_push_host"] = ENV.fetch("NEXUS_URL", "https://rubygems.org") 17 | 18 | spec.metadata["homepage_uri"] = spec.homepage 19 | spec.metadata["source_code_uri"] = spec.homepage 20 | spec.metadata["changelog_uri"] = "#{spec.homepage}/blob/master/CHANGELOG.md" 21 | spec.metadata["rubygems_mfa_required"] = "false" # rubocop:disable Gemspec/RequireMFA 22 | 23 | # Specify which files should be added to the gem when it is released. 24 | # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
25 | spec.files = Dir.chdir(__dir__) do 26 | `git ls-files -z`.split("\x0").reject do |f| 27 | (f == __FILE__) || f.match(%r{\A(?:(?:bin|test|spec|features)/|\.(?:git|travis|circleci)|appveyor)}) 28 | end 29 | end 30 | spec.bindir = "exe" 31 | spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) } 32 | spec.require_paths = ["lib"] 33 | 34 | spec.add_dependency "rails", ">= 6.1" 35 | spec.add_dependency "zeitwerk", "~> 2.3" 36 | spec.add_dependency "karafka", "~> 2.4" 37 | spec.add_dependency "yabeda", ">= 0.11" 38 | spec.add_dependency "anyway_config", ">= 2.4.0" 39 | spec.add_dependency "thor" 40 | spec.add_dependency "dry-struct" 41 | 42 | spec.add_development_dependency "appraisal", ">= 2.4" 43 | spec.add_development_dependency "bundler", ">= 2.1" 44 | spec.add_development_dependency "combustion", ">= 1.3" 45 | spec.add_development_dependency "rake", ">= 13.0" 46 | spec.add_development_dependency "dry-monads", ">= 1.3" 47 | spec.add_development_dependency "factory_bot_rails" 48 | spec.add_development_dependency "pg" 49 | spec.add_development_dependency "google-protobuf" 50 | spec.add_development_dependency "sentry-rails", ">= 5.2" 51 | spec.add_development_dependency "opentelemetry-sdk" 52 | spec.add_development_dependency "opentelemetry-api", ">= 0.17" 53 | spec.add_development_dependency "opentelemetry-common", ">= 0.17" 54 | spec.add_development_dependency "opentelemetry-instrumentation-base", ">= 0.17" 55 | spec.add_development_dependency "rspec", ">= 3.0" 56 | spec.add_development_dependency "rspec_junit_formatter", ">= 0.6" 57 | spec.add_development_dependency "rspec-rails", ">= 4.0" 58 | spec.add_development_dependency "rubocop-rails", ">= 2.5" 59 | spec.add_development_dependency "rubocop-rspec", ">= 2.11" 60 | spec.add_development_dependency "sbmt-outbox", ">= 5.0" 61 | spec.add_development_dependency "simplecov", ">= 0.16" 62 | spec.add_development_dependency "standard", ">= 1.12" 63 | 64 | # let metrics and probes work in dev-mode with combustion 65 | # e.g. 
RAILS_ENV=development bundle exec kafka_consumer 66 | spec.add_development_dependency "yabeda-prometheus-mmap" 67 | spec.add_development_dependency "webrick" 68 | spec.add_development_dependency "rack" 69 | spec.add_development_dependency "http_health_check" 70 | end 71 | -------------------------------------------------------------------------------- /spec/factories/inbox_item.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | FactoryBot.define do 4 | factory :inbox_item, class: "TestInboxItem" do 5 | proto_payload { "test" } 6 | sequence :event_key 7 | bucket { 0 } 8 | event_name { "event-name" } 9 | end 10 | end 11 | -------------------------------------------------------------------------------- /spec/factories/karafka/batch_metadata.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | FactoryBot.define do 4 | factory :messages_batch_metadata, class: "Karafka::Messages::BatchMetadata" do 5 | skip_create 6 | 7 | size { 0 } 8 | first_offset { 0 } 9 | sequence(:last_offset) { |nr| nr } 10 | topic { "topic" } 11 | partition { 0 } 12 | deserializers { 13 | { 14 | payload: ->(message) { message.raw_payload } 15 | } 16 | } 17 | created_at { Time.now.utc } 18 | scheduled_at { Time.now.utc } 19 | processed_at { Time.now.utc } 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /spec/factories/karafka/message.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | FactoryBot.define do 4 | factory :messages_message, class: "Karafka::Messages::Message" do 5 | skip_create 6 | 7 | transient do 8 | topic { "topic" } 9 | partition { 0 } 10 | timestamp { Time.now.utc } 11 | end 12 | 13 | raw_payload { "{}" } 14 | 15 | # rubocop:disable FactoryBot/FactoryAssociationWithStrategy 16 | metadata do 17 | build( 18 | :messages_metadata, 19 | topic: topic, 20 | partition: partition, 21 | timestamp: timestamp 22 | ) 23 | end 24 | # rubocop:enable FactoryBot/FactoryAssociationWithStrategy 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /spec/factories/karafka/metadata.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | FactoryBot.define do 4 | factory :messages_metadata, class: "Karafka::Messages::Metadata" do 5 | skip_create 6 | 7 | topic { "topic" } 8 | sequence(:offset) { |nr| nr } 9 | partition { 0 } 10 | deserializers { 11 | { 12 | payload: ->(message) { message.raw_payload } 13 | } 14 | } 15 | timestamp { Time.now.utc } 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /spec/internal/Kafkafile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | ENV["RAILS_ENV"] ||= "test" 4 | 5 | ENGINE_ROOT = Pathname.new(File.expand_path("../..", __dir__)) 6 | 7 | require "combustion" 8 | require "yabeda/prometheus/mmap" 9 | require "http_health_check" 10 | 11 | Dir.chdir(File.expand_path("../..", __dir__)) do 12 | Combustion.initialize! 
:active_record, database_reset: false, load_schema: false, database_migrate: false do 13 | config.logger = ActiveSupport::TaggedLogging.new(Logger.new($stdout)) 14 | config.log_level = :info 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /spec/internal/app/interactors/test_inbox_item_transport.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class TestInboxItemTransport < Sbmt::Outbox::DryInteractor 4 | option :source 5 | 6 | def call(_inbox_item, _payload) 7 | Success() 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /spec/internal/app/models/application_record.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class ApplicationRecord < ActiveRecord::Base 4 | self.abstract_class = true 5 | end 6 | -------------------------------------------------------------------------------- /spec/internal/app/models/test_inbox_item.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class TestInboxItem < Sbmt::Outbox::InboxItem 4 | def track_metrics_after_consume; end 5 | end 6 | -------------------------------------------------------------------------------- /spec/internal/config/database.yml: -------------------------------------------------------------------------------- 1 | test: 2 | adapter: postgresql 3 | url: <%= ENV.fetch("DATABASE_URL") %> 4 | database: kafka_consumer_test 5 | -------------------------------------------------------------------------------- /spec/internal/config/initializers/open_telemetry.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | return if Rails.env.test? 
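# (the guard above keeps test runs from configuring a real exporter; the
# tracer specs stub Sbmt::KafkaConsumer::Instrumentation::OpenTelemetryLoader
# instead of emitting real spans)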
4 | 5 | OpenTelemetry::SDK.configure do |c| 6 | c.add_span_processor( 7 | OpenTelemetry::SDK::Trace::Export::BatchSpanProcessor.new( 8 | OpenTelemetry::SDK::Trace::Export::ConsoleSpanExporter.new 9 | ) 10 | ) 11 | c.service_name = "rails-app" 12 | c.use_all 13 | end 14 | -------------------------------------------------------------------------------- /spec/internal/config/initializers/outbox.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Rails.application.config.outbox.tap do |config| 4 | config.paths << Rails.root.join("config/outbox.yml").to_s 5 | end 6 | -------------------------------------------------------------------------------- /spec/internal/config/initializers/protobuf.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | %w[local vendor].each do |dir| 4 | Rails.root.glob("protobuf/#{dir}/compile/**/*.rb").sort.each do |file| 5 | # Suppress messages about downcased letters in constants 6 | Kernel.silence_warnings do 7 | require_relative(file) 8 | end 9 | end 10 | end 11 | 12 | Rails.root.glob("pkg/**/*.rb").sort.each do |file| 13 | # Suppress messages about downcased letters in constants 14 | Kernel.silence_warnings do 15 | require_relative(file) 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /spec/internal/config/initializers/sentry.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | SENTRY_DUMMY_DSN = "http://12345:67890@sentry.localdomain/sentry/42" 4 | Sentry.init do |config| 5 | config.dsn = SENTRY_DUMMY_DSN 6 | config.enabled_environments = [Rails.env] 7 | config.background_worker_threads = 1 8 | config.transport.transport_class = Class.new(Sentry::HTTPTransport) do 9 | def send_data(data) 10 | # for local sentry testing purposes 11 | Rails.logger.info("sending #{data.inspect} to Sentry") 12 | sleep 1 # rubocop:disable Rails/CallingSleepInsideTests 13 | end 14 | end 15 | config.traces_sample_rate = 1.0 16 | end 17 | -------------------------------------------------------------------------------- /spec/internal/config/kafka_consumer.yml: -------------------------------------------------------------------------------- 1 | default: &default 2 | client_id: 'some-name' 3 | auth: 4 | kind: plaintext 5 | kafka: 6 | servers: "kafka:9092" 7 | kafka_options: 8 | allow.auto.create.topics: true 9 | consumer_groups: 10 | group_id_1: 11 | name: cg_with_single_topic 12 | topics: 13 | - name: topic_with_inbox_items 14 | consumer: 15 | klass: "Sbmt::KafkaConsumer::InboxConsumer" 16 | init_attrs: 17 | name: "test_items" 18 | inbox_item: "TestInboxItem" 19 | kafka_options: 20 | auto.offset.reset: latest 21 | group_id_2: 22 | name: cg_with_multiple_topics 23 | topics: 24 | - name: topic_with_json_data 25 | consumer: 26 | klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer" 27 | init_attrs: 28 | skip_on_error: true 29 | deserializer: 30 | klass: "Sbmt::KafkaConsumer::Serialization::JsonDeserializer" 31 | init_attrs: 32 | skip_decoding_error: true 33 | - name: inactive_topic_with_autocommit 34 | active: false 35 | manual_offset_management: false 36 | consumer: 37 | klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer" 38 | - name: topic_with_protobuf_data 39 | consumer: 40 | klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer" 41 | deserializer: 42 | klass: "Sbmt::KafkaConsumer::Serialization::ProtobufDeserializer" 43 |
init_attrs: 44 | message_decoder_klass: "Sso::UserRegistration" 45 | skip_decoding_error: true 46 | - name: topic-name-with.dots-dashes_and_underscores 47 | consumer: 48 | klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer" 49 | 50 | probes: 51 | port: <%= ENV.fetch('PROMETHEUS_EXPORTER_PORT') { '9394' } %> 52 | 53 | metrics: 54 | # port: 9394 55 | path: "/metrics" 56 | 57 | development: 58 | <<: *default 59 | test: 60 | <<: *default 61 | deliver: false 62 | staging: &staging 63 | <<: *default 64 | production: 65 | <<: *staging 66 | -------------------------------------------------------------------------------- /spec/internal/config/outbox.yml: -------------------------------------------------------------------------------- 1 | test: 2 | bucket_size: 4 3 | 4 | inbox_items: 5 | test_inbox_item: 6 | partition_size: 2 7 | partition_strategy: number 8 | retention: P1W 9 | retry_strategies: 10 | - exponential_backoff 11 | transports: 12 | test_inbox_item_transport: 13 | source: "kafka_consumer" 14 | -------------------------------------------------------------------------------- /spec/internal/db/schema.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | ActiveRecord::Schema.define do 4 | create_table :test_inbox_items do |t| 5 | t.string :uuid, null: false 6 | t.string :event_name, null: false 7 | t.bigint :event_key, null: false 8 | t.bigint :bucket, null: false 9 | t.json :options 10 | t.binary :proto_payload, null: false 11 | t.integer :status, null: false, default: 0 12 | t.integer :errors_count, null: false, default: 0 13 | t.text :error_log 14 | t.timestamp :processed_at 15 | t.timestamps 16 | end 17 | 18 | add_index :test_inbox_items, :uuid, unique: true 19 | add_index :test_inbox_items, [:status, :bucket] 20 | add_index :test_inbox_items, [:event_name, :event_key] 21 | add_index :test_inbox_items, :created_at 22 | end 23 | -------------------------------------------------------------------------------- /spec/internal/deps/services/sso/events/shopper_registration.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | import "google/protobuf/timestamp.proto"; 4 | 5 | package sso; 6 | 7 | option go_package="grpc/user_registrations"; 8 | 9 | message UserRegistration { 10 | string user_uuid = 1; 11 | string phone = 2; 12 | google.protobuf.Timestamp created_at = 4; 13 | } 14 | -------------------------------------------------------------------------------- /spec/internal/pkg/client/sso/events/shopper_registration_pb.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 3 | # source: sso/events/shopper_registration.proto 4 | 5 | require 'google/protobuf' 6 | 7 | require 'google/protobuf/timestamp_pb' 8 | 9 | 10 | descriptor_data = "\n%sso/events/shopper_registration.proto\x12\x03sso\x1a\x1fgoogle/protobuf/timestamp.proto\"d\n\x10UserRegistration\x12\x11\n\tuser_uuid\x18\x01 \x01(\t\x12\r\n\x05phone\x18\x02 \x01(\t\x12.\n\ncreated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x19Z\x17grpc/user_registrationsb\x06proto3" 11 | 12 | pool = Google::Protobuf::DescriptorPool.generated_pool 13 | 14 | begin 15 | pool.add_serialized_file(descriptor_data) 16 | rescue TypeError 17 | # Compatibility code: will be removed in the next major version. 
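# (the rescue below decodes the serialized descriptor, strips its
# dependencies, retries registration, and warns when an import was loaded
# under a different path)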
18 | require 'google/protobuf/descriptor_pb' 19 | parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data) 20 | parsed.clear_dependency 21 | serialized = parsed.class.encode(parsed) 22 | file = pool.add_serialized_file(serialized) 23 | warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}" 24 | imports = [ 25 | ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"], 26 | ] 27 | imports.each do |type_name, expected_filename| 28 | import_file = pool.lookup(type_name).file_descriptor 29 | if import_file.name != expected_filename 30 | warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}" 31 | end 32 | end 33 | warn "Each proto file must use a consistent fully-qualified name." 34 | warn "This will become an error in the next major version." 35 | end 36 | 37 | module Sso 38 | UserRegistration = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("sso.UserRegistration").msgclass 39 | end 40 | -------------------------------------------------------------------------------- /spec/rails_helper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # Engine root is used by rails_configuration to correctly 4 | # load fixtures and support files 5 | require "pathname" 6 | ENGINE_ROOT = Pathname.new(File.expand_path("..", __dir__)) 7 | 8 | require "spec_helper" 9 | require "logger" 10 | require "combustion" 11 | 12 | RSpec::Matchers.define_negated_matcher :not_increment_yabeda_counter, :increment_yabeda_counter 13 | RSpec::Matchers.define_negated_matcher :not_update_yabeda_gauge, :update_yabeda_gauge 14 | 15 | begin 16 | Combustion.initialize! :active_record do 17 | if ENV["LOG"].to_s.empty? 18 | config.logger = ActiveSupport::TaggedLogging.new(Logger.new(nil)) 19 | config.log_level = :fatal 20 | else 21 | config.logger = ActiveSupport::TaggedLogging.new(Logger.new($stdout)) 22 | config.log_level = :debug 23 | end 24 | 25 | config.i18n.available_locales = %i[ru en] 26 | config.i18n.default_locale = :ru 27 | end 28 | rescue => e 29 | # Fail fast if application couldn't be loaded 30 | warn "💥 Failed to load the app: #{e.message}\n#{e.backtrace.join("\n")}" 31 | exit(1) 32 | end 33 | 34 | require "rspec/rails" 35 | # Add additional requires below this line. Rails is not loaded until this point! 36 | 37 | require "factory_bot_rails" 38 | require "dry-monads" 39 | require "dry/monads/result" 40 | 41 | require "sbmt/kafka_consumer/testing" 42 | require "sbmt/kafka_consumer/instrumentation/sentry_tracer" 43 | require "sbmt/kafka_consumer/instrumentation/open_telemetry_loader" 44 | 45 | # when using with combustion, anyway is required earlier than rails, 46 | # so its railtie does nothing, but that require is cached; 47 | # we must require it explicitly to force anyway to autoload our configs 48 | require "anyway/rails" if defined?(Rails::Railtie) 49 | 50 | RSpec.configure do |config| 51 | config.include FactoryBot::Syntax::Methods 52 | config.include ActiveSupport::Testing::TimeHelpers 53 | config.include ActionDispatch::TestProcess::FixtureFile 54 | 55 | if Rails::VERSION::STRING >= "7.1.0" 56 | config.fixture_paths = Rails.root.join("spec/fixtures") 57 | else 58 | config.fixture_path = Rails.root.join("spec/fixtures") 59 | end 60 | 61 | config.use_transactional_fixtures = true 62 | config.infer_spec_type_from_file_location! 63 | config.filter_rails_from_backtrace!
64 | end 65 | 66 | Sbmt::KafkaConsumer::ClientConfigurer.configure! 67 | 68 | Dir["#{__dir__}/support/**/*.rb"].sort.each { |f| require f } 69 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/base_consumer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | class MyMiddleware 6 | def call(message) 7 | yield 8 | end 9 | end 10 | 11 | class MyMiddleware1 12 | def call(message) 13 | yield 14 | end 15 | end 16 | 17 | class SkipMessageMiddleware 18 | def call(message); end 19 | end 20 | 21 | class MiddlewareError 22 | def call(message) 23 | raise StandardError, "Middleware error" 24 | end 25 | end 26 | 27 | describe Sbmt::KafkaConsumer::BaseConsumer do 28 | include_context "with sbmt karafka consumer" 29 | 30 | context "with class_attribute params" do 31 | it "child class does not overwrite parent's attrs" do 32 | parent_class = described_class.consumer_klass(skip_on_error: false) 33 | expect(parent_class.skip_on_error).to be(false) 34 | expect(parent_class.name).to eq(described_class.name) 35 | 36 | child_class = parent_class.consumer_klass(skip_on_error: true) 37 | expect(child_class.skip_on_error).to be(true) 38 | expect(child_class.name).to eq(described_class.name) 39 | 40 | expect(parent_class.skip_on_error).to be(false) 41 | end 42 | end 43 | 44 | context "when the consumer processes one message at a time" do 45 | let(:consumer_class) do 46 | Class.new(described_class.consumer_klass) do 47 | attr_reader :consumed, :consume_count 48 | 49 | def initialize(error: nil, reset_error: true) 50 | @error = error 51 | @reset_error = reset_error 52 | super() 53 | end 54 | 55 | def process_message(_message) 56 | @consume_count = @consume_count.to_i + 1 57 | 58 | if @error 59 | error_to_raise = @error 60 | @error = nil if @reset_error 61 | 62 | raise error_to_raise, "test error" 63 | end 64 | 65 | @consumed = true 66 | end 67 | 68 | def consumed?
69 | !!@consumed 70 | end 71 | end 72 | end 73 | 74 | let(:payload) { "test-payload" } 75 | let(:headers) { {"Test-Header" => "test-header-value"} } 76 | let(:key) { "test-key" } 77 | let(:consume_error) { nil } 78 | 79 | before do 80 | stub_const("Sbmt::KafkaConsumer::BaseConsumer::DEFAULT_RETRY_DELAY_MULTIPLIER", 0) 81 | allow(consumer).to receive(:log_payload?).and_return(true) 82 | publish_to_sbmt_karafka(payload.to_json, headers: headers, key: key) 83 | end 84 | 85 | it "consumes" do 86 | consume_with_sbmt_karafka 87 | expect(consumer).to be_consumed 88 | end 89 | 90 | it "logs message" do 91 | expect(Rails.logger).to receive(:info).with(/Successfully consumed message/) 92 | expect(Rails.logger).to receive(:info).with(/Processing message/) 93 | expect(Rails.logger).to receive(:info).with(/Commit offset/) 94 | expect(Rails.logger).to receive(:info).with(/#{payload}/) 95 | 96 | consume_with_sbmt_karafka 97 | expect(consumer).to be_consumed 98 | end 99 | 100 | context "when an active record error is raised" do 101 | let(:error) { ActiveRecord::StatementInvalid } 102 | let(:consumer) { build_consumer(consumer_class.new(error: error)) } 103 | 104 | it "tracks error" do 105 | allow(Rails.logger).to receive(:error) 106 | 107 | consume_with_sbmt_karafka 108 | expect(consumer).not_to be_consumed 109 | expect(consumer.consume_count).to eq 1 110 | end 111 | end 112 | 113 | context "when consumer raises exception" do 114 | let(:consumer_class) do 115 | base_klass = described_class.consumer_klass(skip_on_error: true) 116 | Class.new(base_klass) do 117 | def process_message(_message) 118 | raise "always throws an exception" 119 | end 120 | end 121 | end 122 | 123 | it "skips message if skip_on_error is set" do 124 | expect(Rails.logger).to receive(:error).twice 125 | 126 | consume_with_sbmt_karafka 127 | end 128 | end 129 | 130 | context "when cooperative_sticky is true" do 131 | before do 132 | allow(consumer).to receive(:cooperative_sticky?).and_return(true) 133 | end 134 | 135 | it "calls mark_as_consumed" do 136 | expect(consumer).to receive(:mark_as_consumed).once.and_call_original 137 | expect(consumer).not_to receive(:mark_as_consumed!) 138 | 139 | consume_with_sbmt_karafka 140 | end 141 | end 142 | 143 | context "when cooperative_sticky is false" do 144 | before do 145 | allow(consumer).to receive(:cooperative_sticky?).and_return(false) 146 | end 147 | 148 | it "calls mark_as_consumed!" do 149 | expect(consumer).to receive(:mark_as_consumed!).once.and_call_original 150 | expect(consumer).not_to receive(:mark_as_consumed) 151 | 152 | consume_with_sbmt_karafka 153 | end 154 | end 155 | 156 | context "when middlewares are used" do 157 | let(:consumer_class) do 158 | base_klass = described_class.consumer_klass(middlewares: middlewares) 159 | Class.new(base_klass) do 160 | def process_message(_message) 161 | @consumed = true 162 | end 163 | 164 | def consumed?
165 | !!@consumed 166 | end 167 | end 168 | end 169 | 170 | context "when middleware skips message processing" do 171 | let(:middlewares) { ["SkipMessageMiddleware"] } 172 | 173 | it "skips message processing but still marks the message" do 174 | expect(consumer).not_to receive(:process_message) 175 | expect(consumer).to receive(:mark_message).once.and_call_original 176 | 177 | consume_with_sbmt_karafka 178 | expect(consumer).not_to be_consumed 179 | end 180 | end 181 | 182 | context "when middlewares are present" do 183 | let(:middlewares) { ["MyMiddleware"] } 184 | 185 | it "calls middleware before processing message" do 186 | consume_with_sbmt_karafka 187 | expect(consumer).to be_consumed 188 | end 189 | end 190 | 191 | context "when multiple middlewares are present" do 192 | let(:middlewares) { %w[MyMiddleware MyMiddleware1] } 193 | 194 | it "calls each middleware in order before processing message" do 195 | consume_with_sbmt_karafka 196 | expect(consumer).to be_consumed 197 | end 198 | end 199 | 200 | context "when no middlewares are present" do 201 | let(:consumer_class) do 202 | Class.new(described_class.consumer_klass) do 203 | def process_message(_message) 204 | @consumed = true 205 | end 206 | 207 | def consumed? 208 | !!@consumed 209 | end 210 | end 211 | end 212 | 213 | it "does not call any middleware" do 214 | consume_with_sbmt_karafka 215 | expect(consumer).to be_consumed 216 | end 217 | end 218 | 219 | context "when middleware raises exception" do 220 | let(:exception_class) { StandardError } 221 | let(:middlewares) { ["MiddlewareError"] } 222 | let(:consumer_class) do 223 | Class.new(described_class.consumer_klass(skip_on_error: true, middlewares: middlewares)) do 224 | def process_message(_message) 225 | @consumed = true 226 | end 227 | 228 | def consumed? 229 | !!@consumed 230 | end 231 | end 232 | end 233 | 234 | it "skips message if middleware raises exception and skip_on_error is set" do 235 | expect(Rails.logger).to receive(:warn).once.with(/skipping unprocessable message/) 236 | 237 | consume_with_sbmt_karafka 238 | expect(consumer).not_to be_consumed 239 | end 240 | end 241 | end 242 | end 243 | 244 | context "when the consumer processes messages in batches" do 245 | let(:consumer_class) do 246 | Class.new(described_class.consumer_klass) do 247 | attr_reader :consumed 248 | def process_batch(messages) 249 | Rails.logger.info "Process batch #{messages.count} messages" 250 | @consumed = true 251 | end 252 | 253 | def consumed? 254 | !!@consumed 255 | end 256 | end 257 | end 258 | 259 | let(:payload) { "test-payload" } 260 | 261 | before do 262 | allow(Rails.logger).to receive(:info) 263 | publish_to_sbmt_karafka(payload.to_json) 264 | end 265 | 266 | it "consumes" do 267 | consume_with_sbmt_karafka 268 | expect(consumer).to be_consumed 269 | expect(Rails.logger).to have_received(:info).with(/Process batch/) 270 | end 271 | end 272 | end 273 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/client_configurer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::ClientConfigurer do 6 | it "properly configures karafka routes" do 7 | described_class.configure!
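# configure! builds the Karafka routing from the consumer_groups section of
# spec/internal/config/kafka_consumer.yml; the expectations below mirror that file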
8 | 9 | expect(Karafka::App.routes.count).to be(2) 10 | 11 | simple_route = Karafka::App.routes.first.to_h 12 | expect(simple_route).to include( 13 | id: "some_name_cg_with_single_topic", 14 | topics: contain_exactly( 15 | hash_including( 16 | active: true, 17 | initial_offset: "earliest", 18 | kafka: hash_including( 19 | "auto.offset.reset": "latest", 20 | "allow.auto.create.topics": true, 21 | "bootstrap.servers": "kafka:9092", 22 | "heartbeat.interval.ms": 5000, 23 | "reconnect.backoff.max.ms": 3000, 24 | "security.protocol": "plaintext", 25 | "session.timeout.ms": 30000, 26 | "socket.connection.setup.timeout.ms": 5000, 27 | "socket.timeout.ms": 30000 28 | ), 29 | manual_offset_management: {active: true}, 30 | max_messages: 100, 31 | max_wait_time: 1000, 32 | name: "topic_with_inbox_items" 33 | ) 34 | ) 35 | ) 36 | 37 | complex_routes = Karafka::App.routes.last.to_h 38 | expect(complex_routes[:id]).to eq("some_name_cg_with_multiple_topics") 39 | 40 | topics = complex_routes[:topics].sort_by { |topic| topic[:name] } 41 | expect(topics[0]).to include( 42 | active: false, 43 | initial_offset: "earliest", 44 | kafka: hash_including( 45 | "allow.auto.create.topics": true, 46 | "bootstrap.servers": "kafka:9092", 47 | "heartbeat.interval.ms": 5000, 48 | "reconnect.backoff.max.ms": 3000, 49 | "security.protocol": "plaintext", 50 | "session.timeout.ms": 30000, 51 | "socket.connection.setup.timeout.ms": 5000, 52 | "socket.timeout.ms": 30000 53 | ), 54 | manual_offset_management: {active: false}, 55 | max_messages: 100, 56 | max_wait_time: 1000, 57 | name: "inactive_topic_with_autocommit", 58 | consumer_group_id: "some_name_cg_with_multiple_topics" 59 | ) 60 | expect(topics[1]).to include( 61 | active: true, 62 | initial_offset: "earliest", 63 | kafka: hash_including( 64 | "allow.auto.create.topics": true, 65 | "bootstrap.servers": "kafka:9092", 66 | "heartbeat.interval.ms": 5000, 67 | "reconnect.backoff.max.ms": 3000, 68 | "security.protocol": "plaintext", 69 | "session.timeout.ms": 30000, 70 | "socket.connection.setup.timeout.ms": 5000, 71 | "socket.timeout.ms": 30000 72 | ), 73 | manual_offset_management: {active: true}, 74 | max_messages: 100, 75 | max_wait_time: 1000, 76 | name: "topic-name-with.dots-dashes_and_underscores" 77 | ) 78 | expect(topics[2]).to include( 79 | active: true, 80 | initial_offset: "earliest", 81 | kafka: hash_including( 82 | "allow.auto.create.topics": true, 83 | "bootstrap.servers": "kafka:9092", 84 | "heartbeat.interval.ms": 5000, 85 | "reconnect.backoff.max.ms": 3000, 86 | "security.protocol": "plaintext", 87 | "session.timeout.ms": 30000, 88 | "socket.connection.setup.timeout.ms": 5000, 89 | "socket.timeout.ms": 30000 90 | ), 91 | manual_offset_management: {active: true}, 92 | max_messages: 100, 93 | max_wait_time: 1000, 94 | name: "topic_with_json_data" 95 | ) 96 | expect(topics[3]).to include( 97 | active: true, 98 | initial_offset: "earliest", 99 | kafka: hash_including( 100 | "allow.auto.create.topics": true, 101 | "bootstrap.servers": "kafka:9092", 102 | "heartbeat.interval.ms": 5000, 103 | "reconnect.backoff.max.ms": 3000, 104 | "security.protocol": "plaintext", 105 | "session.timeout.ms": 30000, 106 | "socket.connection.setup.timeout.ms": 5000, 107 | "socket.timeout.ms": 30000 108 | ), 109 | manual_offset_management: {active: true}, 110 | max_messages: 100, 111 | max_wait_time: 1000, 112 | name: "topic_with_protobuf_data" 113 | ) 114 | end 115 | end 116 | -------------------------------------------------------------------------------- 
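The routes asserted above are built from spec/internal/config/kafka_consumer.yml. For orientation, a minimal consumer backing such a route might look like the sketch below (class name, topic and log text are illustrative, not part of this repo); batch-oriented consumers implement process_batch instead:

# Hypothetical consumer, referenced from kafka_consumer.yml via consumer.klass:
class OrderCreatedConsumer < Sbmt::KafkaConsumer::BaseConsumer
  # Called once per message; raising here prevents the offset from being
  # committed, unless skip_on_error is enabled for the topic.
  def process_message(message)
    Rails.logger.info("order event: #{message.payload.inspect}")
  end
end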
/spec/sbmt/kafka_consumer/config/auth_config_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Config::Auth, type: :config do 6 | context "when no auth is configured" do 7 | let(:config) { described_class.new } 8 | 9 | it "defaults to plaintext and properly translates to kafka options" do 10 | expect(config.kind).to eq("plaintext") 11 | expect(config.to_kafka_options).to eq("security.protocol": "plaintext") 12 | end 13 | end 14 | 15 | context "when sasl plaintext auth is used" do 16 | let(:config) { 17 | described_class.new( 18 | kind: "sasl_plaintext", sasl_mechanism: "PLAIN", 19 | sasl_username: "username", sasl_password: "password" 20 | ) 21 | } 22 | 23 | it "loads valid config and properly translates to kafka options" do 24 | expect(config.kind).to eq("sasl_plaintext") 25 | expect(config.sasl_username).to eq("username") 26 | expect(config.sasl_password).to eq("password") 27 | expect(config.sasl_mechanism).to eq("PLAIN") 28 | expect(config.to_kafka_options) 29 | .to eq({ 30 | "security.protocol": "sasl_plaintext", 31 | "sasl.password": "password", 32 | "sasl.username": "username", 33 | "sasl.mechanism": "PLAIN" 34 | }) 35 | end 36 | 37 | it "raises on empty username" do 38 | expect { described_class.new(kind: "sasl_plaintext").to_kafka_options } 39 | .to raise_error(/sasl_username is required/) 40 | end 41 | 42 | it "raises on empty password" do 43 | expect { described_class.new(kind: "sasl_plaintext", sasl_username: "username").to_kafka_options } 44 | .to raise_error(/sasl_password is required/) 45 | end 46 | 47 | it "sasl_mechanism defaults to SCRAM-SHA-512" do 48 | expect(described_class.new(kind: "sasl_plaintext", 49 | sasl_username: "username", 50 | sasl_password: "password").to_kafka_options) 51 | .to eq({ 52 | "security.protocol": "sasl_plaintext", 53 | "sasl.password": "password", 54 | "sasl.username": "username", 55 | "sasl.mechanism": "SCRAM-SHA-512" 56 | }) 57 | end 58 | end 59 | end 60 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/config/kafka_config_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Config::Kafka, type: :config do 6 | let(:kafka_config_defaults) do 7 | { 8 | "heartbeat.interval.ms": 5000, 9 | "reconnect.backoff.max.ms": 3000, 10 | "session.timeout.ms": 30000, 11 | "socket.connection.setup.timeout.ms": 5000, 12 | "socket.timeout.ms": 30000 13 | } 14 | end 15 | 16 | context "with servers validation" do 17 | it "raises error if servers are not set" do 18 | expect { described_class.new } 19 | .to raise_error(/:servers is missing/) 20 | end 21 | 22 | it "raises error if servers have unexpected format" do 23 | expect { described_class.new(servers: "kafka://server:9092") } 24 | .to raise_error(/violates constraints/) 25 | end 26 | end 27 | 28 | context "when servers are properly set" do 29 | let(:servers) { "server1:9092,server2:9092" } 30 | let(:config) { described_class.new(servers: servers) } 31 | 32 | it "successfully loads config and translates to kafka options" do 33 | expect(config.servers).to eq(servers) 34 | expect(config.to_kafka_options) 35 | .to eq(kafka_config_defaults.merge("bootstrap.servers": servers)) 36 | end 37 | end 38 | 39 | context "when servers are also set in kafka options" do 40 | let(:root_servers) { "server1:9092,server2:9092" } 41 | let(:kafka_servers) { "server3:9092,server4:9092" } 42 | let(:config) { described_class.new(servers: root_servers, kafka_options: {"bootstrap.servers": kafka_servers}) } 43 | 44 | it "root servers option takes precedence over kafka config" do 45 | expect(config.servers).to eq(root_servers) 46 | expect(config.to_kafka_options) 47 | .to eq(kafka_config_defaults.merge("bootstrap.servers": root_servers)) 48 | end 49 | end 50 | end 51 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/config/metrics_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Config::Metrics do 6 | context "when no metrics are configured" do 7 | let(:config) { described_class.new } 8 | 9 | it "has default values" do 10 | expect(config.port).to be_nil 11 | expect(config.path).to eq("/metrics") 12 | end 13 | end 14 | 15 | context "when something is configured" do 16 | let(:config) { 17 | described_class.new( 18 | port: 8080, 19 | path: "/custom_metrics" 20 | ) 21 | } 22 | 23 | it "loads valid config" do 24 | expect(config.port).to eq(8080) 25 | expect(config.path).to eq("/custom_metrics") 26 | end 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/config/probes_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Config::Probes do 6 | context "when no probes are configured" do 7 | let(:config) { described_class.new } 8 | 9 | it "defaults to 9394 port and both endpoints enabled" do 10 | expect(config.port).to eq(9394) 11 | expect(config.endpoints).to eq( 12 | Sbmt::KafkaConsumer::Config::Probes::Endpoints.new( 13 | liveness: { 14 | enabled: true, 15 | path: "/liveness", 16 | timeout: 300 17 | }, 18 | readiness: { 19 | enabled: true, 20 | path: "/readiness/kafka_consumer" 21 | } 22 | ) 23 | ) 24 | end 25 | end 26 | 27 | context "when something is configured" do 28 | let(:config) { 29 | described_class.new( 30 | port: 8080, 31 | endpoints: { 32 | liveness: { 33 | timeout: 15 34 | } 35 | } 36 | ) 37 | } 38 | 39 | it "loads valid config" do 40 | expect(config.port).to eq(8080) 41 | expect(config.endpoints).to eq( 42 | Sbmt::KafkaConsumer::Config::Probes::Endpoints.new( 43 | liveness: { 44 | enabled: true, 45 | path: "/liveness", 46 | timeout: 15 47 | }, 48 | readiness: { 49 | enabled: true, 50 | path: "/readiness/kafka_consumer" 51 | } 52 | ) 53 | ) 54 | end 55 | end 56 | end 57 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/config_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Config, type: :config do 6 | context "when the app is initialized" do 7 | let(:default_env) { 8 | { 9 | "KAFKA_CONSUMER_AUTH__KIND" => "sasl_plaintext", # anyway_config maps double-underscored env vars onto nested config attributes (config.auth.kind etc.) 10 | "KAFKA_CONSUMER_AUTH__SASL_USERNAME" => "username", 11 | "KAFKA_CONSUMER_AUTH__SASL_PASSWORD" => "password", 12 | "KAFKA_CONSUMER_AUTH__SASL_MECHANISM" => "PLAIN", 13 | 14 | "KAFKA_CONSUMER_KAFKA__SERVERS" => "server1:9092,server2:9092", 15 | 16 | "KAFKA_CONSUMER_CLIENT_ID" => "client-id", 17 | "KAFKA_CONSUMER_PARTITION_ASSIGNMENT_STRATEGY" => "cooperative-sticky"
18 | } 19 | } 20 | let(:config) { described_class.new } 21 | let(:kafka_config_defaults) do 22 | { 23 | "heartbeat.interval.ms": 5000, 24 | "reconnect.backoff.max.ms": 3000, 25 | "session.timeout.ms": 30000, 26 | "socket.connection.setup.timeout.ms": 5000, 27 | "socket.timeout.ms": 30000 28 | } 29 | end 30 | 31 | it "properly merges kafka options" do 32 | with_env(default_env) do 33 | expect(config.to_kafka_options) 34 | .to eq(kafka_config_defaults.merge( 35 | "bootstrap.servers": "server1:9092,server2:9092", 36 | "security.protocol": "sasl_plaintext", 37 | "sasl.mechanism": "PLAIN", 38 | "sasl.password": "password", 39 | "sasl.username": "username", 40 | "partition.assignment.strategy": "cooperative-sticky", 41 | # loaded from kafka_consumer.yml 42 | "allow.auto.create.topics": true 43 | )) 44 | end 45 | end 46 | 47 | it "has correct defaults" do 48 | with_env(default_env) do 49 | expect(config.deserializer_class).to eq("::Sbmt::KafkaConsumer::Serialization::NullDeserializer") 50 | expect(config.monitor_class).to eq("::Sbmt::KafkaConsumer::Instrumentation::TracingMonitor") 51 | expect(config.logger_class).to eq("::Sbmt::KafkaConsumer::Logger") 52 | expect(config.logger_listener_class).to eq("::Sbmt::KafkaConsumer::Instrumentation::LoggerListener") 53 | expect(config.metrics_listener_class).to eq("::Sbmt::KafkaConsumer::Instrumentation::YabedaMetricsListener") 54 | expect(config.consumer_mapper_class).to eq("::Sbmt::KafkaConsumer::Routing::KarafkaV1ConsumerMapper") 55 | end 56 | end 57 | 58 | it "properly loads/maps consumer groups to config klasses" do 59 | with_env(default_env) do 60 | expect(config.consumer_groups) 61 | .to eq([ 62 | Sbmt::KafkaConsumer::Config::ConsumerGroup.new( 63 | id: "group_id_1", 64 | name: "cg_with_single_topic", 65 | topics: [ 66 | Sbmt::KafkaConsumer::Config::Topic.new( 67 | name: "topic_with_inbox_items", 68 | active: true, 69 | manual_offset_management: true, 70 | consumer: Sbmt::KafkaConsumer::Config::Consumer.new( 71 | klass: "Sbmt::KafkaConsumer::InboxConsumer", 72 | init_attrs: { 73 | name: "test_items", 74 | inbox_item: "TestInboxItem" 75 | } 76 | ), 77 | deserializer: Sbmt::KafkaConsumer::Config::Deserializer.new( 78 | klass: "Sbmt::KafkaConsumer::Serialization::NullDeserializer" 79 | ), 80 | kafka_options: { 81 | "auto.offset.reset": "latest" 82 | } 83 | ) 84 | ] 85 | ), 86 | Sbmt::KafkaConsumer::Config::ConsumerGroup.new( 87 | id: "group_id_2", 88 | name: "cg_with_multiple_topics", 89 | topics: [ 90 | Sbmt::KafkaConsumer::Config::Topic.new( 91 | name: "topic_with_json_data", 92 | active: true, 93 | manual_offset_management: true, 94 | consumer: Sbmt::KafkaConsumer::Config::Consumer.new( 95 | klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer", 96 | init_attrs: { 97 | skip_on_error: true 98 | } 99 | ), 100 | deserializer: Sbmt::KafkaConsumer::Config::Deserializer.new( 101 | klass: "Sbmt::KafkaConsumer::Serialization::JsonDeserializer", 102 | init_attrs: { 103 | skip_decoding_error: true 104 | } 105 | ) 106 | ), 107 | Sbmt::KafkaConsumer::Config::Topic.new( 108 | name: "inactive_topic_with_autocommit", 109 | active: false, 110 | manual_offset_management: false, 111 | consumer: Sbmt::KafkaConsumer::Config::Consumer.new( 112 | klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer" 113 | ) 114 | ), 115 | Sbmt::KafkaConsumer::Config::Topic.new( 116 | name: "topic_with_protobuf_data", 117 | active: true, 118 | manual_offset_management: true, 119 | consumer: Sbmt::KafkaConsumer::Config::Consumer.new( 120 | klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer" 
121 | ), 122 | deserializer: Sbmt::KafkaConsumer::Config::Deserializer.new( 123 | klass: "Sbmt::KafkaConsumer::Serialization::ProtobufDeserializer", 124 | init_attrs: { 125 | message_decoder_klass: "Sso::UserRegistration", 126 | skip_decoding_error: true 127 | } 128 | ) 129 | ), 130 | Sbmt::KafkaConsumer::Config::Topic.new( 131 | name: "topic-name-with.dots-dashes_and_underscores", 132 | active: true, 133 | manual_offset_management: true, 134 | consumer: Sbmt::KafkaConsumer::Config::Consumer.new( 135 | klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer" 136 | ), 137 | deserializer: Sbmt::KafkaConsumer::Config::Deserializer.new( 138 | klass: "Sbmt::KafkaConsumer::Serialization::NullDeserializer", 139 | init_attrs: {} 140 | ) 141 | ) 142 | ] 143 | ) 144 | ]) 145 | end 146 | end 147 | 148 | context "without metrics port" do 149 | let(:env) { 150 | default_env.merge( 151 | "KAFKA_CONSUMER_PROBES__PORT" => "8080" 152 | ) 153 | } 154 | 155 | it "sets metrics port equal to probes port" do 156 | with_env(env) do 157 | expect(config.probes.port).to eq 8080 158 | expect(config.metrics.port).to eq config.probes.port 159 | end 160 | end 161 | end 162 | 163 | context "with metrics port" do 164 | let(:env) { 165 | default_env.merge( 166 | "KAFKA_CONSUMER_PROBES__PORT" => "8080", 167 | "KAFKA_CONSUMER_METRICS__PORT" => "9090" 168 | ) 169 | } 170 | 171 | it "sets different ports for probes and metrics" do 172 | with_env(env) do 173 | expect(config.probes.port).to eq 8080 174 | expect(config.metrics.port).to eq 9090 175 | end 176 | end 177 | end 178 | 179 | context "with partition assignment for topic" do 180 | let(:config) { 181 | described_class.new(consumer_groups: { 182 | group_id_1: { 183 | name: "cg_with_single_topic", 184 | topics: [ 185 | { 186 | name: "topic_with_inbox_items", 187 | consumer: { 188 | klass: "Sbmt::KafkaConsumer::InboxConsumer", 189 | init_attrs: 190 | {name: "test_items"}, 191 | inbox_item: "TestInboxItem" 192 | }, 193 | kafka_options: { 194 | "partition.assignment.strategy": "cooperative-sticky" 195 | } 196 | } 197 | ] 198 | } 199 | }) 200 | } 201 | 202 | it "raises error" do 203 | with_env(default_env) do 204 | expect { config }.to raise_error(/Using the partition.assignment.strategy option for individual topics is not supported/) 205 | end 206 | end 207 | end 208 | end 209 | end 210 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/inbox_consumer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::InboxConsumer do 6 | include_context "with sbmt karafka consumer" 7 | 8 | let(:consumer_class) do 9 | described_class.consumer_klass( 10 | name: "test_items", 11 | event_name: "test-event-name", 12 | inbox_item: "TestInboxItem", 13 | skip_on_error: skip_on_error 14 | ) 15 | end 16 | 17 | let(:skip_on_error) { false } 18 | let(:create_item_result) { Dry::Monads::Result::Success } 19 | let(:logger) { double(ActiveSupport::TaggedLogging) } 20 | let(:uuid) { "test-uuid-1" } 21 | let(:message_key) { "test-key" } 22 | let(:message_offset) { 0 } 23 | let(:headers) do 24 | { 25 | "Idempotency-Key" => uuid, 26 | "Dispatched-At" => 5.seconds.ago, 27 | "Sequence-ID" => 3 28 | } 29 | end 30 | 31 | before do 32 | publish_to_sbmt_karafka( 33 | '{"test":"message"}', 34 | offset: message_offset, 35 | key: message_key, 36 | partition: 10, 37 | headers: headers 38 | ) 39 | end 40 | 41 | context "when 
message valid" do 42 | it "creates inbox item" do 43 | expect(kafka_client).to receive(:mark_as_consumed!) 44 | expect(Rails.logger).to receive(:info).with(/Successfully consumed/).twice 45 | expect(Rails.logger).to receive(:info).with(/Processing message/) 46 | expect(Rails.logger).to receive(:info).with(/Commit offset/) 47 | expect { consume_with_sbmt_karafka } 48 | .to change(TestInboxItem, :count).by(1) 49 | .and increment_yabeda_counter(Yabeda.kafka_consumer.inbox_consumes) 50 | .with_tags( 51 | inbox_name: "test_inbox_item", 52 | event_name: "test-event-name", 53 | status: "success" 54 | ) 55 | expect(TestInboxItem.last.options) 56 | .to include( 57 | { 58 | group_id: "test_group", 59 | partition: 10, 60 | source: "KAFKA", 61 | topic: "test_topic" 62 | } 63 | ) 64 | end 65 | 66 | context "with additional metrics after consume" do 67 | let(:inbox_item) { build(:inbox_item) } 68 | 69 | before do 70 | allow(Sbmt::Outbox::CreateInboxItem).to receive(:call).and_return(Dry::Monads::Result::Success.new(inbox_item)) 71 | end 72 | 73 | it "call method" do 74 | expect(inbox_item).to receive(:track_metrics_after_consume) 75 | consume_with_sbmt_karafka 76 | end 77 | end 78 | end 79 | 80 | context "when got failure from inbox item creator" do 81 | before do 82 | allow(Sbmt::Outbox::CreateInboxItem).to receive(:call) 83 | .and_return(Dry::Monads::Result::Failure.new("test failure")) 84 | end 85 | 86 | context "when skip_on_error is enabled" do 87 | let(:skip_on_error) { true } 88 | 89 | it "skips failed message and continues processing" do 90 | expect(kafka_client).not_to receive(:mark_as_consumed!) 91 | allow(Rails.logger).to receive(:warn) 92 | expect { 93 | consume_with_sbmt_karafka 94 | }.to increment_yabeda_counter(Yabeda.kafka_consumer.inbox_consumes) 95 | .with_tags( 96 | inbox_name: "test_inbox_item", 97 | event_name: "test-event-name", 98 | status: "skipped" 99 | ) 100 | end 101 | 102 | context "when got active record error" do 103 | before do 104 | allow(Sbmt::Outbox::CreateInboxItem) 105 | .to receive(:call).and_raise(ActiveRecord::StatementInvalid) 106 | end 107 | 108 | it "skips failed message and continues processing" do 109 | expect(kafka_client).not_to receive(:mark_as_consumed!) 110 | allow(Rails.logger).to receive(:warn) 111 | expect { 112 | consume_with_sbmt_karafka 113 | }.to increment_yabeda_counter(Yabeda.kafka_consumer.inbox_consumes) 114 | .with_tags( 115 | inbox_name: "test_inbox_item", 116 | event_name: "test-event-name", 117 | status: "skipped" 118 | ) 119 | end 120 | end 121 | end 122 | 123 | context "when skip_on_error is disabled" do 124 | let(:skip_on_error) { false } 125 | 126 | it "consumer crashes without committing offsets" do 127 | expect(kafka_client).not_to receive(:mark_as_consumed!) 128 | allow(Rails.logger).to receive(:error) 129 | expect { 130 | consume_with_sbmt_karafka 131 | }.to increment_yabeda_counter(Yabeda.kafka_consumer.inbox_consumes) 132 | .with_tags( 133 | inbox_name: "test_inbox_item", 134 | event_name: "test-event-name", 135 | status: "failure" 136 | ) 137 | end 138 | end 139 | end 140 | 141 | context "when got exception from inbox item creator" do 142 | before do 143 | allow(Sbmt::Outbox::CreateInboxItem).to receive(:call) 144 | .and_raise("test exception") 145 | end 146 | 147 | it "let consumer crash without committing offsets" do 148 | expect(kafka_client).not_to receive(:mark_as_consumed!) 
149 | allow(Rails.logger).to receive(:error) 150 | expect { 151 | consume_with_sbmt_karafka 152 | }.to increment_yabeda_counter(Yabeda.kafka_consumer.inbox_consumes) 153 | .with_tags( 154 | inbox_name: "test_inbox_item", 155 | event_name: "test-event-name", 156 | status: "failure" 157 | ) 158 | end 159 | end 160 | 161 | context "with poisoned message" do 162 | before do 163 | allow(Rails.logger).to receive(:info).with(/Successfully consumed/) 164 | allow(Rails.logger).to receive(:info).with(/Processing message/) 165 | allow(Rails.logger).to receive(:info).with(/Commit offset/) 166 | allow(Rails.logger).to receive(:error) 167 | end 168 | 169 | shared_examples "successful consumer" do 170 | it "successfully consumes" do 171 | expect(kafka_client).to receive(:mark_as_consumed!) 172 | expect { consume_with_sbmt_karafka } 173 | .to change(TestInboxItem, :count).by(1) 174 | .and increment_yabeda_counter(Yabeda.kafka_consumer.inbox_consumes) 175 | .with_tags( 176 | inbox_name: "test_inbox_item", 177 | event_name: "test-event-name", 178 | status: "success" 179 | ) 180 | end 181 | end 182 | 183 | shared_examples "empty key consumer" do 184 | it_behaves_like "successful consumer" 185 | it "uses the message offset as the event key and does not log an error" do 186 | consume_with_sbmt_karafka 187 | expect(TestInboxItem.last.event_key).to eq(message_offset) 188 | expect(Rails.logger).not_to have_received(:error) 189 | end 190 | end 191 | 192 | context "when message idempotency header does not exist" do 193 | before do 194 | stub_const( 195 | "Sbmt::KafkaConsumer::InboxConsumer::IDEMPOTENCY_HEADER_NAME", 196 | "non-existent-header" 197 | ) 198 | end 199 | 200 | it_behaves_like "successful consumer" 201 | 202 | it "uses a generated uuid" do 203 | consume_with_sbmt_karafka 204 | expect(TestInboxItem.last.uuid.size).to eq(36) 205 | expect(Rails.logger).not_to have_received(:error) 206 | end 207 | end 208 | 209 | context "when message key header is empty" do 210 | let(:message_key) { "" } 211 | 212 | it_behaves_like "empty key consumer" 213 | end 214 | 215 | context "when message key header is nil" do 216 | let(:message_key) { nil } 217 | 218 | it_behaves_like "empty key consumer" 219 | end 220 | end 221 | 222 | context "when the same inbox item already exists" do 223 | before do 224 | create(:inbox_item, uuid: uuid) 225 | end 226 | 227 | it "skips creating a new one" do 228 | expect(kafka_client).to receive(:mark_as_consumed!) # duplicates are still acked so the consumer can advance past them 229 | allow(Rails.logger).to receive(:error) 230 | expect { consume_with_sbmt_karafka } 231 | .to increment_yabeda_counter(Yabeda.kafka_consumer.inbox_consumes) 232 | .with_tags( 233 | inbox_name: "test_inbox_item", 234 | event_name: "test-event-name", 235 | status: "duplicate" 236 | ) 237 | .and not_change(TestInboxItem, :count) 238 | end 239 | end 240 | 241 | context "when extra_message_attrs is used" do 242 | let(:consumer_class) do 243 | klass = super() 244 | Class.new(klass) do 245 | def extra_message_attrs(_message) 246 | {event_name: "custom-value"} 247 | end 248 | end 249 | end 250 | 251 | it "merges with default inbox-item attributes" do 252 | expect(kafka_client).to receive(:mark_as_consumed!)
253 | expect(Rails.logger).to receive(:info).with(/Successfully consumed/).twice 254 | expect(Rails.logger).to receive(:info).with(/Processing message/) 255 | expect(Rails.logger).to receive(:info).with(/Commit offset/) 256 | expect { consume_with_sbmt_karafka }.to change(TestInboxItem, :count).by(1) 257 | expect(TestInboxItem.last.event_name).to eq("custom-value") 258 | end 259 | end 260 | end 261 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/instrumentation/listener_helper_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Instrumentation::ListenerHelper do 6 | let(:subject_klass) { Class.new { include Sbmt::KafkaConsumer::Instrumentation::ListenerHelper } } 7 | let(:event) { double("event") } 8 | let(:payload) { double("payload") } 9 | 10 | describe ".consumer_tags" do 11 | let(:message) { double("message") } 12 | let(:metadata) { OpenStruct.new(topic: "topic", partition: 0) } 13 | 14 | it "returns consumer tags" do 15 | expect(event).to receive(:[]).with(:message).and_return(message) 16 | expect(message).to receive(:metadata).and_return(metadata).twice 17 | 18 | expect(subject_klass.new.send(:consumer_tags, event)).to eq({topic: "topic", partition: 0}) 19 | end 20 | end 21 | 22 | describe ".inbox_tags" do 23 | let(:inbox_name) { "inbox" } 24 | let(:event_name) { "event" } 25 | let(:status) { "status" } 26 | 27 | it "returns tags" do 28 | expect(event).to receive(:[]).with(:inbox_name).and_return(inbox_name) 29 | expect(event).to receive(:[]).with(:event_name).and_return(event_name) 30 | expect(event).to receive(:[]).with(:status).and_return(status) 31 | 32 | expect(subject_klass.new.send(:inbox_tags, event)) 33 | .to eq( 34 | { 35 | inbox_name: inbox_name, 36 | event_name: event_name, 37 | status: status 38 | } 39 | ) 40 | end 41 | end 42 | 43 | describe ".error_message" do 44 | it "builds correct message when exception provided" do 45 | expect(subject_klass.new.send(:error_message, StandardError.new("test"))) 46 | .to eq("test") 47 | end 48 | 49 | it "builds correct message when dry-result provided" do 50 | expect(subject_klass.new.send(:error_message, Dry::Monads::Result::Failure.new("test"))) 51 | .to eq("test") 52 | end 53 | 54 | it "builds correct message when regular string provided" do 55 | expect(subject_klass.new.send(:error_message, "test")) 56 | .to eq("test") 57 | end 58 | end 59 | 60 | describe ".log_backtrace" do 61 | let(:error) { double("argument") } 62 | 63 | it "logs backtrace when exception provided" do 64 | expect(error).to receive(:backtrace).and_return(["backtrace1", "backtrace2"]) 65 | expect(Rails.logger).to receive(:error).with("backtrace1\nbacktrace2") 66 | 67 | subject_klass.new.send(:log_backtrace, error) 68 | end 69 | 70 | it "logs backtrace when dry-result provided" do 71 | expect(error).to receive(:trace).and_return("trace") 72 | expect(Rails.logger).to receive(:error).with("trace") 73 | 74 | subject_klass.new.send(:log_backtrace, error) 75 | end 76 | end 77 | end 78 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/instrumentation/liveness_listener_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Instrumentation::LivenessListener do 6 | subject(:probe) { 
service.call({}) } 7 | 8 | let(:service) { described_class.new } 9 | let(:consumer_group) { spy(:consumer_group, name: "CONSUMER_GROUP") } 10 | 11 | before do 12 | allow(Karafka::App).to receive(:routes).and_return([consumer_group]) 13 | 14 | travel_to Time.now.utc # rubocop:disable Rails/TravelToWithoutBlock 15 | end 16 | 17 | context "without polls" do 18 | it "returns ok" do 19 | expect(probe).to eq [ 20 | 200, 21 | {"Content-Type" => "application/json"}, 22 | [ 23 | { 24 | timed_out_polls: false, 25 | errors_count: 0 26 | }.to_json 27 | ] 28 | ] 29 | end 30 | end 31 | 32 | context "with polls" do 33 | let(:subscription_group) { spy(:subscription_group, consumer_group: consumer_group) } 34 | let(:event) { spy(:event, payload: {subscription_group: subscription_group}) } 35 | 36 | before do 37 | service.on_connection_listener_fetch_loop(event) 38 | end 39 | 40 | it "returns ok" do 41 | expect(probe).to eq [ 42 | 200, 43 | {"Content-Type" => "application/json"}, 44 | [ 45 | { 46 | timed_out_polls: false, 47 | errors_count: 0 48 | }.to_json 49 | ] 50 | ] 51 | end 52 | 53 | context "with timed out polls" do 54 | before do 55 | service.on_connection_listener_fetch_loop(event) 56 | allow(service).to receive(:monotonic_now).and_wrap_original do |meth| 57 | meth.call + 315 * 1000 58 | end 59 | end 60 | 61 | it "returns error" do 62 | res = probe 63 | expect(res[0]).to eq 500 64 | expect(res[1]).to eq({"Content-Type" => "application/json"}) 65 | expect(JSON.parse(res[2][0]).symbolize_keys).to match( 66 | a_hash_including(error_type: Sbmt::KafkaConsumer::Instrumentation::LivenessListener::ERROR_TYPE, 67 | timed_out_polls: true, 68 | errors_count: 0) 69 | ) 70 | end 71 | end 72 | end 73 | 74 | context "with librdkafka errors" do 75 | let(:error_event) { {type: "librdkafka.error", error: StandardError.new("Test error")} } 76 | 77 | before do 78 | allow(error_event[:error]).to receive(:backtrace).and_return(["line 1", "line 2"]) 79 | end 80 | 81 | it "increments error count and stores backtrace" do 82 | expect { service.on_error_occurred(error_event) }.to change { service.instance_variable_get(:@error_count) }.by(1) 83 | expect(service.instance_variable_get(:@error_backtrace)).to eq("line 1\nline 2") 84 | end 85 | 86 | context "when error count exceeds max_error_count" do 87 | before do 88 | 10.times { service.on_error_occurred(error_event) } 89 | end 90 | 91 | it "returns error with error count and backtrace" do 92 | expect(probe).to eq [ 93 | 500, 94 | {"Content-Type" => "application/json"}, 95 | [ 96 | { 97 | error_type: Sbmt::KafkaConsumer::Instrumentation::LivenessListener::ERROR_TYPE, 98 | timed_out_polls: false, 99 | error_count: 10, 100 | error_backtrace: "line 1\nline 2" 101 | }.to_json 102 | ] 103 | ] 104 | end 105 | end 106 | end 107 | end 108 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/instrumentation/logger_listener_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Instrumentation::LoggerListener do 6 | let(:metadata) { OpenStruct.new(topic: "topic", partition: 0, key: "key", offset: 42) } 7 | let(:message) { double("message", metadata: metadata) } 8 | let(:caller) { double(topic: double(consumer_group: double(id: "group_id"))) } 9 | let(:event) { double("event", payload: {message: message, time: time, caller: caller}) } 10 | 11 | let(:inbox_name) { "inbox" } 12 | let(:event_name) { 
"event" } 13 | let(:status) { "status" } 14 | let(:message_uuid) { "uuid" } 15 | let(:time) { 10.20 } 16 | let(:logger) { double("Logger") } 17 | let(:error) { StandardError.new("some error") } 18 | let(:error_message) { "some error message" } 19 | 20 | before do 21 | allow(Sbmt::KafkaConsumer).to receive(:logger).and_return(logger) 22 | 23 | allow_any_instance_of(described_class).to receive(:log_backtrace).and_return("some backtrace") 24 | allow_any_instance_of(described_class).to receive(:error_message).and_return(error_message) 25 | end 26 | 27 | describe ".on_error_occurred" do 28 | it "logs error when consumer.base.consume_one event occurred" do 29 | allow(event).to receive(:[]).with(:error).and_return(error) 30 | allow(event).to receive(:[]).with(:type).and_return("consumer.base.consume_one") 31 | allow(event).to receive(:[]).with(:log_level).and_return(:error) 32 | 33 | expect(logger).to receive(:tagged).with(hash_including( 34 | type: "consumer.base.consume_one", 35 | stacktrace: "some backtrace" 36 | )).and_yield 37 | 38 | expect(logger).to receive(:error).with(error_message) 39 | 40 | described_class.new.on_error_occurred(event) 41 | end 42 | 43 | it "logs error when consumer.inbox.consume_one event occurred" do 44 | allow(event).to receive(:[]).with(:error).and_return(error) 45 | allow(event).to receive(:[]).with(:type).and_return("consumer.inbox.consume_one") 46 | allow(event).to receive(:[]).with(:status).and_return(status) 47 | allow(event).to receive(:[]).with(:log_level).and_return(:error) 48 | 49 | expect(logger).to receive(:tagged).with(hash_including( 50 | type: "consumer.inbox.consume_one", 51 | status: "status", 52 | stacktrace: "some backtrace" 53 | )).and_yield 54 | 55 | expect(logger).to receive(:error).with(error_message) 56 | 57 | described_class.new.on_error_occurred(event) 58 | end 59 | 60 | it "logs warnings" do 61 | allow(event).to receive(:[]).with(:error).and_return("test error") 62 | allow(event).to receive(:[]).with(:type).and_return("consumer.base.consume_one") 63 | allow(event).to receive(:[]).with(:log_level).and_return(:warn) 64 | 65 | expect(logger).to receive(:tagged).with(hash_including( 66 | type: "consumer.base.consume_one", 67 | stacktrace: "some backtrace" 68 | )).and_yield 69 | 70 | expect(logger).to receive(:warn).with(error_message) 71 | 72 | described_class.new.on_error_occurred(event) 73 | end 74 | end 75 | 76 | describe ".on_consumer_consumed_one" do 77 | it "logs info message" do 78 | expect(logger).to receive(:tagged).with(hash_including( 79 | kafka: hash_including( 80 | topic: "topic", 81 | partition: 0, 82 | key: "key", 83 | offset: 42, 84 | consumer_group: "group_id", 85 | consume_duration_ms: time 86 | ) 87 | )).and_yield 88 | 89 | expect(logger).to receive(:info).with("Successfully consumed message") 90 | 91 | described_class.new.on_consumer_consumed_one(event) 92 | end 93 | end 94 | 95 | describe ".on_consumer_inbox_consumed_one" do 96 | it "logs info message" do 97 | expect(event).to receive(:[]).with(:status).and_return(status) 98 | expect(event).to receive(:[]).with(:message_uuid).and_return(message_uuid) 99 | 100 | expect(logger).to receive(:tagged).with(hash_including( 101 | kafka: hash_including( 102 | topic: "topic", 103 | partition: 0, 104 | key: "key", 105 | offset: 42, 106 | consumer_group: "group_id", 107 | consume_duration_ms: time 108 | ) 109 | )).and_yield 110 | 111 | expect(logger).to receive(:info).with("Successfully consumed message with uuid: uuid") 112 | 113 | 
described_class.new.on_consumer_inbox_consumed_one(event) 114 | end 115 | end 116 | end 117 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Instrumentation::OpenTelemetryTracer do 6 | let(:topic_name) { "topic" } 7 | let(:message) { OpenStruct.new(topic: topic_name, offset: 0, partition: 1, metadata: {topic: topic_name}, payload: "message payload") } 8 | let(:batch_messages) { 9 | [ 10 | OpenStruct.new(topic: "topic", offset: 0, partition: 1, metadata: {topic: "topic"}, payload: "message payload"), 11 | OpenStruct.new(topic: "another_topic", offset: 1, partition: 2, metadata: {topic: "another_topic"}, payload: "another message payload") 12 | ] 13 | } 14 | let(:consumer_group_name) { "consumer-group-name" } 15 | let(:consumer_group) { OpenStruct.new(id: consumer_group_name) } 16 | let(:consumer_topic) { OpenStruct.new(consumer_group: consumer_group) } 17 | let(:consumer) { OpenStruct.new(topic: consumer_topic, inbox_name: "inbox/name", event_name: nil) } 18 | let(:event_payload) { OpenStruct.new(caller: consumer, message: message, event_name: nil, status: "failure") } 19 | let(:event_inbox_payload) { OpenStruct.new(caller: consumer, message: message, inbox_name: "inbox/name", event_name: nil, status: "failure") } 20 | let(:event_payload_with_batch) { OpenStruct.new(caller: consumer, messages: batch_messages, inbox_name: "inbox/name", event_name: nil, status: "failure") } 21 | 22 | shared_examples "traces message" do |event_name, span_name| 23 | it "traces #{event_name} message" do 24 | expect(tracer).to receive(:in_span).with(span_name, links: nil, kind: :consumer, attributes: { 25 | "messaging.destination" => topic_name, 26 | "messaging.destination_kind" => "topic", 27 | "messaging.kafka.consumer_group" => consumer_group_name, 28 | "messaging.kafka.offset" => 0, 29 | "messaging.kafka.partition" => 1, 30 | "messaging.system" => "kafka" 31 | }) 32 | described_class.new(event_name, event_payload).trace {} 33 | end 34 | end 35 | 36 | shared_examples "traces message with inbox" do |event_name, span_name| 37 | it "traces #{event_name} message" do 38 | expect(tracer).to receive(:in_span).with(span_name, kind: :consumer, attributes: { 39 | "inbox.inbox_name" => "inbox/name", 40 | "inbox.status" => "failure" 41 | }) 42 | described_class.new(event_name, event_inbox_payload).trace {} 43 | end 44 | end 45 | 46 | describe "when disabled" do 47 | before { described_class.enabled = false } 48 | 49 | it "does not trace consumed message" do 50 | expect(::Sbmt::KafkaConsumer::Instrumentation::OpenTelemetryLoader).not_to receive(:instance) 51 | 52 | described_class.new("consumer.consumed_one", event_payload).trace {} 53 | end 54 | end 55 | 56 | describe ".trace" do 57 | let(:tracer) { double("tracer") } 58 | let(:instrumentation_instance) { double("instrumentation instance") } 59 | 60 | before do 61 | described_class.enabled = true 62 | 63 | allow(::Sbmt::KafkaConsumer::Instrumentation::OpenTelemetryLoader).to receive(:instance).and_return(instrumentation_instance) 64 | allow(instrumentation_instance).to receive(:tracer).and_return(tracer) 65 | end 66 | 67 | it_behaves_like "traces message", "consumer.consumed_one", "consume topic" 68 | it_behaves_like "traces message", "consumer.process_message", "consume topic" 69 | 
it_behaves_like "traces message", "consumer.mark_as_consumed", "consume topic" 70 | 71 | it_behaves_like "traces message with inbox", "consumer.inbox.consumed_one", "inbox inbox/name process" 72 | it_behaves_like "traces message with inbox", "consumer.process_message", "inbox inbox/name process" 73 | it_behaves_like "traces message with inbox", "consumer.mark_as_consumed", "inbox inbox/name process" 74 | 75 | it "traces messages" do 76 | expect(tracer).to receive(:in_span).with("consume batch", links: [], kind: :consumer, attributes: { 77 | "messaging.destination" => topic_name, 78 | "messaging.destination_kind" => "topic", 79 | "messaging.kafka.consumer_group" => consumer_group_name, 80 | "messaging.system" => "kafka", 81 | "messaging.batch_size" => 2, 82 | "messaging.first_offset" => 0, 83 | "messaging.last_offset" => 1 84 | }) 85 | described_class.new("consumer.consumed_batch", event_payload_with_batch).trace {} 86 | end 87 | end 88 | end 89 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/instrumentation/readiness_listener_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Instrumentation::ReadinessListener do 6 | subject(:probe) { service.call({}) } 7 | 8 | let(:service) { described_class.new } 9 | let(:headers) { {"Content-Type" => "application/json"} } 10 | let(:error_response) { [500, headers, [{ready: false}.to_json]] } 11 | let(:ok_response) { [200, headers, [{ready: true}.to_json]] } 12 | 13 | shared_examples "error responder" do 14 | it "returns error" do 15 | expect(probe).to eq error_response 16 | end 17 | end 18 | 19 | context "without any events" do 20 | it_behaves_like "error responder" 21 | end 22 | 23 | context "with app.stopping event" do 24 | before do 25 | service.on_app_running({}) 26 | service.on_app_stopping({}) 27 | end 28 | 29 | it_behaves_like "error responder" 30 | end 31 | 32 | context "with app.running event" do 33 | before { service.on_app_running({}) } 34 | 35 | it "returns ok" do 36 | expect(probe).to eq ok_response 37 | end 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/instrumentation/sentry_tracer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Instrumentation::SentryTracer do 6 | let(:sentry_transaction) { instance_double(Sentry::Transaction) } 7 | let(:trace_id) { "trace-id" } 8 | let(:caller) { double("consumer instance") } 9 | let(:message) { OpenStruct.new(topic: "topic", offset: 0, partition: 1, metadata: {topic: "topic"}, payload: "message payload") } 10 | let(:batch_messages) { 11 | [ 12 | OpenStruct.new(topic: "topic", offset: 0, partition: 1, metadata: {topic: "topic"}, payload: "message payload"), 13 | OpenStruct.new(topic: "another_topic", offset: 1, partition: 2, metadata: {topic: "another_topic"}, payload: "another message payload") 14 | ] 15 | } 16 | let(:event_payload) { OpenStruct.new(caller: caller, message: message, trace_id: trace_id, type: nil) } 17 | let(:event_payload_with_batch) { OpenStruct.new(caller: caller, messages: batch_messages, trace_id: trace_id, type: nil) } 18 | 19 | before do 20 | allow(caller).to receive(:messages).and_return([message]) 21 | end 22 | 23 | describe ".trace" do 24 | context "when 
sentry is not initialized" do 25 | it "does nothing" do 26 | expect(Sentry).to receive(:initialized?).and_return(false) 27 | expect(Sentry).not_to receive(:start_transaction) 28 | 29 | described_class.new("consumer.consumed_one", event_payload).trace {} 30 | end 31 | end 32 | 33 | context "when event is consumer.consumed_one" do 34 | before { allow(Sentry).to receive(:initialized?).and_return(true) } 35 | 36 | it "traces message" do 37 | expect(Sentry).to receive(:get_current_scope).and_return(Sentry::Scope.new) 38 | expect(Sentry).to receive(:start_transaction).and_return(sentry_transaction) 39 | 40 | expect(sentry_transaction).to receive(:is_a?).and_return(Sentry::Span) 41 | expect(sentry_transaction).to receive(:set_http_status).with(200) 42 | expect(sentry_transaction).to receive(:finish) 43 | 44 | described_class.new("consumer.consumed_one", event_payload).trace {} 45 | end 46 | 47 | context "with scope" do 48 | let(:sentry_scope) { instance_double(Sentry::Scope) } 49 | 50 | before do 51 | allow(sentry_scope).to receive(:transaction_name) 52 | allow(sentry_scope).to receive(:set_span) 53 | allow(sentry_scope).to receive(:clear) 54 | allow(sentry_transaction).to receive(:set_http_status) 55 | allow(sentry_transaction).to receive(:finish) 56 | end 57 | 58 | context "when custom consumer class is used" do 59 | let(:custom_class) { stub_const("SomeModule::CustomConsumerClass", Class.new(Sbmt::KafkaConsumer::BaseConsumer)) } 60 | let(:caller) { custom_class.consumer_klass.new } 61 | 62 | it "sets proper params" do 63 | expect(Sentry).to receive(:get_current_scope).and_return(sentry_scope) 64 | expect(Sentry).to receive(:start_transaction).and_return(sentry_transaction) 65 | 66 | expect(sentry_scope).to receive(:set_transaction_name).with("Sbmt/KafkaConsumer/SomeModule::CustomConsumerClass") 67 | expect(sentry_scope).to receive(:set_tags).with(hash_including(offset: 0, topic: "topic", trace_id: "trace-id")) 68 | 69 | described_class.new("consumer.consumed_one", event_payload).trace {} 70 | end 71 | end 72 | 73 | context "when base consumer class is used" do 74 | let(:caller) { Sbmt::KafkaConsumer::BaseConsumer.consumer_klass.new } 75 | 76 | it "sets proper params" do 77 | expect(Sentry).to receive(:get_current_scope).and_return(sentry_scope) 78 | expect(Sentry).to receive(:start_transaction).and_return(sentry_transaction) 79 | 80 | expect(sentry_scope).to receive(:set_transaction_name).with("Sbmt/KafkaConsumer/Sbmt::KafkaConsumer::BaseConsumer") 81 | expect(sentry_scope).to receive(:set_tags).with(hash_including(offset: 0, topic: "topic", trace_id: "trace-id")) 82 | 83 | described_class.new("consumer.consumed_one", event_payload).trace {} 84 | end 85 | end 86 | end 87 | 88 | it "traces message when error is raised" do 89 | expect(Sentry).to receive(:get_current_scope).and_return(Sentry::Scope.new) 90 | expect(Sentry).to receive(:start_transaction).and_return(sentry_transaction) 91 | 92 | expect(sentry_transaction).to receive(:is_a?).and_return(Sentry::Span) 93 | expect(sentry_transaction).to receive(:set_http_status).with(500) 94 | expect(sentry_transaction).to receive(:finish) 95 | 96 | expect do 97 | described_class.new("consumer.consumed_one", event_payload).trace { raise "error" } 98 | end.to raise_error("error") 99 | end 100 | end 101 | 102 | context "when event is consumer.consumed_batch" do 103 | before { allow(Sentry).to receive(:initialized?).and_return(true) } 104 | 105 | it "traces message" do 106 | expect(Sentry).to receive(:get_current_scope).and_return(Sentry::Scope.new) 107 | 
expect(Sentry).to receive(:start_transaction).and_return(sentry_transaction) 108 | 109 | expect(sentry_transaction).to receive(:is_a?).and_return(Sentry::Span) 110 | expect(sentry_transaction).to receive(:set_http_status).with(200) 111 | expect(sentry_transaction).to receive(:finish) 112 | 113 | described_class.new("consumer.consumed_batch", event_payload_with_batch).trace {} 114 | end 115 | 116 | context "with scope" do 117 | let(:sentry_scope) { instance_double(Sentry::Scope) } 118 | 119 | before do 120 | allow(sentry_scope).to receive(:transaction_name) 121 | allow(sentry_scope).to receive(:set_span) 122 | allow(sentry_scope).to receive(:clear) 123 | allow(sentry_transaction).to receive(:set_http_status) 124 | allow(sentry_transaction).to receive(:finish) 125 | end 126 | 127 | context "when custom consumer class is used" do 128 | let(:custom_class) { stub_const("SomeModule::CustomConsumerClass", Class.new(Sbmt::KafkaConsumer::BaseConsumer)) } 129 | let(:caller) { custom_class.consumer_klass.new } 130 | 131 | it "sets proper params" do 132 | expect(Sentry).to receive(:get_current_scope).and_return(sentry_scope) 133 | expect(Sentry).to receive(:start_transaction).and_return(sentry_transaction) 134 | 135 | expect(sentry_scope).to receive(:set_transaction_name).with("Sbmt/KafkaConsumer/SomeModule::CustomConsumerClass") 136 | expect(sentry_scope).to receive(:set_tags).with(hash_including(first_offset: 0, last_offset: 1, topic: "topic", trace_id: "trace-id")) 137 | 138 | described_class.new("consumer.consumed_batch", event_payload_with_batch).trace {} 139 | end 140 | end 141 | 142 | context "when base consumer class is used" do 143 | let(:caller) { Sbmt::KafkaConsumer::BaseConsumer.consumer_klass.new } 144 | 145 | it "sets proper params" do 146 | expect(Sentry).to receive(:get_current_scope).and_return(sentry_scope) 147 | expect(Sentry).to receive(:start_transaction).and_return(sentry_transaction) 148 | 149 | expect(sentry_scope).to receive(:set_transaction_name).with("Sbmt/KafkaConsumer/Sbmt::KafkaConsumer::BaseConsumer") 150 | expect(sentry_scope).to receive(:set_tags).with(hash_including(first_offset: 0, last_offset: 1, topic: "topic", trace_id: "trace-id")) 151 | 152 | described_class.new("consumer.consumed_batch", event_payload_with_batch).trace {} 153 | end 154 | end 155 | end 156 | 157 | it "traces message when error is raised" do 158 | expect(Sentry).to receive(:get_current_scope).and_return(Sentry::Scope.new) 159 | expect(Sentry).to receive(:start_transaction).and_return(sentry_transaction) 160 | 161 | expect(sentry_transaction).to receive(:is_a?).and_return(Sentry::Span) 162 | expect(sentry_transaction).to receive(:set_http_status).with(500) 163 | expect(sentry_transaction).to receive(:finish) 164 | 165 | expect do 166 | described_class.new("consumer.consumed_batch", event_payload_with_batch).trace { raise "error" } 167 | end.to raise_error("error") 168 | end 169 | end 170 | 171 | context "when event is error.occurred" do 172 | let(:ex) { StandardError.new("error") } 173 | let(:sentry_scope) { double("sentry scope") } 174 | let(:event_payload) { OpenStruct.new(caller: caller, message: message, trace_id: trace_id, error: ex, type: nil) } 175 | 176 | before do 177 | allow(Sentry).to receive(:initialized?).and_return(true) 178 | allow(Sentry).to receive(:with_scope).and_yield(sentry_scope) 179 | end 180 | 181 | context "when detailed logging is not enabled" do 182 | it "does not report payload" do 183 | expect(Sentry).to receive(:capture_exception).with(ex) 184 | 
expect(sentry_scope).not_to receive(:set_contexts) 185 | 186 | described_class.new("error.occurred", event_payload).trace {} 187 | end 188 | end 189 | 190 | context "when detailed logging is enabled" do 191 | let(:event_payload) { OpenStruct.new(caller: caller, message: message, trace_id: trace_id, error: ex, type: "consumer.inbox.consume_one") } 192 | 193 | it "captures exception" do 194 | expect(Sentry).to receive(:capture_exception).with(ex) 195 | expect(caller).to receive(:log_payload?).and_return(true) 196 | expect(sentry_scope).to receive(:set_contexts).with(contexts: { 197 | payload: message.payload, 198 | metadata: message.metadata 199 | }) 200 | 201 | described_class.new("error.occurred", event_payload).trace {} 202 | end 203 | end 204 | 205 | context "when event is not an exception" do 206 | let(:ex) { "some string" } 207 | 208 | it "does not capture exception" do 209 | expect(Sentry).not_to receive(:capture_exception) 210 | 211 | described_class.new("error.occurred", event_payload).trace {} 212 | end 213 | end 214 | end 215 | end 216 | end 217 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/instrumentation/tracing_monitor_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Instrumentation::TracingMonitor do 6 | describe "when initialized" do 7 | it "returns sentry and otel monitors" do 8 | expect(described_class.new.monitors).to eq( 9 | [ 10 | Sbmt::KafkaConsumer::Instrumentation::OpenTelemetryTracer, 11 | Sbmt::KafkaConsumer::Instrumentation::SentryTracer 12 | ] 13 | ) 14 | end 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/instrumentation/yabeda_metrics_listener_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Instrumentation::YabedaMetricsListener do 6 | let(:message) { build(:messages_message) } 7 | let(:messages) { OpenStruct.new(metadata: build(:messages_batch_metadata), count: 1) } 8 | 9 | describe ".on_statistics_emitted" do 10 | let(:base_rdkafka_stats) { 11 | { 12 | "client_id" => "some-name", 13 | "brokers" => { 14 | "kafka:9092/1001" => { 15 | "name" => "kafka:9092/1001", 16 | "nodeid" => 1001, 17 | "nodename" => "kafka:9092", 18 | "tx_d" => 7, 19 | "txbytes" => 338, 20 | "txerrs_d" => 0, 21 | "rx" => 7, 22 | "rxbytes" => 827, 23 | "rxerrs_d" => 0, 24 | "rtt" => { 25 | "avg" => 1984 26 | } 27 | } 28 | } 29 | }.freeze 30 | } 31 | 32 | context "when only base data is available" do 33 | let(:event) do 34 | Karafka::Core::Monitoring::Event.new( 35 | "statistics.emitted", 36 | {consumer_group_id: "consumer-group-id", statistics: base_rdkafka_stats} 37 | ) 38 | end 39 | 40 | it "reports only broker metrics" do 41 | tags = {client: "some-name", broker: "kafka:9092"} 42 | expect { 43 | described_class.new.send(:report_rdkafka_stats, event, async: false) 44 | }.to measure_yabeda_histogram(Yabeda.kafka_api.latency).with_tags(tags) 45 | .and measure_yabeda_histogram(Yabeda.kafka_api.request_size).with_tags(tags) 46 | .and measure_yabeda_histogram(Yabeda.kafka_api.response_size).with_tags(tags) 47 | .and increment_yabeda_counter(Yabeda.kafka_api.calls).with_tags(tags) 48 | .and increment_yabeda_counter(Yabeda.kafka_api.errors).with_tags(tags) 49 | .and 
not_update_yabeda_gauge(Yabeda.kafka_consumer.group_rebalances) 50 | .and not_update_yabeda_gauge(Yabeda.kafka_consumer.offset_lag) 51 | end 52 | end 53 | 54 | context "when consumer group data available" do 55 | let(:event) do 56 | Karafka::Core::Monitoring::Event.new( 57 | "statistics.emitted", { 58 | consumer_group_id: "consumer-group-id", 59 | statistics: base_rdkafka_stats.merge( 60 | "cgrp" => { 61 | "state" => "up", 62 | "rebalance_cnt" => 0 63 | } 64 | ) 65 | } 66 | ) 67 | end 68 | 69 | it "reports consumer group metrics" do 70 | expect { 71 | described_class.new.send(:report_rdkafka_stats, event, async: false) 72 | }.to update_yabeda_gauge(Yabeda.kafka_consumer.group_rebalances) 73 | .with_tags(client: "some-name", group_id: "consumer-group-id", state: "up") 74 | end 75 | end 76 | 77 | context "when topic data available" do 78 | let(:event) do 79 | Karafka::Core::Monitoring::Event.new( 80 | "statistics.emitted", { 81 | consumer_group_id: "consumer-group-id", 82 | statistics: base_rdkafka_stats.merge( 83 | "topics" => { 84 | "topic_with_json_data" => { 85 | "topic" => "topic_with_json_data", 86 | "partitions" => { 87 | "0" => { 88 | "partition" => 0, 89 | "consumer_lag" => 10 90 | }, 91 | "1" => { 92 | "partition" => 1, 93 | "consumer_lag" => 10, 94 | "fetch_state" => "stopped" 95 | }, 96 | "2" => { 97 | "partition" => 2, 98 | "consumer_lag" => 10, 99 | "fetch_state" => "none" 100 | }, 101 | "-1" => { 102 | "partition" => -1, 103 | "consumer_lag" => -1 104 | } 105 | } 106 | } 107 | } 108 | ) 109 | } 110 | ) 111 | end 112 | 113 | it "reports topic metrics" do 114 | expect { 115 | described_class.new.send(:report_rdkafka_stats, event, async: false) 116 | }.to update_yabeda_gauge(Yabeda.kafka_consumer.offset_lag).with_tags(client: "some-name", group_id: "consumer-group-id", partition: "0", topic: "topic_with_json_data").with(10) 117 | .and update_yabeda_gauge(Yabeda.kafka_consumer.offset_lag).with_tags(client: "some-name", group_id: "consumer-group-id", partition: "1", topic: "topic_with_json_data").with(0) 118 | .and update_yabeda_gauge(Yabeda.kafka_consumer.offset_lag).with_tags(client: "some-name", group_id: "consumer-group-id", partition: "2", topic: "topic_with_json_data").with(0) 119 | .and not_update_yabeda_gauge(Yabeda.kafka_consumer.offset_lag).with_tags(partition: "-1") 120 | end 121 | end 122 | end 123 | 124 | describe ".on_consumer_consumed" do 125 | let(:topic) { OpenStruct.new(consumer_group: OpenStruct.new(id: "group_id")) } 126 | let(:consumer) { OpenStruct.new(topic: topic, messages: messages) } 127 | let(:event) { Karafka::Core::Monitoring::Event.new("consumer.consumed", caller: consumer, time: 10) } 128 | 129 | tags = { 130 | client: "some-name", group_id: "group_id", 131 | partition: 0, topic: "topic" 132 | } 133 | 134 | it "reports batch consuming metrics" do 135 | expect { described_class.new.on_consumer_consumed(event) } 136 | .to measure_yabeda_histogram(Yabeda.kafka_consumer.batch_size).with_tags(tags) 137 | .and measure_yabeda_histogram(Yabeda.kafka_consumer.process_batch_latency).with_tags(tags).with(0.01) 138 | .and update_yabeda_gauge(Yabeda.kafka_consumer.time_lag).with_tags(tags) 139 | end 140 | end 141 | 142 | describe ".on_consumer_consumed_one" do 143 | let(:topic) { OpenStruct.new(consumer_group: OpenStruct.new(id: "group_id")) } 144 | let(:consumer) { OpenStruct.new(topic: topic, messages: messages) } 145 | let(:event) { Karafka::Core::Monitoring::Event.new("consumer.consumed", caller: consumer, time: 10) } 146 | 147 | tags = { 148 | client: 
"some-name", group_id: "group_id", 149 | partition: 0, topic: "topic" 150 | } 151 | 152 | it "reports consumed message metrics" do 153 | expect { described_class.new.on_consumer_consumed_one(event) } 154 | .to increment_yabeda_counter(Yabeda.kafka_consumer.process_messages).with_tags(tags) 155 | .and measure_yabeda_histogram(Yabeda.kafka_consumer.process_message_latency).with_tags(tags).with(0.01) 156 | end 157 | end 158 | 159 | describe ".on_consumer_inbox_consumed_one" do 160 | let(:inbox_tags) do 161 | { 162 | client: "some-name", group_id: "group_id", partition: 0, topic: "topic", 163 | inbox_name: "inbox", event_name: "event", status: "status" 164 | } 165 | end 166 | 167 | let(:topic) { OpenStruct.new(consumer_group: OpenStruct.new(id: "group_id")) } 168 | let(:consumer) { OpenStruct.new(topic: topic, messages: messages) } 169 | 170 | let(:event) do 171 | Karafka::Core::Monitoring::Event.new( 172 | "consumer.consumed", 173 | inbox_tags.merge(caller: consumer) 174 | ) 175 | end 176 | 177 | it "increments consumer metric" do 178 | expect { described_class.new.on_consumer_inbox_consumed_one(event) } 179 | .to increment_yabeda_counter(Yabeda.kafka_consumer.inbox_consumes) 180 | .with_tags(inbox_tags) 181 | end 182 | end 183 | 184 | describe ".on_error_occurred" do 185 | let(:topic) { OpenStruct.new(consumer_group: OpenStruct.new(id: "group_id")) } 186 | let(:consumer) { OpenStruct.new(topic: topic, messages: messages) } 187 | let(:tags) do 188 | { 189 | client: "some-name", group_id: "group_id", 190 | partition: 0, topic: "topic" 191 | } 192 | end 193 | 194 | context "when error type is consumer.revoked.error" do 195 | let(:event) { Karafka::Core::Monitoring::Event.new("error.occurred", caller: consumer, type: "consumer.revoked.error") } 196 | 197 | it "increments consumer leave group counter" do 198 | expect { described_class.new.on_error_occurred(event) } 199 | .to increment_yabeda_counter(Yabeda.kafka_consumer.leave_group_errors) 200 | .with_tags(tags) 201 | end 202 | end 203 | 204 | context "when error type is consumer.consume.error" do 205 | let(:event) { Karafka::Core::Monitoring::Event.new("error.occurred", caller: consumer, type: "consumer.consume.error") } 206 | 207 | it "increments consumer batch error counter" do 208 | expect { described_class.new.on_error_occurred(event) } 209 | .to increment_yabeda_counter(Yabeda.kafka_consumer.process_batch_errors) 210 | .with_tags(tags) 211 | end 212 | end 213 | 214 | context "when error type is consumer.base.consume_one" do 215 | let(:event) { Karafka::Core::Monitoring::Event.new("error.occurred", caller: consumer, type: "consumer.base.consume_one") } 216 | 217 | it "increments consumer error counter" do 218 | expect { described_class.new.on_error_occurred(event) } 219 | .to increment_yabeda_counter(Yabeda.kafka_consumer.process_message_errors) 220 | .with_tags(tags) 221 | end 222 | end 223 | 224 | context "when error type is consumer.inbox.consume_one" do 225 | let(:inbox_tags) do 226 | { 227 | client: "some-name", group_id: "group_id", partition: 0, topic: "topic", 228 | inbox_name: "inbox", event_name: "event", status: "status" 229 | } 230 | end 231 | let(:event) { Karafka::Core::Monitoring::Event.new("error.occurred", caller: consumer, type: "consumer.inbox.consume_one", **inbox_tags) } 232 | 233 | it "increments inbox consumer metric" do 234 | expect { described_class.new.on_error_occurred(event) } 235 | .to increment_yabeda_counter(Yabeda.kafka_consumer.inbox_consumes) 236 | .with_tags(inbox_tags) 237 | end 238 | end 239 | end 240 | 
end 241 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/probes/host_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | require "yabeda/prometheus/exporter" 5 | 6 | RSpec.describe Sbmt::KafkaConsumer::Probes::Host do 7 | include Anyway::Testing::Helpers 8 | 9 | describe ".run_async" do 10 | let(:env) { 11 | { 12 | "KAFKA_CONSUMER_PROBES__PORT" => probes_port, 13 | "KAFKA_CONSUMER_METRICS__PORT" => metrics_port, 14 | "KAFKA_CONSUMER_METRICS__PATH" => metrics_path 15 | }.compact 16 | } 17 | let(:probes_port) { nil } 18 | let(:metrics_port) { nil } 19 | let(:metrics_path) { nil } 20 | 21 | before do 22 | allow(Thread).to receive(:new).and_yield 23 | allow(described_class.webrick).to receive(:run) 24 | allow_any_instance_of(Rack::Builder).to receive(:use) 25 | allow(HttpHealthCheck).to receive(:run_server_async) 26 | allow(Yabeda::Prometheus::Exporter).to receive(:new) 27 | end 28 | 29 | around do |ex| 30 | with_env(env) { ex.run } 31 | end 32 | 33 | context "when probe and metrics ports are equal" do 34 | let(:probes_port) { "8080" } 35 | let(:metrics_port) { "8080" } 36 | 37 | it "starts on single port" do 38 | expect(described_class).to receive(:start_on_single_port) 39 | described_class.run_async 40 | end 41 | 42 | it "calls WEBrick.run with the correct parameters" do 43 | described_class.run_async 44 | expect(described_class.webrick).to have_received(:run) do |rack_builder, **options| 45 | expect(options[:Host]).to eq "0.0.0.0" 46 | expect(options[:Port]).to eq 8080 47 | expect(rack_builder).to have_received(:use).with(Yabeda::Prometheus::Exporter, path: "/metrics") 48 | end 49 | end 50 | 51 | context "with custom metrics path" do 52 | let(:metrics_path) { "/custom_metrics_path" } 53 | 54 | it "calls WEBrick.run with the correct parameters" do 55 | described_class.run_async 56 | expect(described_class.webrick).to have_received(:run) do |rack_builder, **options| 57 | expect(options[:Host]).to eq "0.0.0.0" 58 | expect(options[:Port]).to eq 8080 59 | expect(rack_builder).to have_received(:use).with(Yabeda::Prometheus::Exporter, path: "/custom_metrics_path") 60 | end 61 | end 62 | end 63 | end 64 | 65 | context "when probe and metrics ports are different" do 66 | let(:probes_port) { "8080" } 67 | let(:metrics_port) { "9090" } 68 | 69 | it "starts on different ports" do 70 | expect(described_class).to receive(:start_on_different_ports) 71 | described_class.run_async 72 | end 73 | 74 | it "calls HttpHealthCheck.run_server_async with the correct parameters" do 75 | described_class.run_async 76 | expect(HttpHealthCheck).to have_received(:run_server_async).with( 77 | port: 8080, 78 | rack_app: an_instance_of(HttpHealthCheck::RackApp) 79 | ) 80 | end 81 | 82 | it "calls WEBrick.run for metrics with the correct parameters" do 83 | described_class.run_async 84 | expect(described_class.webrick).to have_received(:run) do |rack_builder, **options| 85 | expect(options[:Host]).to eq "0.0.0.0" 86 | expect(options[:Port]).to eq 9090 87 | expect(rack_builder).to have_received(:use).with(Yabeda::Prometheus::Exporter, path: "/metrics") 88 | end 89 | end 90 | 91 | context "with custom metrics path" do 92 | let(:metrics_path) { "/custom_metrics_path" } 93 | 94 | it "calls WEBrick.run for metrics with the correct parameters" do 95 | described_class.run_async 96 | expect(described_class.webrick).to have_received(:run) do |rack_builder, **options| 97 
| expect(options[:Host]).to eq "0.0.0.0" 98 | expect(options[:Port]).to eq 9090 99 | expect(rack_builder).to have_received(:use).with(Yabeda::Prometheus::Exporter, path: "/custom_metrics_path") 100 | end 101 | end 102 | end 103 | end 104 | end 105 | end 106 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/probes/probe_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Probes::Probe do 6 | let(:subject_klass) do 7 | Class.new do 8 | include Sbmt::KafkaConsumer::Probes::Probe 9 | 10 | def probe(_env); end 11 | end 12 | end 13 | 14 | let(:env) { double(:env) } 15 | let(:service) { subject_klass.new } 16 | let(:logger) { instance_double(Logger) } 17 | 18 | before do 19 | allow(Sbmt::KafkaConsumer).to receive(:logger).and_return(logger) 20 | allow(logger).to receive(:error) 21 | end 22 | 23 | describe ".call" do 24 | it "calls probe with env" do 25 | expect(service).to receive(:probe).with(env) 26 | service.call(env) 27 | end 28 | 29 | it "returns 500 when there's an error" do 30 | allow(service).to receive(:probe).and_raise("Unexpected error") 31 | expect(service.call(env)).to eq [ 32 | 500, 33 | {"Content-Type" => "application/json"}, 34 | [{error_class: "RuntimeError", error_message: "Unexpected error"}.to_json] 35 | ] 36 | end 37 | 38 | describe ".probe_ok" do 39 | it "returns 200 with meta" do 40 | expect(service.probe_ok).to eq [200, {"Content-Type" => "application/json"}, ["{}"]] 41 | end 42 | end 43 | 44 | describe ".probe_error" do 45 | it "logs the error message and returns 500 with meta" do 46 | error_meta = {foo: "bar"} 47 | expect(service.probe_error(error_meta)).to eq [500, {"Content-Type" => "application/json"}, [error_meta.to_json]] 48 | 49 | expect(logger).to have_received(:error).with("probe error meta: #{error_meta.inspect}") 50 | end 51 | end 52 | end 53 | end 54 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/routing/karafka_v1_consumer_mapper_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Routing::KarafkaV1ConsumerMapper do 6 | describe ".call" do 7 | it "underscores client-id in consumer-group name" do 8 | # client_id is "some-name" in kafka_consumer.yml 9 | expect(described_class.new.call("consumer-group")) 10 | .to eq("some_name_consumer-group") 11 | end 12 | end 13 | end 14 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer/serialization/null_deserializer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaConsumer::Serialization::NullDeserializer do 6 | let(:message) { double("message") } 7 | let(:raw_message_body) { "body" } 8 | 9 | describe ".call" do 10 | it "returns the raw payload" do 11 | expect(message).to receive(:raw_payload).and_return(raw_message_body) 12 | expect(described_class.new.call(message)).to eq(raw_message_body) 13 | end 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_consumer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 
| RSpec.describe Sbmt::KafkaConsumer do 4 | it "has a version number" do 5 | expect(Sbmt::KafkaConsumer::VERSION).not_to be_nil 6 | end 7 | end 8 | -------------------------------------------------------------------------------- /spec/spec_helper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | ENV["RAILS_ENV"] = "test" 4 | 5 | require "bundler/setup" 6 | 7 | require "simplecov" 8 | SimpleCov.start do 9 | minimum_coverage 90 10 | end 11 | 12 | require "rspec" 13 | require "rspec_junit_formatter" 14 | require "yabeda" 15 | require "yabeda/rspec" 16 | 17 | RSpec::Matchers.define_negated_matcher :not_change, :change 18 | RSpec::Matchers.define_negated_matcher :not_include, :include 19 | 20 | RSpec.configure do |config| 21 | config.expect_with :rspec do |expectations| 22 | expectations.include_chain_clauses_in_custom_matcher_descriptions = true 23 | end 24 | 25 | config.mock_with :rspec do |mocks| 26 | mocks.verify_partial_doubles = true 27 | end 28 | 29 | config.filter_run_when_matching :focus 30 | config.example_status_persistence_file_path = ".rspec_status" 31 | config.run_all_when_everything_filtered = true 32 | 33 | if config.files_to_run.one? 34 | # Use the documentation formatter for detailed output, 35 | # unless a formatter has already been configured 36 | # (e.g. via a command-line flag). 37 | config.default_formatter = "doc" 38 | end 39 | 40 | config.order = :random 41 | Kernel.srand config.seed 42 | end 43 | --------------------------------------------------------------------------------
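Note: the NullDeserializer spec above fully pins down the class's observable behavior — call(message) returns message.raw_payload untouched. For orientation, here is a minimal sketch consistent with that spec. It inherits from the BaseDeserializer listed under lib/sbmt/kafka_consumer/serialization/, whose interface is assumed from the file layout rather than shown here, so treat this as an illustration, not the gem's verbatim source.

# frozen_string_literal: true

module Sbmt
  module KafkaConsumer
    module Serialization
      # Pass-through deserializer: hands the raw Kafka payload to the
      # consumer without decoding it (sketch inferred from the spec above,
      # not copied from the gem's implementation).
      class NullDeserializer < BaseDeserializer
        def call(message)
          message.raw_payload
        end
      end
    end
  end
end

Wired in as a topic's deserializer, this lets a consumer receive message bytes exactly as they were produced — which is what the spec's double("message") with a stubbed raw_payload exercises.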