├── .rspec ├── lefthook-local.dip_example.yml ├── bin ├── setup ├── test └── console ├── Gemfile ├── lib ├── sbmt │ ├── kafka_producer │ │ ├── testing.rb │ │ ├── version.rb │ │ ├── outbox_producer.rb │ │ ├── types.rb │ │ ├── outbox_transport_factory.rb │ │ ├── instrumentation │ │ │ ├── tracing_middleware.rb │ │ │ ├── open_telemetry_loader.rb │ │ │ ├── open_telemetry_tracer.rb │ │ │ └── yabeda_metrics_listener.rb │ │ ├── logger.rb │ │ ├── railtie.rb │ │ ├── testing │ │ │ └── configure_producer_client.rb │ │ ├── error_tracker.rb │ │ ├── config │ │ │ ├── producer.rb │ │ │ ├── kafka.rb │ │ │ └── auth.rb │ │ ├── yabeda_configurer.rb │ │ ├── kafka_client_factory.rb │ │ └── base_producer.rb │ └── kafka_producer.rb └── generators │ └── kafka_producer │ ├── install │ ├── USAGE │ ├── install_generator.rb │ └── templates │ │ └── kafka_producer.yml │ ├── outbox_producer │ ├── USAGE │ └── outbox_producer_generator.rb │ ├── producer │ ├── USAGE │ ├── templates │ │ └── producer.rb.erb │ └── producer_generator.rb │ └── concerns │ └── configuration.rb ├── spec ├── internal │ ├── app │ │ └── middlewares │ │ │ └── test_global_middleware.rb │ └── config │ │ ├── initializers │ │ ├── kafka_producer.rb │ │ ├── open_telemetry.rb │ │ └── sentry.rb │ │ └── kafka_producer.yml ├── sbmt │ └── kafka_producer │ │ ├── error_tracker_spec.rb │ │ ├── outbox_transport_factory_spec.rb │ │ ├── logger_spec.rb │ │ ├── instrumentation │ │ ├── open_telemetry_tracer_spec.rb │ │ └── yabeda_metrics_listener_spec.rb │ │ ├── config │ │ ├── kafka_config_spec.rb │ │ ├── auth_config_spec.rb │ │ └── producer_spec.rb │ │ ├── outbox_producer_spec.rb │ │ ├── kafka_client_factory_spec.rb │ │ └── base_producer_spec.rb ├── spec_helper.rb └── rails_helper.rb ├── lefthook.yml ├── Rakefile ├── .gitignore ├── docker-compose.yml ├── rubocop └── rspec.yml ├── .github ├── PULL_REQUEST_TEMPLATE.md ├── ISSUE_TEMPLATE │ └── bug_report.md └── workflows │ └── tests.yml ├── .rubocop.yml ├── Appraisals ├── .gitlab-ci.yml ├── LICENSE 
├── dip.yml ├── sbmt-kafka_producer.gemspec ├── CHANGELOG.md └── README.md /.rspec: -------------------------------------------------------------------------------- 1 | --color 2 | --require spec_helper 3 | --require rails_helper 4 | -------------------------------------------------------------------------------- /lefthook-local.dip_example.yml: -------------------------------------------------------------------------------- 1 | pre-commit: 2 | commands: 3 | rubocop: 4 | run: dip {cmd} 5 | -------------------------------------------------------------------------------- /bin/setup: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euxo pipefail 4 | 5 | bundle install 6 | bundle exec appraisal install 7 | -------------------------------------------------------------------------------- /bin/test: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euxo pipefail 4 | 5 | bundle exec rubocop 6 | bundle exec appraisal rspec 7 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source ENV.fetch("RUBYGEMS_PUBLIC_SOURCE", "https://rubygems.org") 4 | 5 | gemspec 6 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_producer/testing.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rspec" 4 | 5 | Dir["#{__dir__}/testing/*.rb"].sort.each { |f| require f } 6 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_producer/version.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaProducer 5 | VERSION = "3.3.0" 6 | end 7 | end 8 
# frozen_string_literal: true

# No-op producer middleware used by the test app: ignores the message and
# simply invokes the downstream chain, returning its result unchanged.
class TestGlobalMiddleware
  # @param _payload [Object] message payload (ignored)
  # @param _options [Hash] producer options (ignored)
  # @return [Object] whatever the downstream block returns
  def call(_payload, _options, &downstream)
    downstream.call
  end
end
# frozen_string_literal: true

module Sbmt
  module KafkaProducer
    # Producer used as an outbox transport (see OutboxTransportFactory):
    # delivers a pre-built outbox payload synchronously so the outbox
    # daemon can confirm delivery (or retry the item on failure).
    class OutboxProducer < Sbmt::KafkaProducer::BaseProducer
      # @param outbox_item [#options] outbox record; its +options+ hash is
      #   splatted as keyword options into the inherited +sync_publish!+
      # @param payload [Object] serialized message body to publish
      def call(outbox_item, payload)
        sync_publish!(payload, **outbox_item.options)
      end
    end
  end
end
# frozen_string_literal: true

module Sbmt
  module KafkaProducer
    # Factory used by the outbox gem to obtain a transport object
    # (an OutboxProducer) for a given topic / kafka configuration.
    class OutboxTransportFactory
      # Builds an OutboxProducer bound to +topic+ with a client created
      # from the given kafka settings.
      #
      # @param topic [String] destination kafka topic
      # @param kafka [Hash] kafka client settings, passed to KafkaClientFactory
      # @return [OutboxProducer]
      def self.build(topic:, kafka: {})
        client = KafkaClientFactory.build(kafka)
        OutboxProducer.new(topic: topic, client: client)
      end
    end
  end
end
# frozen_string_literal: true

module Sbmt
  module KafkaProducer
    module Instrumentation
      # WaterDrop middleware chain that wires in OpenTelemetry tracing
      # when the optional tracer has been loaded.
      class TracingMiddleware < ::WaterDrop::Middleware
        def initialize
          super

          # OpenTelemetryTracer is only defined when
          # instrumentation/open_telemetry_loader has been required
          # (it is optional), hence the defined? guard.
          append(OpenTelemetryTracer.new) if defined?(OpenTelemetryTracer)
        end
      end
    end
  end
end
# frozen_string_literal: true

require "rails/generators"
require "generators/kafka_producer/concerns/configuration"

module KafkaProducer
  module Generators
    # `bin/rails g kafka_producer:install` — copies the default
    # kafka_producer.yml template into the host application.
    class InstallGenerator < Rails::Generators::Base
      include Concerns::Configuration

      source_root File.expand_path("templates", __dir__)

      # Copies templates/kafka_producer.yml to CONFIG_PATH
      # (config/kafka_producer.yml, defined in Concerns::Configuration).
      def create_kafka_producer_yml
        copy_file "kafka_producer.yml", CONFIG_PATH
      end
    end
  end
end
# frozen_string_literal: true

module Sbmt
  module KafkaProducer
    # Thin wrapper around Rails.logger exposing the subset of the stdlib
    # Logger interface used by the gem: severity methods, #add and #tagged.
    class Logger
      delegate :logger, to: :Rails

      # Define debug/info/warn/error/fatal, each forwarding its arguments
      # verbatim to Rails.logger.
      %i[
        debug
        info
        warn
        error
        fatal
      ].each do |log_level|
        define_method log_level do |*args|
          logger.send(log_level, *args)
        end
      end

      # Forwards ::Logger#add (severity, message, progname, &block).
      def add(...)
        logger.add(...)
      end

      # Forwards #tagged — assumes Rails.logger supports tagged logging
      # (ActiveSupport::TaggedLogging); NOTE(review): confirm for host apps
      # that replace the default Rails logger.
      def tagged(...)
        logger.tagged(...)
      end
    end
  end
end
# frozen_string_literal: true

# See compatibility table at https://www.fastruby.io/blog/ruby/rails/versions/compatibility-table.html

# Rails minor version => Ruby versions it is exercised against.
versions_map = {
  "6.1" => %w[2.7 3.0],
  "7.0" => %w[3.1],
  "7.1" => %w[3.2],
  "7.2" => %w[3.3],
  "8.0" => %w[3.3]
}

# "MAJOR.MINOR" of the Ruby interpreter currently running Appraisal.
current_ruby_version = RUBY_VERSION.split(".").first(2).join(".")

# Generate a gemfile only for the Rails versions that are mapped to the
# currently running Ruby; other combinations are skipped entirely.
versions_map.each do |rails_version, ruby_versions|
  ruby_versions.each do |ruby_version|
    next if ruby_version != current_ruby_version

    appraise "rails-#{rails_version}" do
      gem "rails", "~> #{rails_version}.0"
    end
  end
end
ack_timeout: 1000 13 | retry_backoff: 1000 14 | connect_timeout: 2000 15 | message_timeout: 55000 16 | kafka_config: 17 | queue.buffering.max.messages: 1 18 | queue.buffering.max.ms: 10000 19 | 20 | development: 21 | <<: *default 22 | test: 23 | <<: *default 24 | deliver: false 25 | wait_on_queue_full: false 26 | staging: &staging 27 | <<: *default 28 | production: 29 | <<: *staging 30 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Provide kafka_producer.yml config 16 | 2. Run command 17 | 3. See error 18 | 19 | **Expected behavior** 20 | A clear and concise description of what you expected to happen. 21 | 22 | **Screenshots** 23 | If applicable, add screenshots to help explain your problem. 24 | 25 | **Context (please complete the following information):** 26 | - Ruby version 27 | - Rails version 28 | - Gem version 29 | 30 | **Additional context** 31 | Add any other context about the problem here. 
# frozen_string_literal: true

require "rails/generators"

module KafkaProducer
  module Generators
    # `bin/rails g kafka_producer:outbox_producer some_namespace/item` —
    # wires an outbox item into config/outbox.yml with the
    # sbmt/kafka_producer transport (see USAGE).
    class OutboxProducerGenerator < Rails::Generators::NamedBase
      source_root File.expand_path("templates", __dir__)

      # FIX: the description previously said "Skip creating InboxItem",
      # but this generator creates an *outbox* item
      # (`generate "outbox:item" ... --kind outbox` below).
      class_option :skip_item, type: :boolean, default: false, desc: "Skip creating OutboxItem"

      # Runs the outbox gem's generators: the item itself (unless skipped)
      # and the transport entry pointing at sbmt/kafka_producer.
      def insert_outbox_producer
        generate "outbox:item", "#{item_name.underscore} --kind outbox" unless options[:skip_item]
        generate "outbox:transport", "#{item_name.underscore} sbmt/kafka_producer --kind outbox"
      end

      private

      # Generator argument in under_scored path form, e.g. "some_namespace/item".
      def item_name
        file_path
      end
    end
  end
end
17 | end 18 | 19 | def sentry_logging(level, arr) 20 | Sentry.with_scope do |_scope| 21 | if arr.is_a?(Exception) 22 | Sentry.capture_exception(arr, level: level) 23 | else 24 | Sentry.capture_message(arr, level: level) 25 | end 26 | end 27 | end 28 | end 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | include: 2 | - project: "nstmrt/rubygems/templates" 3 | ref: master 4 | file: "build-rubygems.yml" 5 | 6 | lint: 7 | stage: test 8 | image: ${BUILD_CONF_HARBOR_REGISTRY}/dhub/library/ruby:3.3 9 | tags: 10 | - paas-tests 11 | script: 12 | - bundle install 13 | - bundle exec rubocop 14 | 15 | tests: 16 | stage: test 17 | image: ${BUILD_CONF_HARBOR_REGISTRY}/dhub/library/ruby:$RUBY_VERSION 18 | tags: 19 | - paas-tests 20 | parallel: 21 | matrix: 22 | - RUBY_VERSION: ['3.0', '3.1', '3.2', '3.3'] 23 | before_script: 24 | - gem sources --remove https://rubygems.org/ 25 | - gem sources --add ${RUBYGEMS_PUBLIC_SOURCE} 26 | - bin/setup 27 | script: 28 | - bundle exec appraisal rspec --format RspecJunitFormatter --out test-results/rspec_$RUBY_VERSION.xml --format documentation 29 | artifacts: 30 | reports: 31 | junit: test-results/rspec*.xml 32 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_producer/outbox_transport_factory_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | describe Sbmt::KafkaProducer::OutboxTransportFactory do 4 | let(:topic) { "test_topic" } 5 | let(:kafka_config) { {bootstrap_servers: ["localhost:9092"]} } 6 | 7 | describe ".build" do 8 | it "returns an instance of OutboxProducer" do 9 | expect(described_class.build(topic: topic, kafka: kafka_config)).to be_instance_of(Sbmt::KafkaProducer::OutboxProducer) 10 | end 11 | 12 | it "passes the topic and a client to 
# frozen_string_literal: true

module KafkaProducer
  module Generators
    module Concerns
      # Shared helper for generators that require config/kafka_producer.yml
      # to exist before they run.
      module Configuration
        extend ActiveSupport::Concern

        # Relative path of the gem's config file inside the host application.
        CONFIG_PATH = "config/kafka_producer.yml"

        # Ensures the config file exists. If it is missing, interactively
        # offers to run the kafka_producer:install generator; otherwise
        # aborts with instructions.
        #
        # @raise [Rails::Generators::Error] when the file is missing and the
        #   user declines to generate it
        def check_config_file!
          config_path = File.expand_path(CONFIG_PATH)
          return if File.exist?(config_path)

          generator_name = "kafka_producer:install"
          if yes?(
            "The file #{config_path} does not appear to exist." \
            " Would you like to generate it?"
          )
            generate generator_name
          else
            raise Rails::Generators::Error, "Please generate #{config_path} " \
              "by running `bin/rails g #{generator_name}` " \
              "or add this file manually."
          end
        end
      end
    end
  end
end
32 | config.default_formatter = "doc" 33 | end 34 | 35 | config.order = :random 36 | Kernel.srand config.seed 37 | end 38 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_producer/logger_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | describe Sbmt::KafkaProducer::Logger do 4 | context "when logger" do 5 | let(:msg) { "message" } 6 | 7 | it "receives debug and calls Rails.logger" do 8 | expect(Rails.logger).to receive(:debug).with(msg) 9 | described_class.new.debug(msg) 10 | end 11 | 12 | it "receives info and calls Rails.logger" do 13 | expect(Rails.logger).to receive(:info).with(msg) 14 | described_class.new.info(msg) 15 | end 16 | 17 | it "receives warn and calls Rails.logger" do 18 | expect(Rails.logger).to receive(:warn).with(msg) 19 | described_class.new.warn(msg) 20 | end 21 | 22 | it "receives error and calls Rails.logger" do 23 | expect(Rails.logger).to receive(:error).with(msg) 24 | described_class.new.error(msg) 25 | end 26 | 27 | it "receives fatal and calls Rails.logger" do 28 | expect(Rails.logger).to receive(:fatal).with(msg) 29 | described_class.new.fatal(msg) 30 | end 31 | end 32 | end 33 | -------------------------------------------------------------------------------- /lib/generators/kafka_producer/install/templates/kafka_producer.yml: -------------------------------------------------------------------------------- 1 | default: &default 2 | deliver: true 3 | wait_on_queue_full: true 4 | max_payload_size: 1000012 5 | max_wait_timeout: 60000 6 | 7 | auth: 8 | sasl_username: <%= ENV.fetch('KAFKA_BROKERS'){ 'SCRAM-SHA-512:kafka_login:kafka_password' }.split(':').second %> 9 | sasl_password: <%= ENV.fetch('KAFKA_BROKERS'){ 'SCRAM-SHA-512:kafka_login:kafka_password' }.split(':').last %> 10 | sasl_mechanism: <%= ENV.fetch('KAFKA_BROKERS'){ 'SCRAM-SHA-512:kafka_login:kafka_password' }.split(':').first %> 11 | kind: 
'sasl_plaintext' 12 | 13 | kafka: 14 | servers: "kafka:9092" 15 | connect_timeout: 2000 16 | message_timeout: 55000 17 | ack_timeout: 10000 18 | retry_backoff: 10000 19 | max_retries: 2 20 | required_acks: -1 21 | 22 | development: 23 | <<: *default 24 | auth: 25 | kind: plaintext 26 | test: 27 | <<: *default 28 | deliver: false 29 | wait_on_queue_full: false 30 | auth: 31 | kind: plaintext 32 | staging: &staging 33 | <<: *default 34 | production: 35 | <<: *staging 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Kuper Tech 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /spec/rails_helper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # Engine root is used by rails_configuration to correctly 4 | # load fixtures and support files 5 | require "pathname" 6 | ENGINE_ROOT = Pathname.new(File.expand_path("..", __dir__)) 7 | 8 | require "spec_helper" 9 | require "logger" 10 | require "combustion" 11 | 12 | begin 13 | Combustion.initialize! :action_controller do 14 | config.log_level = :fatal if ENV["LOG"].to_s.empty? 15 | config.i18n.available_locales = %i[ru en] 16 | config.i18n.default_locale = :ru 17 | end 18 | rescue => e 19 | # Fail fast if application couldn't be loaded 20 | warn "💥 Failed to load the app: #{e.message}\n#{e.backtrace.join("\n")}" 21 | exit(1) 22 | end 23 | 24 | require "rspec/rails" 25 | # Add additional requires below this line. Rails is not loaded until this point! 26 | require "yabeda" 27 | require "yabeda/rspec" 28 | 29 | # when using with combustion, anyway is required earlier than rails 30 | # so it's railtie does nothing, but that require is cached 31 | # we must require it explicitly to force anyway autoload our configs 32 | require "anyway/rails" if defined?(Rails::Railtie) 33 | 34 | require "sbmt/kafka_producer/instrumentation/open_telemetry_loader" 35 | 36 | RSpec.configure do |config| 37 | config.include ActiveSupport::Testing::TimeHelpers 38 | 39 | config.infer_spec_type_from_file_location! 40 | config.filter_rails_from_backtrace! 
41 | end 42 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Ruby 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ '**' ] 8 | 9 | jobs: 10 | lint: 11 | runs-on: ubuntu-latest 12 | env: 13 | RUBY_VERSION: "3.3" 14 | name: Rubocop 15 | steps: 16 | - name: Checkout code 17 | uses: actions/checkout@v3 18 | - name: Setup Ruby w/ same version as image 19 | uses: ruby/setup-ruby@v1 20 | with: 21 | ruby-version: "3.3" 22 | - name: Install dependencies 23 | run: | 24 | gem install dip 25 | dip bundle install 26 | - name: Run linter 27 | run: dip rubocop 28 | 29 | test: 30 | runs-on: ubuntu-latest 31 | strategy: 32 | fail-fast: false 33 | matrix: 34 | ruby: [ '3.0', '3.1', '3.2', '3.3' ] 35 | env: 36 | RUBY_VERSION: ${{ matrix.ruby }} 37 | name: Ruby ${{ matrix.ruby }} 38 | steps: 39 | - name: Checkout code 40 | uses: actions/checkout@v3 41 | - name: Setup Ruby w/ same version as image 42 | uses: ruby/setup-ruby@v1 43 | with: 44 | ruby-version: ${{ matrix.ruby }} 45 | - name: Install dependencies 46 | run: | 47 | gem install dip 48 | dip provision 49 | - name: Run tests 50 | run: dip appraisal rspec --format RspecJunitFormatter --out test-results/rspec_${{ matrix.ruby }}.xml --format documentation 51 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_producer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "zeitwerk" 4 | require "waterdrop" 5 | require "connection_pool" 6 | require "dry-initializer" 7 | require "dry/types" 8 | require "dry-struct" 9 | require "yabeda" 10 | require "anyway_config" 11 | 12 | require "anyway/rails" if defined?(Rails) 13 | require_relative "kafka_producer/railtie" if defined?(Rails::Railtie) 14 | 15 | module Sbmt 16 | module 
KafkaProducer 17 | class << self 18 | def logger 19 | @logger ||= Logger.new 20 | end 21 | 22 | def middlewares 23 | @middlewares ||= [] 24 | end 25 | end 26 | class Error < StandardError; end 27 | end 28 | end 29 | 30 | loader = Zeitwerk::Loader.new 31 | loader.push_dir(File.join(__dir__, "..")) 32 | loader.tag = "sbmt-kafka_producer" 33 | # Do not load vendors instrumentation components. Those need to be required manually if needed 34 | loader.ignore("#{__dir__}/kafka_producer/version.rb") 35 | loader.ignore("#{File.expand_path("../", __dir__)}/generators") 36 | 37 | loader.do_not_eager_load("#{__dir__}/kafka_producer/instrumentation/open_telemetry_loader.rb") 38 | loader.do_not_eager_load("#{__dir__}/kafka_producer/instrumentation/open_telemetry_tracer.rb") 39 | 40 | # completely ignore testing helpers 41 | # because testing.rb just requires some files and does not contain any constants (e.g. Testing) which Zeitwerk expects 42 | loader.ignore("#{__dir__}/kafka_producer/testing.rb") 43 | loader.ignore("#{__dir__}/kafka_producer/testing") 44 | 45 | loader.setup 46 | loader.eager_load 47 | -------------------------------------------------------------------------------- /dip.yml: -------------------------------------------------------------------------------- 1 | version: '7' 2 | 3 | environment: 4 | RUBY_VERSION: '3.3' 5 | 6 | compose: 7 | files: 8 | - docker-compose.yml 9 | 10 | interaction: 11 | bash: 12 | description: Open the Bash shell in app's container 13 | service: ruby 14 | command: /bin/bash 15 | 16 | bundle: 17 | description: Run Bundler commands 18 | service: ruby 19 | command: bundle 20 | 21 | rails: 22 | description: Run RoR commands 23 | service: ruby 24 | command: bundle exec rails 25 | 26 | appraisal: 27 | description: Run Appraisal commands 28 | service: ruby 29 | command: bundle exec appraisal 30 | 31 | rspec: 32 | description: Run Rspec commands 33 | service: ruby 34 | command: bundle exec rspec 35 | subcommands: 36 | all: 37 | command: bundle 
exec appraisal rspec 38 | rails-6.1: 39 | command: bundle exec appraisal rails-6.1 rspec 40 | rails-7.0: 41 | command: bundle exec appraisal rails-7.0 rspec 42 | rails-7.1: 43 | command: bundle exec appraisal rails-7.1 rspec 44 | rails-7.2: 45 | command: bundle exec appraisal rails-7.2 rspec 46 | 47 | rubocop: 48 | description: Run Ruby linter 49 | service: ruby 50 | command: bundle exec rubocop 51 | 52 | setup: 53 | description: Install deps 54 | service: ruby 55 | command: bin/setup 56 | 57 | test: 58 | description: Run linters, run all tests 59 | service: ruby 60 | command: bin/test 61 | 62 | provision: 63 | - dip compose down --volumes 64 | - cp -f lefthook-local.dip_example.yml lefthook-local.yml 65 | - rm -f Gemfile.lock 66 | - rm -f gemfiles/*gemfile* 67 | - dip setup 68 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_producer/instrumentation/open_telemetry_tracer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaProducer 5 | module Instrumentation 6 | class OpenTelemetryTracer 7 | delegate :enabled?, to: :class 8 | 9 | class << self 10 | def enabled? 11 | !!@enabled 12 | end 13 | 14 | attr_writer :enabled 15 | end 16 | 17 | def call(message) 18 | return message unless enabled? 
19 | 20 | topic = message[:topic] 21 | attributes = { 22 | "messaging.system" => "kafka", 23 | "messaging.destination" => topic, 24 | "messaging.destination_kind" => "topic" 25 | } 26 | 27 | message_key = extract_message_key(message[:key]) 28 | attributes["messaging.kafka.message_key"] = message_key if message_key 29 | 30 | message[:headers] ||= {} 31 | 32 | tracer.in_span("#{topic} publish", attributes: attributes, kind: :producer) do 33 | ::OpenTelemetry.propagation.inject(message[:headers]) 34 | end 35 | 36 | message 37 | end 38 | 39 | private 40 | 41 | def tracer 42 | ::Sbmt::KafkaProducer::Instrumentation::OpenTelemetryLoader.instance.tracer 43 | end 44 | 45 | def extract_message_key(key) 46 | # skip encode if already valid utf8 47 | return key if key.nil? || (key.encoding == Encoding::UTF_8 && key.valid_encoding?) 48 | 49 | key.encode(Encoding::UTF_8) 50 | rescue Encoding::UndefinedConversionError 51 | nil 52 | end 53 | end 54 | end 55 | end 56 | end 57 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_producer/config/producer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaProducer 5 | module Config 6 | class Producer < Anyway::Config 7 | class << self 8 | # Make it possible to access a singleton config instance 9 | # via class methods (i.e., without explicitly calling `instance`) 10 | delegate_missing_to :instance 11 | 12 | def coerce_to(struct) 13 | lambda do |raw_attrs| 14 | struct.new(**raw_attrs) 15 | rescue Dry::Types::SchemaError => e 16 | raise_validation_error "cannot parse #{struct}: #{e.message}" 17 | end 18 | end 19 | 20 | private 21 | 22 | # Returns a singleton config instance 23 | def instance 24 | @instance ||= new 25 | end 26 | end 27 | 28 | config_name :kafka_producer 29 | 30 | attr_config :deliver, :wait_on_queue_full, 31 | :max_payload_size, :max_wait_timeout, 32 | 
:wait_on_queue_full_timeout, 33 | auth: {}, kafka: {}, 34 | logger_class: "::Sbmt::KafkaProducer::Logger", 35 | metrics_listener_class: "::Sbmt::KafkaProducer::Instrumentation::YabedaMetricsListener" 36 | 37 | coerce_types deliver: :boolean, wait_on_queue_full: :boolean, 38 | max_payload_size: :integer, max_wait_timeout: :integer, 39 | wait_on_queue_full_timeout: :integer 40 | coerce_types kafka: coerce_to(Kafka) 41 | coerce_types auth: coerce_to(Auth) 42 | 43 | def to_kafka_options 44 | auth.to_kafka_options 45 | .merge(kafka.to_kafka_options) 46 | end 47 | end 48 | end 49 | end 50 | end 51 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_producer/instrumentation/open_telemetry_tracer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "rails_helper" 4 | 5 | describe Sbmt::KafkaProducer::Instrumentation::OpenTelemetryTracer do 6 | let(:topic_name) { "topic" } 7 | let(:message) do 8 | { 9 | headers: {"some-key" => "value"}, 10 | topic: topic_name, 11 | payload: "message payload", 12 | key: "message-key" 13 | } 14 | end 15 | 16 | describe "when disabled" do 17 | before { described_class.enabled = false } 18 | 19 | it "does not trace consumed message" do 20 | expect(::Sbmt::KafkaProducer::Instrumentation::OpenTelemetryLoader).not_to receive(:instance) 21 | 22 | described_class.new.call(message) 23 | end 24 | end 25 | 26 | describe ".trace" do 27 | let(:tracer) { double("tracer") } 28 | let(:instrumentation_instance) { double("instrumentation instance") } 29 | 30 | before do 31 | described_class.enabled = true 32 | 33 | allow(::Sbmt::KafkaProducer::Instrumentation::OpenTelemetryLoader).to receive(:instance).and_return(instrumentation_instance) 34 | allow(instrumentation_instance).to receive(:tracer).and_return(tracer) 35 | end 36 | 37 | it "injects context into message headers" do 38 | expect(tracer).to 
receive(:in_span).with("topic publish", kind: :producer, attributes: { 39 | "messaging.destination" => topic_name, 40 | "messaging.kafka.message_key" => "message-key", 41 | "messaging.destination_kind" => "topic", 42 | "messaging.system" => "kafka" 43 | }).and_yield 44 | expect(::OpenTelemetry.propagation).to receive(:inject).with(message[:headers]) 45 | described_class.new.call(message) 46 | end 47 | end 48 | end 49 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_producer/config/kafka_config_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | describe Sbmt::KafkaProducer::Config::Kafka, type: :config do 4 | let(:kafka_config_defaults) do 5 | { 6 | "socket.connection.setup.timeout.ms": 2000, 7 | "message.timeout.ms": 55000, 8 | "request.timeout.ms": 1000, 9 | "request.required.acks": -1, 10 | "message.send.max.retries": 2, 11 | "retry.backoff.ms": 1000 12 | } 13 | end 14 | 15 | context "with servers validation" do 16 | it "raises error if servers are not set" do 17 | expect { described_class.new } 18 | .to raise_error(/:servers is missing/) 19 | end 20 | 21 | it "raises error if servers have unexpected format" do 22 | expect { described_class.new(servers: "kafka://server:9092") } 23 | .to raise_error(/violates constraints/) 24 | end 25 | end 26 | 27 | context "when servers are properly set" do 28 | let(:servers) { "server1:9092,server2:9092" } 29 | let(:config) { described_class.new(servers: servers) } 30 | 31 | it "successfully loads config and translates to kafka options" do 32 | expect(config.servers).to eq(servers) 33 | expect(config.to_kafka_options) 34 | .to eq(kafka_config_defaults.merge("bootstrap.servers": servers)) 35 | end 36 | end 37 | 38 | context "when servers are also set in kafka options" do 39 | let(:root_servers) { "server1:9092,server2:9092" } 40 | let(:kafka_servers) { "server3:9092,server4:9092" } 41 | let(:config) 
{ described_class.new(servers: root_servers, kafka_options: {"bootstrap.servers": kafka_servers}) } 42 | 43 | it "root servers option takes precedence over kafka config" do 44 | expect(config.servers).to eq(root_servers) 45 | expect(config.to_kafka_options) 46 | .to eq(kafka_config_defaults.merge("bootstrap.servers": root_servers)) 47 | end 48 | end 49 | end 50 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_producer/outbox_producer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | describe Sbmt::KafkaProducer::OutboxProducer do 4 | subject(:outbox_producer) { described_class.new(client: client, topic: topic) } 5 | 6 | let(:client) { instance_double(WaterDrop::Producer) } 7 | let(:topic) { "test_topic" } 8 | let(:payload) { {message: "payload"} } 9 | let(:outbox_item) { double("OutboxItem", options: {partition: 0}) } 10 | 11 | before do 12 | allow(Sbmt::KafkaProducer::KafkaClientFactory).to receive(:default_client).and_return(client) 13 | end 14 | 15 | describe "#call" do 16 | it "calls sync_publish with payload and outbox_item options" do 17 | expect_any_instance_of(described_class).to receive(:sync_publish!).with(payload, partition: 0) 18 | outbox_producer.call(outbox_item, payload) 19 | end 20 | end 21 | 22 | describe "#sync_publish" do 23 | let(:delivery_report) do 24 | instance_double(Rdkafka::Producer::DeliveryReport, 25 | error: nil, 26 | label: nil, 27 | offset: 0, 28 | partition: 0, 29 | topic_name: "my_topic") 30 | end 31 | 32 | it "calls client.produce_sync with payload and merged options" do 33 | expect(client).to receive(:produce_sync).with(payload: payload, topic: topic, partition: 0).and_return(delivery_report) 34 | outbox_producer.sync_publish(payload, partition: 0) 35 | end 36 | end 37 | 38 | describe "dependencies" do 39 | it "sets client to the default_client if not specified" do 40 | 
allow(Sbmt::KafkaProducer::KafkaClientFactory).to receive(:default_client).and_return(client) 41 | outbox_producer = described_class.new(topic: topic) 42 | 43 | expect(outbox_producer.client).to eq(client) 44 | end 45 | 46 | it "sets topic to the specified value" do 47 | outbox_producer = described_class.new(client: client, topic: topic) 48 | 49 | expect(outbox_producer.topic).to eq(topic) 50 | end 51 | end 52 | end 53 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_producer/config/kafka.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaProducer 5 | module Config 6 | class Kafka < Dry::Struct 7 | transform_keys(&:to_sym) 8 | 9 | # srv1:port1,srv2:port2,... 10 | SERVERS_REGEXP = /^[a-z\d.\-:]+(,[a-z\d.\-:]+)*$/.freeze 11 | 12 | # https://github.com/karafka/waterdrop/blob/master/lib/waterdrop/config.rb 13 | KAFKA_CONFIG_KEYS_REMAP = { 14 | servers: :"bootstrap.servers", 15 | connect_timeout: :"socket.connection.setup.timeout.ms", 16 | message_timeout: :"message.timeout.ms", 17 | ack_timeout: :"request.timeout.ms", 18 | retry_backoff: :"retry.backoff.ms", 19 | max_retries: :"message.send.max.retries", 20 | required_acks: :"request.required.acks" 21 | } 22 | 23 | attribute :servers, Sbmt::KafkaProducer::Types::String.constrained(format: SERVERS_REGEXP) 24 | 25 | # defaults are rdkafka's 26 | # see https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md 27 | attribute :connect_timeout, Sbmt::KafkaProducer::Types::Coercible::Integer.optional.default(2000) 28 | attribute :ack_timeout, Sbmt::KafkaProducer::Types::Coercible::Integer.optional.default(1000) 29 | attribute :retry_backoff, Sbmt::KafkaProducer::Types::Coercible::Integer.optional.default(1000) 30 | attribute :message_timeout, Sbmt::KafkaProducer::Types::Coercible::Integer.optional.default(55000) 31 | attribute :required_acks, 
Sbmt::KafkaProducer::Types::Coercible::Integer.optional.default(-1) 32 | attribute :max_retries, Sbmt::KafkaProducer::Types::Coercible::Integer.optional.default(2) 33 | 34 | attribute :kafka_config, Sbmt::KafkaProducer::Types::ConfigAttrs.optional.default({}.freeze) 35 | 36 | def to_kafka_options 37 | cfg = KAFKA_CONFIG_KEYS_REMAP.each_with_object({}) do |(key, kafka_key), hash| 38 | hash[kafka_key] = self[key] 39 | end 40 | 41 | kafka_config.symbolize_keys.merge(cfg) 42 | end 43 | end 44 | end 45 | end 46 | end 47 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_producer/kafka_client_factory_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | describe Sbmt::KafkaProducer::KafkaClientFactory do 4 | describe ".default_client" do 5 | it "returns a ConnectionPool::Wrapper with a WaterDrop::Producer inside" do 6 | expect(described_class.default_client.with { |producer| producer }).to be_instance_of(WaterDrop::Producer) 7 | end 8 | end 9 | 10 | describe ".build" do 11 | context "when passed an empty hash" do 12 | it "returns the default client" do 13 | expect(described_class.build({})).to eq(described_class.default_client) 14 | end 15 | end 16 | 17 | context "when passed a kafka configuration hash" do 18 | let(:kafka_config) { {seed_brokers: "kafka://localhost:9092", producer: {connect_timeout: "10s"}} } 19 | 20 | it "returns a ConnectionPool::Wrapper with a WaterDrop::Producer inside" do 21 | expect(described_class.build(kafka_config).with { |producer| producer }).to be_instance_of(WaterDrop::Producer) 22 | end 23 | 24 | it "always returns the same client" do 25 | client = described_class.build(kafka_config) 26 | expect(described_class.build(kafka_config).object_id).to eq client.object_id 27 | end 28 | end 29 | end 30 | 31 | describe ".configure_client" do 32 | it "configures the client with the correct options" do 33 | # rubocop:disable 
Style/HashSyntax 34 | kafka_opts = { 35 | message_timeout: 54000, 36 | "queue.buffering.max.messages": 14, 37 | "ack_timeout" => 1555, 38 | "queue.buffering.max.ms" => 1345 39 | } 40 | # rubocop:enable Style/HashSyntax 41 | 42 | described_class.build(kafka_opts).with do |producer| 43 | expect(producer.config.deliver).to be(true) 44 | expect(producer.config.logger).to be_instance_of(Sbmt::KafkaProducer::Logger) 45 | expect(producer.config.wait_on_queue_full).to be(true) 46 | expect(producer.config.max_wait_timeout).to eq(60000) 47 | expect(producer.config.kafka).to include( 48 | "bootstrap.servers": "kafka:9092", 49 | "message.timeout.ms": 54000, 50 | "request.timeout.ms": 1555, 51 | "queue.buffering.max.ms": 1345 52 | ) 53 | end 54 | end 55 | end 56 | end 57 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_producer/yabeda_configurer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaProducer 5 | class YabedaConfigurer 6 | SIZE_BUCKETS = [1, 10, 100, 1000, 10_000, 100_000, 1_000_000].freeze 7 | LATENCY_BUCKETS = [0.0001, 0.001, 0.01, 0.1, 1.0, 10, 100, 1000].freeze 8 | 9 | def self.configure 10 | Yabeda.configure do 11 | group :kafka_api do 12 | counter :calls, 13 | tags: %i[client broker api], 14 | comment: "API calls" 15 | histogram :latency, 16 | tags: %i[client broker api], 17 | buckets: LATENCY_BUCKETS, 18 | comment: "API latency" 19 | histogram :request_size, 20 | tags: %i[client broker api], 21 | buckets: SIZE_BUCKETS, 22 | comment: "API request size" 23 | histogram :response_size, 24 | tags: %i[client broker api], 25 | buckets: SIZE_BUCKETS, 26 | comment: "API response size" 27 | counter :errors, 28 | tags: %i[client broker api], 29 | comment: "API errors" 30 | end 31 | 32 | group :kafka_producer do 33 | counter :produced_messages, 34 | tags: %i[client topic], 35 | comment: "Messages produced" 36 | 
histogram :message_size, 37 | tags: %i[client topic], 38 | buckets: SIZE_BUCKETS, 39 | comment: "Producer message size" 40 | histogram :buffer_size, 41 | tags: %i[client], 42 | buckets: SIZE_BUCKETS, 43 | comment: "Producer buffer size" 44 | counter :produce_errors, 45 | tags: %i[client topic], 46 | comment: "Produce errors" 47 | counter :deliver_errors, 48 | tags: %i[client topic], 49 | comment: "Produce deliver error" 50 | histogram :deliver_latency, 51 | tags: %i[client topic], 52 | buckets: LATENCY_BUCKETS, 53 | comment: "Produce delivery latency" 54 | counter :deliver_messages, 55 | tags: %i[client topic], 56 | comment: "Total count of delivered messages produced" 57 | end 58 | end 59 | end 60 | end 61 | end 62 | end 63 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_producer/config/auth_config_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | describe Sbmt::KafkaProducer::Config::Auth, type: :config do 4 | let(:config) { described_class.new } 5 | 6 | context "when no auth configured" do 7 | it "defaults to plaintext and properly translates to kafka options" do 8 | expect(config.kind).to eq("plaintext") 9 | expect(config.to_kafka_options).to eq("security.protocol": "plaintext") 10 | end 11 | end 12 | 13 | context "when sasl plaintext auth is used" do 14 | let(:config) { 15 | described_class.new( 16 | kind: "sasl_plaintext", sasl_mechanism: "PLAIN", 17 | sasl_username: "username", sasl_password: "password" 18 | ) 19 | } 20 | 21 | it "loads valid config and properly translates to kafka options" do 22 | with_env( 23 | "KAFKA_PRODUCER_AUTH_SASL_USERNAME" => "username", 24 | "KAFKA_PRODUCER_AUTH_SASL_PASSWORD" => "password", 25 | "KAFKA_PRODUCER_AUTH_SASL_MECHANISM" => "PLAIN" 26 | ) do 27 | expect(config.kind).to eq("sasl_plaintext") 28 | expect(config.sasl_username).to eq("username") 29 | expect(config.sasl_password).to 
eq("password") 30 | expect(config.sasl_mechanism).to eq("PLAIN") 31 | expect(config.to_kafka_options) 32 | .to eq({ 33 | "security.protocol": "sasl_plaintext", 34 | "sasl.password": "password", 35 | "sasl.username": "username", 36 | "sasl.mechanism": "PLAIN" 37 | }) 38 | end 39 | end 40 | 41 | it "raises on empty username" do 42 | expect { described_class.new(kind: "sasl_plaintext").to_kafka_options } 43 | .to raise_error(/sasl_username is required/) 44 | end 45 | 46 | it "raises on empty password" do 47 | expect { described_class.new(kind: "sasl_plaintext", sasl_username: "username").to_kafka_options } 48 | .to raise_error(/sasl_password is required/) 49 | end 50 | 51 | it "sasl_mechanism defaults to SCRAM-SHA-512" do 52 | expect(described_class.new(kind: "sasl_plaintext", 53 | sasl_username: "username", 54 | sasl_password: "password").to_kafka_options) 55 | .to eq({ 56 | "security.protocol": "sasl_plaintext", 57 | "sasl.password": "password", 58 | "sasl.username": "username", 59 | "sasl.mechanism": "SCRAM-SHA-512" 60 | }) 61 | end 62 | end 63 | end 64 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_producer/config/auth.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaProducer 5 | module Config 6 | class Auth < Dry::Struct 7 | transform_keys(&:to_sym) 8 | 9 | AVAILABLE_AUTH_KINDS = %w[plaintext sasl_plaintext].freeze 10 | DEFAULT_AUTH_KIND = "plaintext" 11 | 12 | AVAILABLE_SASL_MECHANISMS = %w[PLAIN SCRAM-SHA-256 SCRAM-SHA-512].freeze 13 | DEFAULT_SASL_MECHANISM = "SCRAM-SHA-512" 14 | 15 | attribute :kind, Sbmt::KafkaProducer::Types::Strict::String 16 | .default(DEFAULT_AUTH_KIND) 17 | .enum(*AVAILABLE_AUTH_KINDS) 18 | attribute? :sasl_mechanism, Sbmt::KafkaProducer::Types::Strict::String 19 | .default(DEFAULT_SASL_MECHANISM) 20 | .enum(*AVAILABLE_SASL_MECHANISMS) 21 | attribute? 
:sasl_username, Sbmt::KafkaProducer::Types::Strict::String 22 | attribute? :sasl_password, Sbmt::KafkaProducer::Types::Strict::String 23 | 24 | def to_kafka_options 25 | ensure_options_are_valid 26 | 27 | opts = {} 28 | 29 | case kind 30 | when "sasl_plaintext" 31 | opts.merge!( 32 | "security.protocol": kind, 33 | "sasl.mechanism": sasl_mechanism, 34 | "sasl.username": sasl_username, 35 | "sasl.password": sasl_password 36 | ) 37 | when "plaintext" 38 | opts[:"security.protocol"] = kind 39 | else 40 | raise Anyway::Config::ValidationError, "unknown auth kind: #{kind}" 41 | end 42 | 43 | opts.symbolize_keys 44 | end 45 | 46 | private 47 | 48 | def ensure_options_are_valid 49 | raise Anyway::Config::ValidationError, "unknown auth kind: #{kind}" unless AVAILABLE_AUTH_KINDS.include?(kind) 50 | 51 | case kind 52 | when "sasl_plaintext" 53 | raise Anyway::Config::ValidationError, "sasl_username is required for #{kind} auth kind" if sasl_username.blank? 54 | raise Anyway::Config::ValidationError, "sasl_password is required for #{kind} auth kind" if sasl_password.blank? 55 | raise Anyway::Config::ValidationError, "sasl_mechanism is required for #{kind} auth kind" if sasl_mechanism.blank? 
56 | raise Anyway::Config::ValidationError, "invalid sasl_mechanism for #{kind} auth kind, available options are: [#{AVAILABLE_SASL_MECHANISMS.join(",")}]" unless AVAILABLE_SASL_MECHANISMS.include?(sasl_mechanism) 57 | end 58 | end 59 | end 60 | end 61 | end 62 | end 63 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_producer/kafka_client_factory.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaProducer 5 | class KafkaClientFactory 6 | CLIENTS_REGISTRY_MUTEX = Mutex.new 7 | CLIENTS_REGISTRY = {} 8 | 9 | class << self 10 | def default_client 11 | @default_client ||= ConnectionPool::Wrapper.new do 12 | WaterDrop::Producer.new do |config| 13 | configure_client(config) 14 | end 15 | end 16 | end 17 | 18 | def build(kafka_options = {}) 19 | return default_client if kafka_options.empty? 20 | 21 | fetch_client(kafka_options) do 22 | ConnectionPool::Wrapper.new do 23 | WaterDrop::Producer.new do |config| 24 | configure_client(config, kafka_options) 25 | end 26 | end 27 | end 28 | end 29 | 30 | private 31 | 32 | def fetch_client(kafka_options) 33 | key = Digest::SHA1.hexdigest(Marshal.dump(kafka_options)) 34 | return CLIENTS_REGISTRY[key] if CLIENTS_REGISTRY.key?(key) 35 | 36 | CLIENTS_REGISTRY_MUTEX.synchronize do 37 | return CLIENTS_REGISTRY[key] if CLIENTS_REGISTRY.key?(key) 38 | CLIENTS_REGISTRY[key] = yield 39 | end 40 | end 41 | 42 | def configure_client(kafka_config, kafka_options = {}) 43 | kafka_config.logger = config.logger_class.classify.constantize.new 44 | kafka_config.kafka = config.to_kafka_options.merge(custom_kafka_config(kafka_options)).symbolize_keys 45 | 46 | kafka_config.middleware = Instrumentation::TracingMiddleware.new 47 | 48 | kafka_config.deliver = config.deliver if config.deliver.present? 49 | kafka_config.wait_on_queue_full = config.wait_on_queue_full if config.wait_on_queue_full.present? 
50 | kafka_config.max_payload_size = config.max_payload_size if config.max_payload_size.present? 51 | kafka_config.max_wait_timeout = config.max_wait_timeout if config.max_wait_timeout.present? 52 | kafka_config.wait_on_queue_full_timeout = config.wait_on_queue_full_timeout if config.wait_on_queue_full_timeout.present? 53 | 54 | kafka_config.monitor.subscribe(config.metrics_listener_class.classify.constantize.new) 55 | end 56 | 57 | def custom_kafka_config(kafka_options) 58 | kafka_options = kafka_options.symbolize_keys 59 | short_options = kafka_options.extract!(*Config::Kafka::KAFKA_CONFIG_KEYS_REMAP.keys) 60 | cfg = short_options.transform_keys(Config::Kafka::KAFKA_CONFIG_KEYS_REMAP) 61 | kafka_options.merge!(cfg) 62 | end 63 | 64 | def config 65 | Config::Producer 66 | end 67 | end 68 | end 69 | end 70 | end 71 | -------------------------------------------------------------------------------- /sbmt-kafka_producer.gemspec: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "lib/sbmt/kafka_producer/version" 4 | 5 | Gem::Specification.new do |spec| 6 | spec.name = "sbmt-kafka_producer" 7 | spec.license = "MIT" 8 | spec.version = Sbmt::KafkaProducer::VERSION 9 | spec.authors = ["Kuper Ruby-Platform Team"] 10 | 11 | spec.summary = "Ruby gem for producing Kafka messages" 12 | spec.description = "This gem is used for producing Kafka messages. 
It represents a wrapper over Waterdrop gem and is recommended for using as a transport with sbmt-outbox" 13 | spec.homepage = "https://github.com/Kuper-Tech/sbmt-kafka_producer" 14 | spec.required_ruby_version = ">= 3.0.0" 15 | 16 | spec.metadata["allowed_push_host"] = ENV.fetch("NEXUS_URL", "https://rubygems.org") 17 | 18 | spec.metadata["homepage_uri"] = spec.homepage 19 | spec.metadata["source_code_uri"] = spec.homepage 20 | spec.metadata["changelog_uri"] = "#{spec.homepage}/blob/master/CHANGELOG.md" 21 | spec.metadata["rubygems_mfa_required"] = "false" # rubocop:disable Gemspec/RequireMFA 22 | 23 | # Specify which files should be added to the gem when it is released. 24 | # The `git ls-files -z` loads the files in the RubyGem that have been added into git. 25 | spec.files = Dir.chdir(__dir__) do 26 | `git ls-files -z`.split("\x0").reject do |f| 27 | (f == __FILE__) || f.match(%r{\A(?:(?:bin|test|spec|features)/|\.(?:git|travis|circleci)|appveyor)}) 28 | end 29 | end 30 | spec.bindir = "exe" 31 | spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) } 32 | spec.require_paths = ["lib"] 33 | 34 | spec.add_dependency "anyway_config", "~> 2.4" 35 | spec.add_dependency "connection_pool", "~> 2.0" 36 | spec.add_dependency "dry-initializer", "~> 3.0" 37 | spec.add_dependency "dry-struct", "~> 1.5" 38 | spec.add_dependency "waterdrop", "~> 2.7", "< 2.8" 39 | spec.add_dependency "zeitwerk", "~> 2.6" 40 | spec.add_dependency "yabeda", "~> 0.11" 41 | 42 | spec.add_development_dependency "appraisal", ">= 2.4" 43 | spec.add_development_dependency "bundler", ">= 2.1" 44 | spec.add_development_dependency "combustion", ">= 1.3" 45 | spec.add_development_dependency "opentelemetry-sdk" 46 | spec.add_development_dependency "opentelemetry-api", ">= 0.17.0" 47 | spec.add_development_dependency "opentelemetry-common", ">= 0.17.0" 48 | spec.add_development_dependency "opentelemetry-instrumentation-base", ">= 0.17.0" 49 | spec.add_development_dependency "rake", ">= 
13.0" 50 | spec.add_development_dependency "rails", ">= 6.1" 51 | spec.add_development_dependency "rspec", ">= 3.0" 52 | spec.add_development_dependency "rspec_junit_formatter", ">= 0.6" 53 | spec.add_development_dependency "rspec-rails", ">= 4.0" 54 | spec.add_development_dependency "rubocop-rails", ">= 2.5" 55 | spec.add_development_dependency "rubocop-rspec", ">= 2.11" 56 | spec.add_development_dependency "sentry-ruby", "> 5.16" 57 | spec.add_development_dependency "simplecov", "~> 0.16" 58 | spec.add_development_dependency "standard", ">= 1.12" 59 | end 60 | -------------------------------------------------------------------------------- /lib/sbmt/kafka_producer/instrumentation/yabeda_metrics_listener.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sbmt 4 | module KafkaProducer 5 | module Instrumentation 6 | class YabedaMetricsListener 7 | DEFAULT_CLIENT = {client: "waterdrop"}.freeze 8 | def on_statistics_emitted(event) 9 | # https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md 10 | stats = event.payload[:statistics] 11 | broker_stats = stats["brokers"] 12 | 13 | report_broker_stats(broker_stats) 14 | end 15 | 16 | def on_error_occurred(event) 17 | tags = {topic: event[:topic]}.merge!(DEFAULT_CLIENT) if event.payload.include?(:topic) 18 | 19 | case event[:type] 20 | when "message.produce_sync", "message.produce_async" 21 | Yabeda.kafka_producer.produce_errors 22 | .increment(produce_base_tags(event)) 23 | when "librdkafka.dispatch_error" 24 | Yabeda.kafka_producer.deliver_errors 25 | .increment(tags) 26 | end 27 | end 28 | 29 | %i[produced_sync produced_async].each do |event_scope| 30 | define_method(:"on_message_#{event_scope}") do |event| 31 | Yabeda.kafka_producer.produced_messages 32 | .increment(produce_base_tags(event)) 33 | 34 | Yabeda.kafka_producer.message_size 35 | .measure(produce_base_tags(event), event[:message].to_s.bytesize) 36 | 37 | 
Yabeda.kafka_producer.deliver_latency 38 | .measure(produce_base_tags(event), event[:time]) 39 | end 40 | end 41 | 42 | def on_message_buffered(event) 43 | Yabeda.kafka_producer.buffer_size 44 | .measure(DEFAULT_CLIENT, event[:buffer].size) 45 | end 46 | 47 | def on_message_acknowledged(event) 48 | tag = {topic: event[:topic]}.merge!(DEFAULT_CLIENT) 49 | 50 | Yabeda.kafka_producer.deliver_messages 51 | .increment(tag) 52 | end 53 | 54 | private 55 | 56 | def produce_base_tags(event) 57 | { 58 | client: DEFAULT_CLIENT[:client], 59 | topic: event[:message][:topic] 60 | } 61 | end 62 | 63 | def report_broker_stats(brokers) 64 | brokers.each_value do |broker_statistics| 65 | # Skip bootstrap nodes 66 | next if broker_statistics["nodeid"] == -1 67 | 68 | broker_tags = { 69 | client: DEFAULT_CLIENT[:client], 70 | broker: broker_statistics["nodename"] 71 | } 72 | 73 | Yabeda.kafka_api.calls 74 | .increment(broker_tags, by: broker_statistics["tx_d"]) 75 | Yabeda.kafka_api.latency 76 | .measure(broker_tags, broker_statistics["rtt"]["avg"]) 77 | Yabeda.kafka_api.request_size 78 | .measure(broker_tags, broker_statistics["txbytes"]) 79 | Yabeda.kafka_api.response_size 80 | .measure(broker_tags, broker_statistics["rxbytes"]) 81 | Yabeda.kafka_api.errors 82 | .increment(broker_tags, by: broker_statistics["txerrs_d"] + broker_statistics["rxerrs_d"]) 83 | end 84 | end 85 | end 86 | end 87 | end 88 | end 89 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_producer/config/producer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | describe Sbmt::KafkaProducer::Config::Producer, type: :config do 4 | context "when app initialized" do 5 | let(:default_env) { 6 | { 7 | "KAFKA_PRODUCER_AUTH__KIND" => "sasl_plaintext", 8 | "KAFKA_PRODUCER_AUTH__SASL_USERNAME" => "username", 9 | "KAFKA_PRODUCER_AUTH__SASL_PASSWORD" => "password", 10 | 
"KAFKA_PRODUCER_AUTH__SASL_MECHANISM" => "PLAIN", 11 | 12 | "KAFKA_PRODUCER_KAFKA__SERVERS" => "server1:9092,server2:9092" 13 | } 14 | } 15 | let(:config) { described_class.new } 16 | let(:kafka_config_defaults) do 17 | { 18 | "socket.connection.setup.timeout.ms": 1000, 19 | "request.timeout.ms": 1000, 20 | "request.required.acks": -1, 21 | "message.send.max.retries": 2, 22 | "retry.backoff.ms": 1000, 23 | "message.timeout.ms": 55000 24 | } 25 | end 26 | 27 | it "properly merges kafka options" do 28 | with_env(default_env) do 29 | expect(config.to_kafka_options) 30 | .to eq(kafka_config_defaults.merge( 31 | "bootstrap.servers": "server1:9092,server2:9092", 32 | "security.protocol": "sasl_plaintext", 33 | "sasl.mechanism": "PLAIN", 34 | "sasl.password": "password", 35 | "sasl.username": "username", 36 | # loaded from kafka_producer.yml 37 | "message.send.max.retries": 2, 38 | "request.required.acks": -1, 39 | "request.timeout.ms": 1000, 40 | "retry.backoff.ms": 1000, 41 | "socket.connection.setup.timeout.ms": 2000, 42 | # arbitrary parameters for section kafka_config file kafka_producer.yml 43 | "queue.buffering.max.messages": 1, 44 | "queue.buffering.max.ms": 10000 45 | )) 46 | end 47 | end 48 | 49 | it "has correct defaults" do 50 | with_env(default_env) do 51 | expect(config.logger_class).to eq("::Sbmt::KafkaProducer::Logger") 52 | expect(config.metrics_listener_class).to eq("::Sbmt::KafkaProducer::Instrumentation::YabedaMetricsListener") 53 | end 54 | end 55 | 56 | context "when kafka_config options overwrite auth params" do 57 | let(:ca_cert) { OpenSSL::PKey::RSA.new(2048).to_s } 58 | let(:default_env) do 59 | super().merge( 60 | "KAFKA_PRODUCER_KAFKA__KAFKA_CONFIG__SECURITY.PROTOCOL" => "SASL_SSL", 61 | "KAFKA_PRODUCER_KAFKA__KAFKA_CONFIG__SASL.MECHANISM" => "SCRAM-SHA-512", 62 | "KAFKA_PRODUCER_KAFKA__KAFKA_CONFIG__SSL.CA.PEM" => ca_cert 63 | ) 64 | end 65 | 66 | it "properly merges kafka options uses auth params from low-level config" do 67 | 
# frozen_string_literal: true

module Sbmt
  module KafkaProducer
    # Base class for application producers. Wraps a WaterDrop client and adds
    # middleware support, Sentry tracing and structured success/error logging.
    #
    # Subclasses usually set a default +topic+ and expose a +publish+ method
    # that delegates to one of the four publishing helpers below.
    class BaseProducer
      extend Dry::Initializer

      MSG_SUCCESS = "Message has been successfully sent to Kafka"

      option :client, default: -> { KafkaClientFactory.default_client }
      option :middlewares, default: -> { Sbmt::KafkaProducer.middlewares }
      option :topic

      # Synchronously publishes +payload+ to +topic+.
      # Returns true on success; raises WaterDrop::Errors::ProduceError on failure.
      def sync_publish!(payload, options = {})
        report, produce_duration = around_publish do
          measure_time do
            do_produce(:sync, payload, options.merge(topic: topic))
          end
        end
        log_success(report, produce_duration)
        true
      end

      # Non-raising variant of #sync_publish!: logs and reports the error,
      # returns false instead of raising.
      def sync_publish(payload, options = {})
        sync_publish!(payload, options)
        true
      rescue WaterDrop::Errors::ProduceError => e
        log_error(e)
        false
      end

      # Asynchronously enqueues +payload+ for delivery.
      # Returns true on success; raises WaterDrop::Errors::ProduceError on failure.
      def async_publish!(payload, options = {})
        around_publish do
          do_produce(:async, payload, options.merge(topic: topic))
        end
        true
      end

      # Non-raising variant of #async_publish!.
      def async_publish(payload, options = {})
        async_publish!(payload, options)
        true
      rescue WaterDrop::Errors::ProduceError => e
        log_error(e)
        false
      end

      private

      # Runs the middleware chain and then produces via the WaterDrop client.
      def do_produce(type, payload, options)
        call_middlewares(type, payload, options) do
          if type == :sync
            client.produce_sync(payload: payload, **options)
          else
            client.produce_async(payload: payload, **options)
          end
        end
      end

      # Invokes configured middlewares in order; the innermost call performs
      # the actual produce. Each middleware receives the payload and a copy of
      # the options (extended with :producing_type) and must return the result
      # of the block it yields to.
      def call_middlewares(type, payload, options)
        return yield if middlewares.empty?

        chain = middlewares.map { |middleware_class| middleware_class.new }

        traverse_chain = proc do
          if chain.empty?
            yield
          else
            chain.shift.call(
              payload,
              options.deep_dup.merge(producing_type: type),
              &traverse_chain
            )
          end
        end
        traverse_chain.call
      end

      def logger
        ::Sbmt::KafkaProducer.logger
      end

      def around_publish
        with_sentry_transaction { yield }
      end

      # Wraps the produce call in a Sentry transaction when Sentry is loaded,
      # initialized and tracing is enabled; otherwise just yields.
      def with_sentry_transaction
        return yield unless defined?(::Sentry)
        return yield unless ::Sentry.initialized?

        transaction = ::Sentry.start_transaction(
          name: "Karafka/#{self.class.name}",
          op: "kafka-producer"
        )

        # Tracing is disabled by config
        return yield unless transaction

        result = nil
        begin
          transaction.with_child_span do |span|
            span.set_data(:topic, topic)
            result = yield
          end
        ensure
          # Finish the transaction even when the produce raises, so failed
          # publishes don't leak unfinished Sentry transactions.
          transaction.finish
        end
        result
      end

      def log_error(error)
        log_tags = {stacktrace: error.backtrace.join("\n")}

        logger.tagged(log_tags) do
          logger.send(:error, "KAFKA ERROR: #{format_exception_error(error)}")
        end

        ErrorTracker.error(error)
      end

      def log_success(report, produce_duration)
        log_tags = {kafka: log_tags(report, produce_duration)}

        log_with_tags(log_tags)
      end

      # Formats "<cause>. <error>" recursively so the root cause comes first.
      def format_exception_error(error)
        text = "#{format_exception_error(error.cause)}. " if with_cause?(error)

        if error.respond_to?(:message)
          "#{text}#{error.class.name} #{error.message}"
        else
          "#{text}#{error}"
        end
      end

      def with_cause?(error)
        error.respond_to?(:cause) && error.cause.present?
      end

      def log_tags(report, produce_duration)
        {
          topic: report.topic_name,
          partition: report.partition,
          offset: report.offset,
          produce_duration_ms: produce_duration
        }
      end

      def log_with_tags(log_tags)
        return unless logger.respond_to?(:tagged)

        logger.tagged(log_tags) do
          logger.send(:info, MSG_SUCCESS)
        end
      end

      # Yields the block and returns [result, elapsed_ms]. Uses a monotonic
      # clock; the value is converted to milliseconds so it matches the
      # produce_duration_ms log tag (previously it was logged in seconds).
      def measure_time
        start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
        result = yield
        end_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)

        elapsed_ms = (end_time - start_time) * 1000.0

        [result, elapsed_ms]
      end

      def config
        Config::Producer
      end
    end
  end
end
7 | 8 | ## [Unreleased] - yyyy-mm-dd 9 | 10 | ### Added 11 | 12 | ### Changed 13 | 14 | ### Fixed 15 | 16 | ## [3.3.0] - 2025-12-05 17 | 18 | ### Added 19 | 20 | - Added option `middlewares` to add middleware when producing a message 21 | 22 | ## [3.2.3] - 2025-04-02 23 | 24 | ### Fixed 25 | 26 | - Properly resolve constant `YabedaConfigurer` 27 | 28 | ## [3.2.2] - 2025-01-27 29 | 30 | ### Fixed 31 | 32 | - Kafka configuration was overwritten with the default authorization options (#2) 33 | 34 | ## [3.2.1] - 2024-12-26 35 | 36 | ### Fixed 37 | 38 | - Fix initialize yabeda metrics 39 | 40 | ## [3.2.0] - 2024-11-22 41 | 42 | ### Changed 43 | 44 | - Removed `ignore_kafka_error` option 45 | 46 | ## [3.1.1] - 2024-10-31 47 | 48 | ### Changed 49 | 50 | - Update README 51 | 52 | ## [3.1.0] - 2024-09-13 53 | 54 | ### Added 55 | 56 | - For synchronous messages and errors, we place logs in tags 57 | 58 | ### Fixed 59 | 60 | - Fixed mock for tests 61 | 62 | ## [3.0.0] - 2024-08-27 63 | 64 | ## BREAKING 65 | 66 | - Drop support for Ruby 2.7 67 | - Drop support for Rails 6.0 68 | - Add support for Waterdrop 2.7 69 | - `wait_timeout` configuration no longer deeded 70 | - All time-related values are now configured in milliseconds: `connect_timeout`, `ack_timeout`, `retry_backoff`, `max_wait_timeout`, `wait_on_queue_full_timeout` 71 | 72 | ## Added 73 | 74 | - Add `message_timeout` configuration 75 | 76 | ## [2.2.3] - 2024-06-20 77 | 78 | ### Fixed 79 | 80 | - Remove mock for producer client 81 | - A singleton class of the producer client has been added for testing 82 | 83 | ## [2.2.2] - 2024-06-15 84 | 85 | ### Fixed 86 | 87 | - Fixed display of metrics `kafka_api_calls` and `kafka_api_errors` 88 | 89 | ## [2.2.1] - 2024-06-07 90 | 91 | ### Changed 92 | 93 | - Drop support for Rails 5 94 | - Temporary limit upper version of Waterdrop to less than 2.7 95 | 96 | ## [2.2.0] - 2024-04-12 97 | 98 | ### Changed 99 | 100 | - Add logs with `offset`. 
101 | 102 | ## [2.1.0] - 2024-03-14 103 | 104 | ### Changed 105 | 106 | - Memoize kafka clients. Add a registry with them to KafkaClientFactory. 107 | 108 | ## [2.0.0] - 2024-01-29 109 | 110 | ### Changed 111 | 112 | - Remove `sbmt-dev` 113 | 114 | ## [1.0.0] - 2024-01-12 115 | 116 | ### Added 117 | 118 | - Use mainstream karafka instead of custom fork 119 | 120 | ## [0.8.0] - 2023-10-05 121 | 122 | ### Added 123 | 124 | - Errors' `cause` handling 125 | 126 | ### Fixed 127 | 128 | - change from `double` to `instance_double` 129 | 130 | ## [0.7.0] - 2023-09-14 131 | 132 | ### Added 133 | 134 | - Plug OpenTelemetry 135 | 136 | ## [0.6.3] - 2023-08-10 137 | 138 | ### Fixed 139 | 140 | - Return True when publishing with bang methods 141 | 142 | ## [0.6.2] - 2023-08-08 143 | 144 | ### Added 145 | 146 | - add ErrorTracker for Sentry 147 | 148 | ## [0.6.1] - 2023-08-07 149 | 150 | ### Fixed 151 | 152 | - Don't catch an exception when publishing through the Sbmt::KafkaProducer::OutboxProducer 153 | 154 | ## [0.6.0] - 2023-07-23 155 | 156 | ### Added 157 | 158 | - rails generator for initial configuration 159 | - rails generator for producer/outbox_producer creation 160 | 161 | ## [0.5.1] - 2023-07-21 162 | 163 | ### Fixed 164 | 165 | - change sentry method from capture_message to capture_exception 166 | 167 | ## [0.5.0] - 2023-06-26 168 | 169 | ### Fixed 170 | - Mock BaseProducer for rspec 171 | 172 | ## [Unreleased] - 2023-06-21 173 | 174 | ### Changed 175 | - update README 176 | 177 | ## [0.4.2] - 2023-06-20 178 | 179 | ### Fixed 180 | - fixed version **sbmt-waterdrop** 181 | 182 | ## [0.4.1] - 2023-06-19 183 | 184 | ### Fixed 185 | - fixed error handling in the method **on_error_occurred** 186 | 187 | ## [0.4.0] - 2023-06-13 188 | 189 | ### Changed 190 | - config changed from anyway to Dry::Struct 191 | 192 | ## [0.3.0] - 2023-06-01 193 | 194 | ### Added 195 | - implement producer metrics 196 | 197 | ## [0.2.3] - 2023-05-19 198 | 199 | ### Added 200 | - for outbox, if 
the default settings for the kafka section are overridden, they are overwritten 201 | 202 | ### Changed 203 | 204 | ### Fixed 205 | 206 | ## [0.2.2] - 2023-05-18 207 | 208 | ### Added 209 | - arbitrary parameters from kafka 210 | 211 | ### Changed 212 | 213 | ### Fixed 214 | 215 | ## [0.2.1] - 2023-05-16 216 | 217 | ### Added 218 | - fix logger 219 | 220 | ### Changed 221 | 222 | ### Fixed 223 | 224 | ## [0.2.0] - 2023-05-16 225 | 226 | ### Added 227 | - basic options for producer 228 | 229 | ### Changed 230 | 231 | ### Fixed 232 | 233 | ## [Unreleased] - 2023-05-04 234 | 235 | ### Added 236 | - basic config for producer via gem anyway_config 237 | 238 | ### Changed 239 | 240 | ### Fixed 241 | 242 | ## [Unreleased] - 2023-05-02 243 | 244 | ### Added 245 | - BaseProducer 246 | - OutboxProducer 247 | - Sentry, logger 248 | 249 | ### Changed 250 | 251 | ### Fixed 252 | 253 | 254 | ## [Unreleased] 255 | 256 | ## [0.1.0] - 2023-04-17 257 | 258 | - Initial release 259 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_producer/instrumentation/yabeda_metrics_listener_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "ostruct" 4 | 5 | describe Sbmt::KafkaProducer::Instrumentation::YabedaMetricsListener do 6 | describe ".on_statistics_emitted" do 7 | let(:base_rdkafka_stats) { 8 | { 9 | "client_id" => "waterdrop", 10 | "brokers" => { 11 | "kafka:9092/1001" => { 12 | "name" => "kafka:9092/1001", 13 | "nodeid" => 1001, 14 | "nodename" => "kafka:9092", 15 | "tx_d" => 7, 16 | "txbytes" => 338, 17 | "txerrs_d" => 0, 18 | "rx" => 7, 19 | "rxbytes" => 827, 20 | "rxerrs_d" => 0, 21 | "rtt" => { 22 | "avg" => 1984 23 | } 24 | } 25 | } 26 | }.freeze 27 | } 28 | 29 | context "when only base data is available" do 30 | let(:event) do 31 | Karafka::Core::Monitoring::Event.new( 32 | "statistics.emitted", 33 | {statistics: base_rdkafka_stats} 34 | ) 35 
| end 36 | 37 | it "reports only broker metrics" do 38 | tags = {client: "waterdrop", broker: "kafka:9092"} 39 | expect { 40 | described_class.new.on_statistics_emitted(event) 41 | }.to measure_yabeda_histogram(Yabeda.kafka_api.latency).with_tags(tags) 42 | .and measure_yabeda_histogram(Yabeda.kafka_api.request_size).with_tags(tags) 43 | .and measure_yabeda_histogram(Yabeda.kafka_api.response_size).with_tags(tags) 44 | .and increment_yabeda_counter(Yabeda.kafka_api.calls).with_tags(tags) 45 | .and increment_yabeda_counter(Yabeda.kafka_api.errors).with_tags(tags) 46 | end 47 | end 48 | end 49 | 50 | describe ".on_error_occurred" do 51 | let(:topic) { OpenStruct.new({topic: "topic"}) } 52 | let(:tags) do 53 | { 54 | client: "waterdrop", 55 | topic: "topic" 56 | } 57 | end 58 | 59 | context "when error type is message.produce_sync" do 60 | let(:event) { Karafka::Core::Monitoring::Event.new("error.occurred", type: "message.produce_sync", message: topic) } 61 | 62 | it "increments producer error counter" do 63 | expect { described_class.new.on_error_occurred(event) } 64 | .to increment_yabeda_counter(Yabeda.kafka_producer.produce_errors) 65 | .with_tags(tags) 66 | end 67 | end 68 | 69 | context "when error type is message.produce_async" do 70 | let(:event) { Karafka::Core::Monitoring::Event.new("error.occurred", type: "message.produce_async", message: topic) } 71 | 72 | it "increments producer error counter" do 73 | expect { described_class.new.on_error_occurred(event) } 74 | .to increment_yabeda_counter(Yabeda.kafka_producer.produce_errors) 75 | .with_tags(tags) 76 | end 77 | end 78 | 79 | context "when error type is librdkafka.dispatch_error" do 80 | let(:event) { Karafka::Core::Monitoring::Event.new("error.occurred", topic: "topic", type: "librdkafka.dispatch_error") } 81 | let(:tags) { {client: "waterdrop", topic: "topic"} } 82 | 83 | it "increments producer error counter" do 84 | expect { described_class.new.on_error_occurred(event) } 85 | .to 
increment_yabeda_counter(Yabeda.kafka_producer.deliver_errors) 86 | .with_tags(tags) 87 | end 88 | end 89 | end 90 | 91 | describe ".on_message_produced_sync" do 92 | let(:topic) { OpenStruct.new({topic: "topic"}) } 93 | let(:event) { Karafka::Core::Monitoring::Event.new("on_message_produced_sync", message: topic, time: 25) } 94 | let(:tags) { {client: "waterdrop", topic: "topic"} } 95 | 96 | it "reports produced sync message metrics" do 97 | expect { described_class.new.on_message_produced_sync(event) } 98 | .to increment_yabeda_counter(Yabeda.kafka_producer.produced_messages).with_tags(tags) 99 | .and measure_yabeda_histogram(Yabeda.kafka_producer.message_size).with_tags(tags) 100 | .and measure_yabeda_histogram(Yabeda.kafka_producer.deliver_latency).with_tags(tags) 101 | end 102 | end 103 | 104 | describe ".on_message_produced_async" do 105 | let(:topic) { OpenStruct.new({topic: "topic"}) } 106 | let(:event) { Karafka::Core::Monitoring::Event.new("on_message_produced_async", message: topic, time: 25) } 107 | let(:tags) { {client: "waterdrop", topic: "topic"} } 108 | 109 | it "reports produced async message metrics" do 110 | expect { described_class.new.on_message_produced_async(event) } 111 | .to increment_yabeda_counter(Yabeda.kafka_producer.produced_messages).with_tags(tags) 112 | .and measure_yabeda_histogram(Yabeda.kafka_producer.message_size).with_tags(tags) 113 | .and measure_yabeda_histogram(Yabeda.kafka_producer.deliver_latency).with_tags(tags) 114 | end 115 | end 116 | 117 | describe ".on_message_buffered" do 118 | let(:event) { Karafka::Core::Monitoring::Event.new("on_message_buffered", buffer: "buffer") } 119 | 120 | it "histogram produced buffer size metrics" do 121 | expect { described_class.new.on_message_buffered(event) } 122 | .to measure_yabeda_histogram(Yabeda.kafka_producer.buffer_size).with_tags({client: "waterdrop"}) 123 | end 124 | end 125 | 126 | describe ".on_message_acknowledged" do 127 | let(:topic) { OpenStruct.new({topic: "topic"}) } 
128 | let(:event) { Karafka::Core::Monitoring::Event.new("on_message_acknowledged", topic: "topic") } 129 | let(:tags) { {client: "waterdrop", topic: "topic"} } 130 | 131 | it "increments produced acknowledged metrics" do 132 | expect { described_class.new.on_message_acknowledged(event) } 133 | .to increment_yabeda_counter(Yabeda.kafka_producer.deliver_messages).with_tags(tags) 134 | end 135 | end 136 | end 137 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Gem Version](https://badge.fury.io/rb/sbmt-kafka_producer.svg)](https://badge.fury.io/rb/sbmt-kafka_producer) 2 | [![Build Status](https://github.com/Kuper-Tech/sbmt-kafka_producer/actions/workflows/tests.yml/badge.svg?branch=master)](https://github.com/Kuper-Tech/sbmt-kafka_producer/actions?query=branch%3Amaster) 3 | 4 | # Sbmt-KafkaProducer 5 | 6 | This gem is used to produce Kafka messages. It is a wrapper over the [waterdrop](https://github.com/karafka/waterdrop) gem, and it is recommended for use as a transport with the [sbmt-outbox](https://github.com/Kuper-Tech/sbmt-outbox) gem. 7 | 8 | ## Installation 9 | 10 | Add this line to your app's Gemfile: 11 | 12 | ```ruby 13 | gem "sbmt-kafka_producer" 14 | ``` 15 | 16 | And then execute: 17 | 18 | ```bash 19 | bundle install 20 | ``` 21 | 22 | ## Demo 23 | 24 | Learn how to use this gem and how it works with Ruby on Rails at here https://github.com/Kuper-Tech/outbox-example-apps 25 | 26 | ## Auto configuration 27 | 28 | We recommend going through the configuration and file creation process using the following Rails generators. Each generator can be run by using the `--help` option to learn more about the available arguments. 
29 | 30 | ### Initial configuration 31 | 32 | If you plug the gem into your application for the first time, you can generate the initial configuration: 33 | 34 | ```shell 35 | rails g kafka_producer:install 36 | ``` 37 | 38 | As a result, the `config/kafka_producer.yml` file will be created. 39 | 40 | ### Producer class 41 | 42 | A producer class can be generated with the following command: 43 | 44 | ```shell 45 | rails g kafka_producer:producer MaybeNamespaced::Name sync topic 46 | ``` 47 | 48 | As the result, a sync producer will be created. 49 | 50 | ### Outbox producer 51 | 52 | To generate an Outbox producer for use with Gem [sbmt-Outbox](https://github.com/Kuper-Tech/sbmt-outbox), run the following command: 53 | 54 | ```shell 55 | rails g kafka_producer:outbox_producer SomeOutboxItem 56 | ``` 57 | 58 | ## Manual configuration 59 | 60 | The `config/kafka_producer.yml` file is the main configuration for this gem. 61 | 62 | ```yaml 63 | default: &default 64 | deliver: true 65 | # see more options at https://github.com/karafka/waterdrop/blob/master/lib/waterdrop/config.rb 66 | wait_on_queue_full: true 67 | max_payload_size: 1000012 68 | max_wait_timeout: 60000 69 | auth: 70 | kind: plaintext 71 | kafka: 72 | servers: "kafka:9092" # required 73 | max_retries: 2 # optional, default: 2 74 | required_acks: -1 # optional, default: -1 75 | ack_timeout: 1000 # in milliseconds, optional, default: 1000 76 | retry_backoff: 1000 # in milliseconds, optional, default: 1000 77 | connect_timeout: 2000 # in milliseconds, optional, default: 2000 78 | message_timeout: 55000 # in milliseconds, optional, default: 55000 79 | # kafka_config: # optional, low-level custom Kafka options (see https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md) 80 | # queue.buffering.max.messages: 100000 81 | # queue.buffering.max.ms: 5 82 | 83 | development: 84 | <<: *default 85 | 86 | test: 87 | <<: *default 88 | deliver: false 89 | wait_on_queue_full: false 90 | 91 | production: 92 
| <<: *default 93 | ``` 94 | 95 | ### `auth` config section 96 | 97 | The gem supports 2 variants: plaintext (default) and SASL-plaintext 98 | 99 | SASL-plaintext: 100 | 101 | ```yaml 102 | auth: 103 | kind: sasl_plaintext 104 | sasl_username: user 105 | sasl_password: pwd 106 | sasl_mechanism: SCRAM-SHA-512 107 | ``` 108 | 109 | If you need to use another variant, use the low-level custom Kafka options `kafka_config:` of `config/kafka_producer.yml`. These options will overwrite the options in the auth section. 110 | 111 | Example of SASL_SSL protocol auth via `kafka_config`: 112 | 113 | ```yaml 114 | kafka_config: 115 | security.protocol: SASL_SSL 116 | sasl.username: user 117 | sasl.password: pwd 118 | ssl.ca.pem: ca_cert 119 | sasl.mechanism: SCRAM-SHA-512 120 | ``` 121 | ### `kafka` config section 122 | 123 | The `servers` key is required and should be in rdkafka format: without `kafka://` prefix, for example: `srv1:port1,srv2:port2,...`. 124 | 125 | The `kafka_config` section may contain any [rdkafka option](https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md) 126 | 127 | ### Producer class 128 | 129 | To create a producer that will be responsible for sending messages to Kafka, copy the following code: 130 | 131 | ```ruby 132 | # app/producers/some_producer.rb 133 | class SomeProducer < Sbmt::KafkaProducer::BaseProducer 134 | option :topic, default: -> { "topic" } 135 | 136 | def publish(payload, **options) 137 | sync_publish(payload, options) 138 | # async_publish(payload, options) 139 | end 140 | end 141 | ``` 142 | 143 | ### Outbox producer config 144 | 145 | Add the following lines to your `config/outbox.yml` file in the `transports` section: 146 | 147 | ```yaml 148 | outbox_items: 149 | some_outbox_item: 150 | transports: 151 | sbmt/kafka_producer: 152 | topic: 'topic' 153 | kafka: # optional kafka options 154 | required_acks: -1 155 | ``` 156 | ## Middlewares 157 | 158 | Middleware is code configured to run before/after a message is 
produced. 159 | 160 | To add middleware, specify it in the configuration 161 | 162 | ```ruby 163 | # config/initializers/kafka_producer.rb 164 | 165 | Sbmt::KafkaProducer.middlewares.push( 166 | MyMiddleware 167 | ) 168 | 169 | # path/to/middlewares 170 | 171 | class MyMiddleware 172 | def call(payload, options) 173 | "Before producing" 174 | result = yield 175 | "After producing" 176 | 177 | result 178 | end 179 | end 180 | ``` 181 | ⚠️ note the method must return the result 182 | 183 | ## Usage 184 | 185 | To send a message to a Kafka topic, execute the following command: 186 | 187 | ```ruby 188 | SomeProducer.new.publish(payload, key: "123", headers: {"some-header" => "some-value"}) 189 | ``` 190 | 191 | ## Metrics 192 | 193 | The gem collects base producing metrics using [Yabeda](https://github.com/yabeda-rb/yabeda). See metrics at [YabedaConfigurer](./lib/sbmt/kafka_producer/yabeda_configurer.rb). 194 | 195 | ## Testing 196 | 197 | To stub a producer request to real Kafka broker, you can use a fake class. To do this, please add `require "sbmt/kafka_producer/testing"` to the `spec/rails_helper.rb`. 198 | 199 | ## Development 200 | 201 | Install [dip](https://github.com/bibendi/dip). 
202 | 203 | And run: 204 | 205 | ```shell 206 | dip provision 207 | dip rspec 208 | ``` 209 | -------------------------------------------------------------------------------- /spec/sbmt/kafka_producer/base_producer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class TestWrapError < WaterDrop::Errors::ProduceError 4 | attr_reader :cause 5 | 6 | def initialize(message, cause) 7 | super(message) 8 | @cause = cause 9 | end 10 | end 11 | 12 | class MyMiddleware 13 | def call(payload, options) 14 | yield 15 | end 16 | end 17 | 18 | describe Sbmt::KafkaProducer::BaseProducer do 19 | let(:producer) { described_class.new(client: client, topic: topic, middlewares: middlewares) } 20 | let(:client) { instance_double(WaterDrop::Producer) } 21 | let(:topic) { "test_topic" } 22 | let(:payload) { {message: "payload"} } 23 | let(:error) { WaterDrop::Errors::ProduceError } 24 | let(:delivery_report) do 25 | instance_double(Rdkafka::Producer::DeliveryReport, 26 | error: nil, 27 | label: nil, 28 | offset: 0, 29 | partition: 0, 30 | topic_name: "my_topic") 31 | end 32 | let(:delivery_handle) do 33 | instance_double(Rdkafka::Producer::DeliveryHandle, 34 | label: nil, 35 | wait: delivery_report) 36 | end 37 | let(:logger) { ActiveSupport::TaggedLogging.new(Logger.new($stdout)) } 38 | let(:options) { {seed_brokers: "kafka://kafka:9092"} } 39 | let(:middlewares) { [] } 40 | 41 | before do 42 | allow(Sbmt::KafkaProducer::KafkaClientFactory).to receive(:default_client).and_return(client) 43 | allow(Sbmt::KafkaProducer).to receive(:logger).and_return(logger) 44 | end 45 | 46 | describe "#sync_publish" do 47 | context "when payload is successfully delivered" do 48 | before do 49 | allow(client).to receive(:produce_sync).with( 50 | payload: payload, 51 | topic: "test_topic", 52 | seed_brokers: "kafka://kafka:9092" 53 | ).and_return(delivery_report, 0.1) 54 | end 55 | 56 | it "produces the payload via the 
client and returns true" do 57 | expect(producer.sync_publish(payload, options)).to be(true) 58 | end 59 | 60 | it "logs the success message with correct tags" do 61 | expect(logger).to receive(:tagged).with(hash_including( 62 | kafka: hash_including( 63 | topic: "my_topic", 64 | partition: 0, 65 | offset: 0, 66 | produce_duration_ms: kind_of(Numeric) 67 | ) 68 | )).and_yield 69 | 70 | expect(logger).to receive(:info).with("Message has been successfully sent to Kafka") 71 | 72 | producer.sync_publish(payload, options) 73 | end 74 | 75 | context "when used middlewares" do 76 | let(:middlewares) { [MyMiddleware] } 77 | let(:middleware_options) do 78 | options.merge(topic: topic, producing_type: :sync) 79 | end 80 | 81 | it "calls middlewares" do 82 | expect_any_instance_of(MyMiddleware).to receive(:call).with(payload, middleware_options).and_call_original 83 | producer.sync_publish(payload, options) 84 | end 85 | end 86 | 87 | context "when used global middlewares" do 88 | let(:producer) { described_class.new(client: client, topic: topic) } 89 | let(:middleware_options) do 90 | options.merge(topic: topic, producing_type: :sync) 91 | end 92 | 93 | it "calls middlewares" do 94 | expect_any_instance_of(TestGlobalMiddleware).to receive(:call).with(payload, middleware_options).and_call_original 95 | producer.sync_publish(payload, options) 96 | end 97 | end 98 | end 99 | 100 | context "when delivery fails with Kafka::DeliveryFailed" do 101 | before do 102 | allow(client).to receive(:produce_sync).and_raise(error) 103 | end 104 | 105 | it "logs the error and returns false" do 106 | expect(producer).to receive(:log_error).once 107 | expect(producer.sync_publish(payload, options)).to be(false) 108 | end 109 | 110 | context "when multiple exception" do 111 | let(:error) do 112 | cause = StandardError.new("Second Exception") 113 | TestWrapError.new("First Exception", cause) 114 | end 115 | 116 | it "raises an error" do 117 | expect(logger).to 
receive(:tagged).with(include(:stacktrace)).and_yield 118 | expect(logger).to receive(:error).with(/KAFKA ERROR: StandardError Second Exception. TestWrapError First Exception/) 119 | 120 | expect(producer.sync_publish(payload, options)).to be(false) 121 | end 122 | end 123 | end 124 | end 125 | 126 | describe "#sync_publish!" do 127 | context "when payload is successfully delivered" do 128 | before do 129 | allow(client).to receive(:produce_sync).with( 130 | payload: payload, 131 | topic: "test_topic", 132 | seed_brokers: "kafka://kafka:9092" 133 | ).and_return(delivery_report, 0.2) 134 | end 135 | 136 | it "produces the payload via the client and returns true" do 137 | expect(producer.sync_publish!(payload, options)).to be(true) 138 | end 139 | 140 | it "logs the success message with correct tags" do 141 | expect(logger).to receive(:tagged).with(hash_including( 142 | kafka: hash_including( 143 | topic: "my_topic", 144 | partition: 0, 145 | offset: 0, 146 | produce_duration_ms: kind_of(Numeric) 147 | ) 148 | )).and_yield 149 | 150 | expect(logger).to receive(:info).with("Message has been successfully sent to Kafka") 151 | 152 | producer.sync_publish!(payload, options) 153 | end 154 | 155 | context "when used middlewares" do 156 | let(:middlewares) { [MyMiddleware] } 157 | let(:middleware_options) do 158 | options.merge(topic: topic, producing_type: :sync) 159 | end 160 | 161 | it "calls middlewares" do 162 | expect_any_instance_of(MyMiddleware).to receive(:call).with(payload, middleware_options).and_call_original 163 | producer.sync_publish(payload, options) 164 | end 165 | end 166 | end 167 | 168 | context "when delivery fails with Kafka::DeliveryFailed" do 169 | before do 170 | allow(client).to receive(:produce_sync).and_raise(error) 171 | end 172 | 173 | it "raises an error" do 174 | expect { producer.sync_publish!(payload, options) }.to raise_error(error) 175 | end 176 | end 177 | end 178 | 179 | describe "#async_publish" do 180 | context "when payload is 
successfully delivered" do 181 | before do 182 | allow(client).to receive(:produce_async).with( 183 | payload: payload, 184 | topic: "test_topic", 185 | seed_brokers: "kafka://kafka:9092" 186 | ).and_return(delivery_handle) 187 | end 188 | 189 | it "produces the payload via the client and returns true" do 190 | expect(producer.async_publish(payload, options)).to be(true) 191 | end 192 | 193 | context "when used middlewares" do 194 | let(:middlewares) { [MyMiddleware] } 195 | let(:middleware_options) do 196 | options.merge(topic: topic, producing_type: :async) 197 | end 198 | 199 | it "calls middlewares" do 200 | expect_any_instance_of(MyMiddleware).to receive(:call).with(payload, middleware_options).and_call_original 201 | producer.async_publish(payload, options) 202 | end 203 | end 204 | 205 | context "when used global middlewares" do 206 | let(:producer) { described_class.new(client: client, topic: topic) } 207 | let(:middleware_options) do 208 | options.merge(topic: topic, producing_type: :async) 209 | end 210 | 211 | it "calls middlewares" do 212 | expect_any_instance_of(TestGlobalMiddleware).to receive(:call).with(payload, middleware_options).and_call_original 213 | producer.async_publish(payload, options) 214 | end 215 | end 216 | end 217 | 218 | context "when delivery fails with Kafka::DeliveryFailed" do 219 | before do 220 | allow(client).to receive(:produce_async).and_raise(error) 221 | end 222 | 223 | it "logs the error and returns false" do 224 | expect(producer).to receive(:log_error).once 225 | expect(producer.async_publish(payload, options)).to be(false) 226 | end 227 | 228 | context "when multiple exception" do 229 | let(:error) do 230 | cause = StandardError.new("Second Exception") 231 | TestWrapError.new("First Exception", cause) 232 | end 233 | 234 | it "raises an error" do 235 | expect(logger).to receive(:tagged).with(include(:stacktrace)).and_yield 236 | expect(logger).to receive(:error).with(/KAFKA ERROR: StandardError Second Exception. 
TestWrapError First Exception/) 237 | 238 | expect(producer.async_publish(payload, options)).to be(false) 239 | end 240 | end 241 | end 242 | end 243 | 244 | describe "#async_publish!" do 245 | context "when payload is successfully delivered" do 246 | before do 247 | allow(client).to receive(:produce_async).with( 248 | payload: payload, 249 | topic: "test_topic", 250 | seed_brokers: "kafka://kafka:9092" 251 | ).and_return(delivery_handle) 252 | end 253 | 254 | it "produces the payload via the client and returns true" do 255 | expect(producer.async_publish!(payload, options)).to be(true) 256 | end 257 | 258 | context "when used middlewares" do 259 | let(:middlewares) { [MyMiddleware] } 260 | let(:middleware_options) do 261 | options.merge(topic: topic, producing_type: :async) 262 | end 263 | 264 | it "calls middlewares" do 265 | expect_any_instance_of(MyMiddleware).to receive(:call).with(payload, middleware_options).and_call_original 266 | producer.async_publish(payload, options) 267 | end 268 | end 269 | end 270 | 271 | context "when delivery fails with Kafka::DeliveryFailed" do 272 | before do 273 | allow(client).to receive(:produce_async).and_raise(error) 274 | end 275 | 276 | it "raises an error" do 277 | expect { producer.async_publish!(payload, options) }.to raise_error(error) 278 | end 279 | end 280 | end 281 | 282 | describe "#initialize" do 283 | it "sets the client to the default client if no client is provided" do 284 | producer = described_class.new(topic: topic) 285 | expect(producer.client).to eq(client) 286 | end 287 | 288 | it "sets the topic" do 289 | producer = described_class.new(client: client, topic: topic) 290 | 291 | expect(producer.topic).to eq(topic) 292 | end 293 | end 294 | end 295 | --------------------------------------------------------------------------------