├── .rspec ├── .rubocop.yml ├── CHANGELOG.md ├── lib └── sourced │ ├── version.rb │ ├── rails │ ├── templates │ │ ├── bin_sors │ │ └── create_sors_tables.rb.erb │ ├── railtie.rb │ └── install_generator.rb │ ├── backends │ ├── sequel_backend │ │ ├── group_updater.rb │ │ └── installer.rb │ ├── test_backend │ │ ├── test_pub_sub.rb │ │ ├── state.rb │ │ └── group.rb │ ├── pg_pub_sub.rb │ └── test_backend.rb │ ├── thread_executor.rb │ ├── types.rb │ ├── injector.rb │ ├── command_context.rb │ ├── actions.rb │ ├── async_executor.rb │ ├── consumer.rb │ ├── house_keeper.rb │ ├── handler.rb │ ├── error_strategy.rb │ ├── sync.rb │ ├── worker.rb │ ├── supervisor.rb │ ├── projector.rb │ ├── configuration.rb │ ├── evolve.rb │ ├── command_methods.rb │ ├── react.rb │ └── message.rb ├── .gitignore ├── examples ├── workers.rb └── cart.rb ├── sig └── sors.rbs ├── bin ├── setup └── console ├── Rakefile ├── spec ├── async_executor_spec.rb ├── thread_executor_spec.rb ├── backends │ ├── sequel_backend_sqlite_spec.rb │ ├── test_backend_spec.rb │ ├── sequel_backend_postgres_spec.rb │ └── concurrent_projectors_spec.rb ├── actions_spec.rb ├── shared_examples │ └── executor_examples.rb ├── spec_helper.rb ├── sync_spec.rb ├── handler_spec.rb ├── consumer_spec.rb ├── sourced_spec.rb ├── command_context_spec.rb ├── error_strategy_spec.rb ├── worker_spec.rb ├── configuration_spec.rb ├── evolve_spec.rb ├── command_methods_spec.rb ├── load_actor_spec.rb ├── injector_spec.rb ├── react_spec.rb ├── supervisor_spec.rb ├── message_spec.rb ├── testing │ └── rspec_spec.rb ├── projector_spec.rb └── actor_spec.rb ├── Gemfile ├── .github └── workflows │ └── deploy-docs.yml ├── sourced.gemspec ├── Gemfile.lock └── CLAUDE.md /.rspec: -------------------------------------------------------------------------------- 1 | --color 2 | --require spec_helper 3 | --format documentation 4 | -------------------------------------------------------------------------------- /.rubocop.yml: 
-------------------------------------------------------------------------------- 1 | Layout/FirstArgumentIndentation: 2 | Enabled: false 3 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [Unreleased] 2 | 3 | ## [0.1.0] - 2024-09-27 4 | 5 | - Initial release 6 | -------------------------------------------------------------------------------- /lib/sourced/version.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | VERSION = '0.0.1' 5 | end 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .rspec_status 2 | .ruby-version 3 | .env 4 | examples/carts 5 | logs/ 6 | pkg 7 | .DS_Store 8 | .claude 9 | -------------------------------------------------------------------------------- /examples/workers.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative './cart' 4 | 5 | Sourced::Supervisor.start 6 | -------------------------------------------------------------------------------- /sig/sors.rbs: -------------------------------------------------------------------------------- 1 | module Sourced 2 | VERSION: String 3 | # See the writing guide of rbs: https://github.com/ruby/rbs#guides 4 | end 5 | -------------------------------------------------------------------------------- /bin/setup: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | IFS=$'\n\t' 4 | set -vx 5 | 6 | bundle install 7 | 8 | # Do any other automated setup that you need to do here 9 | -------------------------------------------------------------------------------- 
/lib/sourced/rails/templates/bin_sors: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require_relative "../config/environment" 4 | require "sourced" 5 | 6 | ActiveRecord::Base.logger = nil 7 | 8 | Sourced::Supervisor.start 9 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "bundler/gem_tasks" 4 | require "rspec/core/rake_task" 5 | require "rspec/core/rake_task" 6 | require 'docco/tasks' 7 | 8 | RSpec::Core::RakeTask.new(:spec) 9 | 10 | task default: :spec 11 | -------------------------------------------------------------------------------- /spec/async_executor_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | require 'sourced/async_executor' 5 | 6 | RSpec.describe Sourced::AsyncExecutor, type: :executor do 7 | subject(:executor) { described_class.new } 8 | 9 | it_behaves_like 'an executor' 10 | end 11 | -------------------------------------------------------------------------------- /spec/thread_executor_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | require 'sourced/thread_executor' 5 | 6 | RSpec.describe Sourced::ThreadExecutor, type: :executor do 7 | subject(:executor) { described_class.new } 8 | 9 | it_behaves_like 'an executor' 10 | end 11 | -------------------------------------------------------------------------------- /bin/console: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | require "bundler/setup" 5 | require "sourced" 6 | 7 | # You can add fixtures and/or initialization code here to make experimenting 
8 | # with your gem easier. You can also use a different console, if you like. 9 | 10 | require "irb" 11 | IRB.start(__FILE__) 12 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source 'https://rubygems.org' 4 | 5 | # Specify your gem's dependencies in sourced.gemspec 6 | gemspec 7 | 8 | gem 'debug' 9 | gem 'rake', '~> 13.0' 10 | gem 'rubocop' 11 | 12 | group :development do 13 | gem 'docco', github: 'ismasan/docco' 14 | end 15 | 16 | group :test do 17 | gem 'dotenv' 18 | gem 'pg' 19 | gem 'rspec', '~> 3.0' 20 | gem 'sequel' 21 | gem 'sqlite3' 22 | gem 'timecop' 23 | gem 'logger' 24 | end 25 | -------------------------------------------------------------------------------- /lib/sourced/rails/railtie.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | module Rails 5 | class Railtie < ::Rails::Railtie 6 | # TODO: review this. 7 | # Workers use Async, so this is needed 8 | # but not sure this can be safely used with non Async servers like Puma. 9 | # config.active_support.isolation_level = :fiber 10 | 11 | generators do 12 | require 'sourced/rails/install_generator' 13 | end 14 | end 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /spec/backends/sequel_backend_sqlite_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | require 'sourced/backends/sequel_backend' 5 | 6 | RSpec.describe 'Sourced::Backends::SequelBackend with sqlite', skip: true, type: :backend do 7 | subject(:backend) { Sourced::Backends::SequelBackend.new(db) } 8 | 9 | let(:db) do 10 | Sequel.sqlite 11 | end 12 | 13 | before do 14 | backend.install unless backend.installed? 
15 | end 16 | 17 | it_behaves_like 'a backend' 18 | end 19 | -------------------------------------------------------------------------------- /spec/actions_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | RSpec.describe Sourced::Actions do 6 | describe Sourced::Actions::AppendNext do 7 | specify '#==' do 8 | msg1 = Sourced::Message.new(stream_id: 'one') 9 | msg2 = Sourced::Message.new(stream_id: 'two') 10 | action1 = Sourced::Actions::AppendNext.new([msg1]) 11 | action2 = Sourced::Actions::AppendNext.new([msg1]) 12 | action3 = Sourced::Actions::AppendNext.new([msg2]) 13 | expect(action1).to eq(action2) 14 | expect(action1).not_to eq(action3) 15 | end 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /spec/backends/test_backend_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | require 'sourced/backends/test_backend' 5 | 6 | RSpec.describe Sourced::Backends::TestBackend, type: :backend do 7 | subject(:backend) { described_class.new } 8 | 9 | it_behaves_like 'a backend' 10 | 11 | describe 'housekeeping interfaces' do 12 | it 'exposes worker_heartbeat and release_stale_claims' do 13 | expect(backend.worker_heartbeat([])).to eq(0) 14 | expect(backend.worker_heartbeat(['w1', 'w2'])).to eq(2) 15 | expect(backend.release_stale_claims(ttl_seconds: 10)).to eq(0) 16 | end 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /spec/shared_examples/executor_examples.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module ExecutorExamples 4 | RSpec.shared_examples 'an executor' do 5 | it 'runs work concurrently' do 6 | results = [] 7 | queue = Thread::Queue.new 8 | executor.start do 
|task| 9 | task.spawn do 10 | sleep 0.00001 11 | queue << 1 12 | end 13 | 14 | task.spawn do 15 | queue << 2 16 | end 17 | end 18 | 19 | queue.close 20 | while (it = queue.pop) 21 | results << it 22 | end 23 | 24 | expect(results).to eq([2, 1]) 25 | end 26 | 27 | it 'waits and re-raises errors' do 28 | expect do 29 | executor.start do |task| 30 | task.spawn do 31 | raise ArgumentError, 'Test error' 32 | end 33 | 34 | task.spawn do 35 | 36 | end 37 | end 38 | end.to raise_error(ArgumentError, 'Test error') 39 | end 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /spec/spec_helper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'dotenv/load' 4 | require 'sourced' 5 | require 'debug' 6 | require 'logger' 7 | require 'timecop' 8 | require 'sourced/testing/rspec' 9 | require_relative './shared_examples/backend_examples' 10 | require_relative './shared_examples/executor_examples' 11 | 12 | ENV['ENVIRONMENT'] ||= 'test' 13 | 14 | Sourced.configure do |config| 15 | if ENV['LOGS_DIR'] 16 | FileUtils.mkdir_p(ENV['LOGS_DIR']) 17 | config.logger = Logger.new(File.join(ENV['LOGS_DIR'], 'test.log')) 18 | else 19 | config.logger = Logger.new(STDOUT) 20 | end 21 | end 22 | 23 | RSpec.configure do |config| 24 | # Enable flags like --only-failures and --next-failure 25 | config.example_status_persistence_file_path = '.rspec_status' 26 | 27 | # Disable RSpec exposing methods globally on `Module` and `main` 28 | config.disable_monkey_patching! 
29 | 30 | config.expect_with :rspec do |c| 31 | c.syntax = :expect 32 | end 33 | 34 | config.include BackendExamples, type: :backend 35 | config.include ExecutorExamples, type: :executor 36 | config.include Sourced::Testing::RSpec 37 | end 38 | -------------------------------------------------------------------------------- /lib/sourced/backends/sequel_backend/group_updater.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | module Backends 5 | class SequelBackend 6 | class GroupUpdater 7 | attr_reader :group_id, :updates, :error_context 8 | 9 | def initialize(group_id, row, logger) 10 | @group_id = group_id 11 | @row = row 12 | @logger = logger 13 | @error_context = row[:error_context] 14 | @updates = { error_context: @error_context.dup } 15 | end 16 | 17 | def stop(reason = nil) 18 | @logger.error "stopping consumer group #{group_id}" 19 | @updates[:status] = STOPPED 20 | @updates[:retry_at] = nil 21 | @updates[:updated_at] = Time.now 22 | @updates[:error_context][:reason] = reason if reason 23 | end 24 | 25 | def retry(time, ctx = {}) 26 | @logger.warn "retrying consumer group #{group_id} at #{time}" 27 | @updates[:updated_at] = Time.now 28 | @updates[:retry_at] = time 29 | @updates[:error_context].merge!(ctx) 30 | end 31 | end 32 | end 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /.github/workflows/deploy-docs.yml: -------------------------------------------------------------------------------- 1 | name: Build and Deploy Docs 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | 7 | permissions: 8 | contents: read 9 | pages: write 10 | id-token: write 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - name: Check out repository 18 | uses: actions/checkout@v4 19 | 20 | - name: Set up Ruby 21 | uses: ruby/setup-ruby@v1 22 | with: 23 | ruby-version: '3.4.6' 24 | bundler-cache: true 25 | 26 
| # Your script will overwrite docs/index.html, but style.css stays untouched 27 | - name: Generate docs HTML 28 | run: bundle exec rake docco:docs 29 | 30 | # Upload all files in docs (both static + generated) 31 | - name: Upload artifact 32 | uses: actions/upload-pages-artifact@v3 33 | with: 34 | path: docs/ 35 | 36 | deploy: 37 | needs: build 38 | runs-on: ubuntu-latest 39 | environment: 40 | name: github-pages 41 | url: ${{ steps.deployment.outputs.page_url }} 42 | 43 | steps: 44 | - name: Deploy to GitHub Pages 45 | id: deployment 46 | uses: actions/deploy-pages@v4 47 | -------------------------------------------------------------------------------- /lib/sourced/backends/test_backend/test_pub_sub.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | module Backends 5 | class TestBackend 6 | # An in-memory pubsub implementation for testing 7 | class TestPubSub 8 | def initialize 9 | @channels = {} 10 | end 11 | 12 | # @param channel_name [String] 13 | # @return [Channel] 14 | def subscribe(channel_name) 15 | @channels[channel_name] ||= Channel.new(channel_name) 16 | end 17 | 18 | # @param channel_name [String] 19 | # @param event [Sourced::Message] 20 | # @return [self] 21 | def publish(channel_name, event) 22 | channel = @channels[channel_name] 23 | channel&.publish(event) 24 | self 25 | end 26 | 27 | class Channel 28 | attr_reader :name 29 | 30 | def initialize(name) 31 | @name = name 32 | @handlers = [] 33 | end 34 | 35 | def start(handler: nil, &block) 36 | handler ||= block 37 | @handlers << handler 38 | end 39 | 40 | def publish(event) 41 | @handlers.each do |handler| 42 | catch(:stop) do 43 | handler.call(event, self) 44 | end 45 | end 46 | end 47 | 48 | def stop = nil 49 | end 50 | end 51 | 52 | end 53 | end 54 | end 55 | -------------------------------------------------------------------------------- /sourced.gemspec: 
-------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative 'lib/sourced/version' 4 | 5 | Gem::Specification.new do |spec| 6 | spec.name = 'sourced' 7 | spec.version = Sourced::VERSION 8 | spec.authors = ['Ismael Celis'] 9 | spec.email = ['ismaelct@gmail.com'] 10 | 11 | spec.homepage = 'https://github.com/ismasan/sourced' 12 | spec.description = 'Event Sourcing and CQRS for Ruby' 13 | spec.summary = 'Event Sourcing for Ruby built on Decide, Evolve, React' 14 | spec.required_ruby_version = '>= 3.0.0' 15 | 16 | spec.metadata['homepage_uri'] = spec.homepage 17 | spec.metadata['source_code_uri'] = spec.homepage 18 | 19 | # Specify which files should be added to the gem when it is released. 20 | # The `git ls-files -z` loads the files in the RubyGem that have been added into git. 21 | gemspec = File.basename(__FILE__) 22 | spec.files = IO.popen(%w[git ls-files -z], chdir: __dir__, err: IO::NULL) do |ls| 23 | ls.readlines("\x0", chomp: true).reject do |f| 24 | (f == gemspec) || 25 | f.start_with?(*%w[bin/ test/ spec/ features/ .git appveyor Gemfile]) 26 | end 27 | end 28 | spec.bindir = 'exe' 29 | spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) } 30 | spec.require_paths = ['lib'] 31 | 32 | spec.add_dependency 'async' 33 | spec.add_dependency 'plumb', '>= 0.0.17' 34 | 35 | # For more information and examples about making a new gem, check out our 36 | # guide at: https://bundler.io/guides/creating_gem.html 37 | end 38 | -------------------------------------------------------------------------------- /spec/sync_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | RSpec.describe Sourced::Sync do 6 | let(:host) do 7 | Class.new do 8 | include Sourced::Sync 9 | 10 | attr_reader :calls1, :calls2 11 | 12 | def initialize 13 | @calls1 = [] 14 | @calls2 = [] 15 | end 16 | 
17 | sync do |name:, age:| 18 | @calls1 << [name, age] 19 | end 20 | 21 | sync do |name:, age:| 22 | @calls2 << age + 2 23 | end 24 | end 25 | end 26 | 27 | context 'with Procs' do 28 | it 'returns sync blocks bound to host instance and arguments' do 29 | object = host.new 30 | blocks = object.sync_blocks_with(name: 'Joe', age: 30) 31 | blocks.each(&:call) 32 | expect(object.calls1).to eq([['Joe', 30]]) 33 | expect(object.calls2).to eq([32]) 34 | end 35 | end 36 | 37 | context 'with custom #call interfaces' do 38 | it 'returns sync blocks bound to passed arguments' do 39 | host = Class.new do 40 | include Sourced::Sync 41 | end 42 | 43 | collaborator = Struct.new(:args) do 44 | def call(**args) 45 | self.args = args 46 | end 47 | end 48 | 49 | synccer = collaborator.new(nil) 50 | 51 | host.sync synccer 52 | 53 | object = host.new 54 | blocks = object.sync_blocks_with(name: 'Joe', age: 30) 55 | blocks.each(&:call) 56 | expect(synccer.args).to eq(name: 'Joe', age: 30) 57 | end 58 | end 59 | end 60 | -------------------------------------------------------------------------------- /lib/sourced/thread_executor.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'thread' 4 | 5 | module Sourced 6 | # An executor that runs blocks of code concurrently using Ruby threads. 7 | # 8 | # @example Basic usage 9 | # executor = ThreadExecutor.new 10 | # executor.start do |task| 11 | # task.spawn { puts "First thread: #{Thread.current}" } 12 | # task.spawn { puts "Second thread: #{Thread.current}" } 13 | # end 14 | # 15 | class ThreadExecutor 16 | def self.start(&) 17 | new.start(&) 18 | end 19 | 20 | # Initialize a new thread executor 21 | # Sets up internal state for tracking spawned threads. 
22 | def initialize 23 | @threads = [] 24 | end 25 | 26 | # Return a string representation of this executor 27 | # 28 | # @return [String] The class name 29 | def to_s 30 | self.class.name 31 | end 32 | 33 | def new_queue 34 | Queue.new 35 | end 36 | 37 | # Start the executor and yield itself for spawning concurrent work 38 | # This method will block until all spawned threads have completed. 39 | # 40 | # @yieldparam self [ThreadExecutor] The executor instance for spawning threads 41 | # @return [void] Blocks until all spawned threads complete 42 | def start(wait: true, &) 43 | yield self 44 | self.wait if wait 45 | self 46 | end 47 | 48 | def wait 49 | @threads.map(&:join) 50 | end 51 | 52 | # Spawn a new thread to execute the given block concurrently 53 | # 54 | # @yieldparam work [Proc] The block to execute in a new thread 55 | # @return [Thread] The spawned thread 56 | def spawn(&work) 57 | @threads << Thread.new(&work) 58 | self 59 | end 60 | end 61 | end 62 | -------------------------------------------------------------------------------- /lib/sourced/rails/install_generator.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'rails/generators' 4 | require 'rails/generators/active_record' 5 | 6 | module Sourced 7 | module Rails 8 | class InstallGenerator < ::Rails::Generators::Base 9 | include ActiveRecord::Generators::Migration 10 | 11 | source_root File.expand_path('templates', __dir__) 12 | 13 | class_option :prefix, type: :string, default: 'sourced' 14 | 15 | def copy_initializer_file 16 | create_file 'config/initializers/sourced.rb' do 17 | <<~CONTENT 18 | # frozen_string_literal: true 19 | 20 | require 'sourced' 21 | require 'sourced/backends/active_record_backend' 22 | 23 | # This table prefix is used to generate the initial database migrations. 24 | # If you change the table prefix here, 25 | # make sure to migrate your database to the new table names. 
26 | Sourced::Backends::ActiveRecordBackend.table_prefix = '#{table_prefix}' 27 | 28 | # Configure Sors to use the ActiveRecord backend 29 | Sourced.configure do |config| 30 | config.backend = Sourced::Backends::ActiveRecordBackend.new 31 | config.logger = Rails.logger 32 | end 33 | CONTENT 34 | end 35 | end 36 | 37 | def copy_bin_file 38 | copy_file 'bin_sourced', 'bin/sourced' 39 | chmod 'bin/sourced', 0o755 40 | end 41 | 42 | def create_migration_file 43 | migration_template 'create_sourced_tables.rb.erb', File.join(db_migrate_path, 'create_sourced_tables.rb') 44 | end 45 | 46 | private 47 | 48 | def migration_version 49 | "[#{ActiveRecord::VERSION::STRING.to_f}]" 50 | end 51 | 52 | def table_prefix 53 | options['prefix'] 54 | end 55 | end 56 | end 57 | end 58 | -------------------------------------------------------------------------------- /spec/handler_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | module HandlerTests 6 | class MyHandler 7 | include Sourced::Handler 8 | 9 | Event = Sourced::Message.define('handlertest.event') do 10 | attribute :value 11 | end 12 | 13 | on :start, name: String do |event| 14 | [event.follow(Event, value: event.payload.name)] 15 | end 16 | 17 | on :stop do |event, history:| 18 | [event.follow(Event, value: history.size)] 19 | end 20 | 21 | on :foo, :bar do |event, history:| 22 | [event.follow(Event, value: event.class.name)] 23 | end 24 | end 25 | end 26 | 27 | RSpec.describe Sourced::Handler do 28 | it 'implements the Reactor interface' do 29 | expect(Sourced::ReactorInterface === HandlerTests::MyHandler).to be(true) 30 | end 31 | 32 | specify '.handle' do 33 | msg = HandlerTests::MyHandler::Start.build('aa', name: 'Joe') 34 | result = HandlerTests::MyHandler.handle(msg) 35 | expect(result.first).to be_a(Sourced::Actions::AppendNext) 36 | expect(result.first.messages.first.payload.value).to eq('Joe') 37 | 38 | msg2 
= HandlerTests::MyHandler::Stop.build('aa') 39 | result = HandlerTests::MyHandler.handle(msg2, history: [msg2]) 40 | expect(result.first).to be_a(Sourced::Actions::AppendNext) 41 | expect(result.first.messages.first.payload.value).to eq(1) 42 | end 43 | 44 | specify '.on with multiple messages' do 45 | msg = HandlerTests::MyHandler::Foo.build('aa') 46 | result = HandlerTests::MyHandler.handle(msg) 47 | expect(result.first).to be_a(Sourced::Actions::AppendNext) 48 | expect(result.first.messages.first.payload.value).to eq('HandlerTests::MyHandler::Foo') 49 | 50 | msg = HandlerTests::MyHandler::Bar.build('aa') 51 | result = HandlerTests::MyHandler.handle(msg) 52 | expect(result.first).to be_a(Sourced::Actions::AppendNext) 53 | expect(result.first.messages.first.payload.value).to eq('HandlerTests::MyHandler::Bar') 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /lib/sourced/types.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'plumb' 4 | require 'time' 5 | require 'securerandom' 6 | 7 | module Sourced 8 | # Type definitions and validations for Sourced using the Plumb gem. 9 | # This module provides custom types for UUID generation, hash symbolization, 10 | # and interface validation used throughout the Sourced framework. 11 | # 12 | # @example Using AutoUUID type 13 | # AutoUUID.parse(nil) # => generates new UUID 14 | # AutoUUID.parse("existing-uuid") # => "existing-uuid" 15 | # 16 | # @example Using SymbolizedHash type 17 | # SymbolizedHash.parse({ 'a' => { 'b' => 'c' } }) # => { a: { b: 'c' } } 18 | # 19 | # @see https://github.com/ismasan/plumb Plumb gem documentation 20 | module Types 21 | include Plumb::Types 22 | 23 | # A type that accepts UUID strings or generates a new UUID if none provided. 24 | # Useful for default values in message definitions where a UUID is required. 
25 | # 26 | # @example Generate new UUID when nil 27 | # AutoUUID.parse(nil) # => "550e8400-e29b-41d4-a716-446655440000" 28 | # @example Use existing UUID 29 | # AutoUUID.parse("test-uuid") # => "test-uuid" 30 | AutoUUID = UUID::V4.default { SecureRandom.uuid } 31 | 32 | # Turn "Foo::Bar::FooBar" into "foo_bar" 33 | TrailingModuleName = String.transform(::String) { |v| v.split('::').last } 34 | ModulesToDots = String.transform(::String) { |v| v.gsub('::', '.') } 35 | Underscore = String.build(::String) { |v| 36 | v 37 | .gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2') # Handle sequences like "HTTPResponse" -> "HTTP_Response" 38 | .gsub(/([a-z\d])([A-Z])/, '\1_\2') # Handle transitions from lowercase to uppercase 39 | .gsub(/-/, '_') # Replace hyphens with underscores 40 | .downcase # Convert to lowercase 41 | } 42 | ModuleToMethodName = TrailingModuleName >> Underscore 43 | ModuleToMessageType = ModulesToDots >> Underscore 44 | end 45 | end 46 | -------------------------------------------------------------------------------- /spec/consumer_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | module TestConsumer 6 | class TestConsumer 7 | extend Sourced::Consumer 8 | end 9 | end 10 | 11 | RSpec.describe Sourced::Consumer do 12 | describe '#group_id' do 13 | it 'is class name by default' do 14 | expect(TestConsumer::TestConsumer.consumer_info.group_id).to eq('TestConsumer::TestConsumer') 15 | end 16 | 17 | it 'can be set' do 18 | klass = Class.new do 19 | extend Sourced::Consumer 20 | 21 | consumer do |info| 22 | info.group_id = 'my-group' 23 | end 24 | end 25 | 26 | expect(klass.consumer_info.group_id).to eq('my-group') 27 | end 28 | end 29 | 30 | describe '#start_from' do 31 | specify 'default is nil' do 32 | expect(TestConsumer::TestConsumer.consumer_info.start_from.call).to be_nil 33 | end 34 | 35 | it 'can be set to a proc that returns a Time' do 36 | klass = 
Class.new do 37 | extend Sourced::Consumer 38 | 39 | consumer do |info| 40 | info.group_id = 'my-group' 41 | info.start_from = -> { Time.new(2020, 1, 1) } 42 | end 43 | end 44 | 45 | expect(klass.consumer_info.start_from.call).to be_a(Time) 46 | end 47 | 48 | it 'can be set to an :now which is a 5 second time window' do 49 | klass = Class.new do 50 | extend Sourced::Consumer 51 | 52 | consumer do |info| 53 | info.group_id = 'my-group' 54 | info.start_from = :now 55 | end 56 | end 57 | 58 | now = Time.now 59 | Timecop.freeze(now) do 60 | expect(klass.consumer_info.start_from.call).to eq(now - 5) 61 | end 62 | end 63 | end 64 | 65 | describe '.on_exception' do 66 | it 'stops the consumer group by default' do 67 | group = double('group', error_context: {}, stop: true) 68 | exception = StandardError.new('test error') 69 | message = { id: 1 } 70 | TestConsumer::TestConsumer.on_exception(exception, message, group) 71 | expect(group).to have_received(:stop).with(exception:, message:) 72 | end 73 | end 74 | end 75 | -------------------------------------------------------------------------------- /lib/sourced/injector.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | # The Injector analyzes method signatures to determine which keyword arguments 5 | # should be automatically provided when calling methods. This enables dependency 6 | # injection based on method parameter declarations. 
7 | # 8 | # @example Basic usage 9 | # class MyReactor 10 | # def self.handle(event, replaying:, history:) 11 | # # Method that expects replaying and history arguments 12 | # end 13 | # end 14 | # 15 | # args = Injector.resolve_args(MyReactor, :handle) 16 | # # => [:replaying, :history] 17 | class Injector 18 | # Parameter types that indicate injectable keyword arguments 19 | # - :keyreq - Required keyword arguments (replaying:) 20 | # - :key - Optional keyword arguments (replaying: false) 21 | KEYS = %i[keyreq key].freeze 22 | 23 | class << self 24 | # Analyze a method signature and return the names of keyword arguments 25 | # that should be automatically injected. 26 | # 27 | # @param args [Array] Method specification - either [Class, Symbol] or [Proc] 28 | # @return [Array] List of keyword argument names to inject 29 | # 30 | # @example Analyze a class method 31 | # Injector.resolve_args(MyClass, :handle) 32 | # # => [:replaying, :history] 33 | # 34 | # @example Analyze a constructor 35 | # Injector.resolve_args(MyClass, :new) 36 | # # => [:backend, :logger] 37 | # 38 | # @example Analyze a proc 39 | # my_proc = proc { |event, replaying:| ... 
} 40 | # Injector.resolve_args(my_proc) 41 | # # => [:replaying] 42 | def resolve_args(*args) 43 | parameters = case args 44 | in [Proc] # single proc 45 | args.first.parameters 46 | in [Class => obj, :new] 47 | obj.instance_method(:initialize).parameters 48 | in [Object => obj, Symbol => meth] 49 | obj.method(meth).parameters 50 | end 51 | 52 | parameters.each_with_object([]) do |(type, name), list| 53 | list << name if KEYS.include?(type) 54 | end 55 | end 56 | end 57 | end 58 | end 59 | -------------------------------------------------------------------------------- /lib/sourced/rails/templates/create_sors_tables.rb.erb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class CreateSorsTables < ActiveRecord::Migration<%= migration_version %> 4 | def change 5 | # Uncomment for Postgres v12 or earlier to enable gen_random_uuid() support 6 | # enable_extension 'pgcrypto' 7 | 8 | if connection.class.name == 'ActiveRecord::ConnectionAdapters::SQLite3Adapter' 9 | create_table :<%= table_prefix %>_events, id: false do |t| 10 | t.string :id, null: false, index: { unique: true } 11 | t.bigint :global_seq, primary_key: true 12 | t.bigint :seq 13 | t.string :stream_id, null: false, index: true 14 | t.string :type, null: false 15 | t.datetime :created_at 16 | t.string :producer 17 | t.string :causation_id, index: true 18 | t.string :correlation_id 19 | t.text :payload 20 | end 21 | else 22 | create_table :<%= table_prefix %>_events, id: :uuid do |t| 23 | t.bigserial :global_seq, index: true 24 | t.bigint :seq 25 | t.string :stream_id, null: false, index: true 26 | t.string :type, null: false 27 | t.datetime :created_at 28 | t.string :producer 29 | t.uuid :causation_id, index: true 30 | t.uuid :correlation_id 31 | t.jsonb :payload 32 | end 33 | end 34 | 35 | add_index :<%= table_prefix %>_events, %i[stream_id seq], unique: true 36 | 37 | create_table :<%= table_prefix %>_streams do |t| 38 | t.text 
:stream_id, null: false, index: { unique: true } 39 | t.boolean :locked, default: false, null: false 40 | end 41 | 42 | create_table :<%= table_prefix %>_commands do |t| 43 | t.string :stream_id, null: false 44 | if t.class.name == 'ActiveRecord::ConnectionAdapters::SQLite3::TableDefinition' 45 | t.text :data, null: false 46 | t.datetime :scheduled_at, null: false, default: -> { 'CURRENT_TIMESTAMP' } 47 | else 48 | t.jsonb :data, null: false 49 | t.datetime :scheduled_at, null: false, default: -> { 'NOW()' } 50 | end 51 | end 52 | 53 | add_foreign_key :<%= table_prefix %>_commands, :<%= table_prefix %>_streams, column: :stream_id, primary_key: :stream_id 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /lib/sourced/command_context.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'sourced/types' 4 | 5 | module Sourced 6 | # A command factory to instantiate commands from Hash attributes 7 | # including extra metadata. 8 | # @example 9 | # 10 | # ctx = Sourced::CommandContext.new( 11 | # stream_id: params[:stream_id], 12 | # metadata: { 13 | # user_id: session[:user_id] 14 | # } 15 | # ) 16 | # 17 | # # params[:command] should be a Hash with { type: String, payload: Hash | nil } 18 | # 19 | # cmd = ctx.build(params[:command]) 20 | # cmd.stream_id # String 21 | # cmd.metadata[:user_id] # == session[:user_id] 22 | # 23 | # Passing a command subclass will scope command lookup to subclasses of that class. 24 | # Useful for restricting clients to a specific set of commands. 
25 | # 26 | # @example 27 | # 28 | # ctx = Sourced::CommandContext.new(scope: PublicCommand) 29 | # 30 | # cmd = ctx.build(type: 'do_something', payload: { foo: 'bar' }) 31 | # 32 | # # Or with class and attrs 33 | # cmd = ctx.build(SomeCommand, stream_id: '111', payload: { foo: 'bar' }) 34 | # 35 | # Attempting to build a command not in the scope will raise an error. 36 | class CommandContext 37 | # @option stream_id [String] 38 | # @option metadata [Hash] metadata to add to commands built by this context 39 | # @option scope [Sourced::Message] Message class to use as command registry 40 | def initialize(stream_id: nil, metadata: Plumb::BLANK_HASH, scope: Sourced::Command) 41 | @defaults = { 42 | stream_id:, 43 | metadata: 44 | }.freeze 45 | @scope = scope 46 | end 47 | 48 | # @param attrs [Hash] attributes to look up and build a command from. 49 | # @return [Sourced::Message] 50 | def build(*args) 51 | case args 52 | in [Class => klass, Hash => attrs] 53 | attrs = defaults.merge(Types::SymbolizedHash.parse(attrs)) 54 | klass.parse(attrs) 55 | in [Hash => attrs] 56 | attrs = defaults.merge(Types::SymbolizedHash.parse(attrs)) 57 | scope.from(attrs) 58 | else 59 | raise ArgumentError, "Invalid arguments: #{args.inspect}" 60 | end 61 | end 62 | 63 | private 64 | 65 | attr_reader :defaults, :scope 66 | end 67 | end 68 | -------------------------------------------------------------------------------- /lib/sourced/actions.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | module Actions 5 | # Split a list of messages into 6 | # Actions::AppendNext or 7 | # Actions::Schedule 8 | # based on their #created_at 9 | def self.build_for(messages) 10 | actions = [] 11 | return actions if messages.empty?
12 | 13 | # TODO: I really need a uniform Clock object 14 | now = Time.now 15 | to_schedule, to_append = messages.partition { |e| e.created_at > now } 16 | actions << AppendNext.new(to_append) if to_append.any? 17 | to_schedule.group_by(&:created_at).each do |at, msgs| 18 | actions << Schedule.new(msgs, at:) 19 | end 20 | 21 | actions 22 | end 23 | 24 | RETRY = :retry 25 | OK = :ok 26 | 27 | # ACK an arbitrary message ID 28 | Ack = Data.define(:message_id) 29 | 30 | # Append messages to event store 31 | # using Backend#append_next_to_stream 32 | # which auto-increments stream sequence 33 | class AppendNext 34 | include Enumerable 35 | 36 | attr_reader :messages 37 | 38 | def initialize(messages) 39 | @messages = messages 40 | freeze 41 | end 42 | 43 | def ==(other) 44 | other.is_a?(self.class) && messages == other.messages 45 | end 46 | 47 | def each(&block) 48 | return enum_for(:each) unless block_given? 49 | 50 | messages.each do |message| 51 | block.call(message.stream_id, message) 52 | end 53 | end 54 | end 55 | 56 | # Append messages to a stream in event store 57 | # expecting messages to be in order 58 | # and with correct sequence numbers. 59 | # The backend will raise an error if messages with same sequence 60 | # exist in the store (ie optimistic concurrency control). 61 | class AppendAfter 62 | attr_reader :stream_id, :messages 63 | 64 | def initialize(stream_id, messages) 65 | @stream_id = stream_id 66 | @messages = messages 67 | end 68 | end 69 | 70 | class Schedule 71 | attr_reader :messages, :at 72 | 73 | def initialize(messages, at:) 74 | @messages, @at = messages, at 75 | end 76 | end 77 | 78 | class Sync 79 | def initialize(work) 80 | @work = work 81 | end 82 | 83 | def call(...) = @work.call(...)
84 | end 85 | end 86 | end 87 | -------------------------------------------------------------------------------- /lib/sourced/async_executor.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'async' 4 | require 'async/queue' 5 | require 'async/barrier' 6 | 7 | module Sourced 8 | # An executor that runs blocks of code concurrently using fibers 9 | # via the Async gem. This is the default executor for Sourced. 10 | # 11 | # @example Basic usage 12 | # executor = AsyncExecutor.new 13 | # executor.start do |task| 14 | # task.spawn { puts "First task" } 15 | # task.spawn { puts "Second task" } 16 | # end 17 | # 18 | # @see https://github.com/socketry/async Async gem documentation 19 | class AsyncExecutor 20 | # A wrapper around Async::Task that provides the spawn interface 21 | # for creating concurrent fiber-based tasks. 22 | class Task 23 | # Initialize a new task wrapper 24 | # 25 | # @param task [Async::Task] The underlying async task 26 | # @option wait [Boolean] Whether to wait for tasks 27 | def initialize(wait: true) 28 | @wait = wait 29 | @blocks = [] 30 | @barrier = nil 31 | # freeze 32 | end 33 | 34 | # Spawn a new concurrent fiber within this task's context 35 | # 36 | # @yieldparam block [Proc] The block to execute concurrently 37 | # @return [Async::Task] The spawned async task 38 | def spawn(&block) 39 | @blocks << block 40 | self 41 | end 42 | 43 | def start 44 | @barrier = Async::Barrier.new 45 | Async(transient: !@wait) do |t| 46 | @blocks.each do |bl| 47 | @barrier.async(&bl) 48 | end 49 | end 50 | end 51 | 52 | def wait = @barrier&.wait 53 | end 54 | 55 | def self.start(&) 56 | new.start(&) 57 | end 58 | 59 | # Return a string representation of this executor 60 | # 61 | # @return [String] The class name 62 | def to_s 63 | self.class.name 64 | end 65 | 66 | def new_queue 67 | Async::Queue.new 68 | end 69 | 70 | # Start the executor and yield a task interface for 
spawning concurrent work 71 | # 72 | # @yieldparam task [Task] Interface for spawning concurrent tasks 73 | # @return [void] Blocks until all spawned tasks complete 74 | def start(wait: true, &) 75 | task = Task.new(wait:) 76 | yield task if block_given? 77 | task.start 78 | task.wait if wait 79 | task 80 | end 81 | end 82 | end 83 | -------------------------------------------------------------------------------- /spec/sourced_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | RSpec.describe Sourced do 4 | it 'has a version number' do 5 | expect(Sourced::VERSION).not_to be nil 6 | end 7 | 8 | describe '.new_stream_id' do 9 | specify 'no prefix' do 10 | si1 = Sourced.new_stream_id 11 | si2 = Sourced.new_stream_id 12 | expect(si1).not_to eq(si2) 13 | end 14 | 15 | specify 'with prefix' do 16 | si1 = Sourced.new_stream_id('cart') 17 | si2 = Sourced.new_stream_id('cart') 18 | expect(si1).not_to eq(si2) 19 | expect(si1).to start_with('cart') 20 | end 21 | end 22 | 23 | describe '.dispatch(message)' do 24 | before(:all) do 25 | @message_class = Sourced::Message.define('dispatch.test') do 26 | attribute :name, String 27 | end 28 | end 29 | 30 | it 'appends message' do 31 | msg = @message_class.parse(stream_id: 'aaa', payload: { name: 'Joe' }) 32 | expect(Sourced.dispatch(msg)).to eq(msg) 33 | expect(Sourced.config.backend.read_stream('aaa').map(&:id)).to eq([msg.id]) 34 | end 35 | 36 | it 'raises if message is invalid' do 37 | msg = @message_class.new(stream_id: 'aaa', payload: { name: 22 }) 38 | expect { 39 | Sourced.dispatch(msg) 40 | }.to raise_error(Sourced::InvalidMessageError) 41 | end 42 | 43 | it 'raises if backend fails to append' do 44 | msg = @message_class.parse(stream_id: 'aaa', payload: { name: 'Joe' }) 45 | allow(Sourced.config.backend).to receive(:append_next_to_stream).and_return false 46 | expect { 47 | Sourced.dispatch(msg) 48 | }.to raise_error(Sourced::BackendError) 49 | 
end 50 | end 51 | 52 | specify '.registered?' do 53 | reactor1 = Class.new do 54 | extend Sourced::Consumer 55 | 56 | consumer do |info| 57 | info.group_id = 'reactor1' 58 | end 59 | 60 | def self.handled_messages = [Sourced::Event] 61 | def self.handle(...) = [] 62 | end 63 | 64 | reactor2 = Class.new do 65 | extend Sourced::Consumer 66 | 67 | consumer do |info| 68 | info.group_id = 'reactor2' 69 | end 70 | 71 | def self.handled_messages = [Sourced::Event] 72 | def self.handle(...) = [] 73 | end 74 | 75 | Sourced.register(reactor1) 76 | 77 | expect(Sourced.registered?(reactor1)).to be true 78 | expect(Sourced.registered?(reactor2)).to be false 79 | end 80 | end 81 | -------------------------------------------------------------------------------- /spec/command_context_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | module ContextTest 6 | Add = Sourced::Command.define('ctest.add') do 7 | attribute :value, Integer 8 | end 9 | 10 | Added = Sourced::Event.define('ctest.added') 11 | end 12 | 13 | RSpec.describe Sourced::CommandContext do 14 | describe '#build' do 15 | it 'builds command with stream_id and metadata' do 16 | ctx = described_class.new(stream_id: '123', metadata: { user_id: 10 }) 17 | cmd = ctx.build(type: 'ctest.add', payload: { value: 1 }) 18 | expect(cmd).to be_a(ContextTest::Add) 19 | expect(cmd.stream_id).to eq('123') 20 | expect(cmd.payload.value).to eq(1) 21 | expect(cmd.metadata[:user_id]).to eq(10) 22 | end 23 | 24 | it 'allows overriding stream_id' do 25 | ctx = described_class.new(stream_id: '123', metadata: { user_id: 10 }) 26 | cmd = ctx.build(stream_id: 'aaa', type: 'ctest.add', payload: { value: 1 }) 27 | expect(cmd.stream_id).to eq('aaa') 28 | end 29 | 30 | it 'can take a command class' do 31 | ctx = described_class.new(metadata: { user_id: 10 }) 32 | cmd = ctx.build(ContextTest::Add, stream_id: '123', payload: { value: 1 }) 33 | 
expect(cmd).to be_a(ContextTest::Add) 34 | expect(cmd.stream_id).to eq('123') 35 | expect(cmd.payload.value).to eq(1) 36 | expect(cmd.metadata[:user_id]).to eq(10) 37 | expect(cmd.valid?).to eq(true) 38 | end 39 | 40 | it 'symbolizes attributes' do 41 | ctx = described_class.new(stream_id: '123', metadata: { user_id: 10 }) 42 | cmd = ctx.build('type' => 'ctest.add', 'payload' => { 'value' => 1 }) 43 | expect(cmd).to be_a(ContextTest::Add) 44 | expect(cmd.stream_id).to eq('123') 45 | expect(cmd.payload.value).to eq(1) 46 | expect(cmd.metadata[:user_id]).to eq(10) 47 | end 48 | 49 | it 'raises an exception if command type does not exist' do 50 | ctx = described_class.new(stream_id: '123', metadata: { user_id: 10 }) 51 | expect do 52 | ctx.build('type' => 'nope', 'payload' => { 'value' => 1 }) 53 | end.to raise_error(Sourced::UnknownMessageError) 54 | end 55 | 56 | it 'raises an exception if command type does not exist in scope class' do 57 | ctx = described_class.new(stream_id: '123', metadata: { user_id: 10 }) 58 | expect do 59 | ctx.build('type' => 'ctest.added', 'payload' => { 'value' => 1 }) 60 | end.to raise_error(Sourced::UnknownMessageError) 61 | end 62 | end 63 | end 64 | -------------------------------------------------------------------------------- /spec/error_strategy_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | RSpec.describe Sourced::ErrorStrategy do 6 | let(:group_class) do 7 | Struct.new(:status, :retry_at, :error_context) do 8 | def retry(later, ctx = {}) 9 | self.retry_at = later 10 | self.error_context.merge!(ctx) 11 | self 12 | end 13 | 14 | def stop(reason = {}) 15 | self.status = :stopped 16 | self.error_context.merge!(reason:) 17 | self 18 | end 19 | end 20 | end 21 | let(:group) { group_class.new(:active, nil, {}) } 22 | let(:exception) { StandardError.new } 23 | let(:message) { Sourced::Message.new } 24 | 25 | before do 26 | 
allow(group).to receive(:retry).and_call_original 27 | allow(group).to receive(:stop).and_call_original 28 | end 29 | 30 | it 'stops the group immediatly by default' do 31 | strategy = described_class.new 32 | strategy.call(exception, message, group) 33 | expect(group).to have_received(:stop).with(exception:, message:) 34 | end 35 | 36 | it 'can be configured with retries' do 37 | now = Time.new(2020, 1, 1).utc 38 | 39 | retries = [] 40 | stop_call = nil 41 | 42 | strategy = described_class.new do |s| 43 | s.retry(times: 3, after: 5, backoff: ->(retry_after, retry_count) { retry_after * retry_count }) 44 | 45 | s.on_retry do |n, exception, message, later| 46 | retries << [n, exception, message, later] 47 | end 48 | 49 | s.on_stop do |exception, message| 50 | stop_call = [exception, message] 51 | end 52 | end 53 | 54 | Timecop.freeze(now) do 55 | strategy.call(exception, message, group) 56 | strategy.call(exception, message, group) 57 | strategy.call(exception, message, group) 58 | 59 | expect(stop_call).to be(nil) 60 | 61 | strategy.call(exception, message, group) 62 | 63 | expect(retries).to eq([ 64 | [1, exception, message, now + 5], 65 | [2, exception, message, now + 10], 66 | [3, exception, message, now + 15] 67 | ]) 68 | 69 | expect(stop_call).to eq([exception, message]) 70 | 71 | expect(group).to have_received(:retry).with(now + 5, retry_count: 2).exactly(1).times 72 | expect(group).to have_received(:retry).with(now + 10, retry_count: 3).exactly(1).times 73 | expect(group).to have_received(:retry).with(now + 15, retry_count: 4).exactly(1).times 74 | expect(group).to have_received(:stop).exactly(1).times 75 | end 76 | end 77 | end 78 | -------------------------------------------------------------------------------- /lib/sourced/consumer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | # This mixin provides consumer info configuration 5 | # and a .consumer_info 
method to access it. 6 | # @example 7 | # 8 | # class MyConsumer 9 | # extend Sourced::Consumer 10 | # 11 | # consumer do |c| 12 | # # consumer group 13 | # c.group_id = 'my-group' 14 | # 15 | # # Start consuming events from the beginning of history 16 | # c.start_from = :beginning 17 | # end 18 | # end 19 | # 20 | # MyConsumer.consumer_info.group_id # => 'my-group' 21 | # 22 | module Consumer 23 | class ConsumerInfo < Types::Data 24 | ToBlock = Types::Any.transform(Proc) { |v| -> { v } } 25 | StartFromBeginning = Types::Value[:beginning] >> Types::Static[nil] >> ToBlock 26 | StartFromNow = Types::Value[:now] >> Types::Static[-> { Time.now - 5 }.freeze] 27 | StartFromTime = Types::Interface[:call].check('must return a Time') { |v| v.call.is_a?(Time) } 28 | 29 | StartFrom = ( 30 | StartFromBeginning | StartFromNow | StartFromTime 31 | ).default { -> { nil } } 32 | 33 | attribute :group_id, Types::String.present, writer: true 34 | attribute :start_from, StartFrom, writer: true 35 | end 36 | 37 | def consumer_info 38 | @consumer_info ||= ConsumerInfo.new(group_id: name, start_from: :beginning) 39 | end 40 | 41 | def consumer(&) 42 | return consumer_info unless block_given? 43 | 44 | info = ConsumerInfo.new(group_id: name) 45 | yield info 46 | raise Plumb::ParseError, info.errors unless info.valid? 
47 | 48 | @consumer_info = info 49 | end 50 | 51 | # Implement this in your reactors 52 | # to manage exception handling in eventually-consistent workflows 53 | # @example retry with exponential back off 54 | # 55 | # def self.on_exception(exception, _message, group) 56 | # retry_count = group.error_context[:retry_count] || 0 57 | # if retry_count < 3 58 | # later = 5 + 5 * retry_count 59 | # group.retry(later, retry_count: retry_count + 1) 60 | # else 61 | # group.stop(exception) 62 | # end 63 | # end 64 | # 65 | # @param exception [Exception] the exception raised 66 | # @param message [Sourced::Message] the event or command being handled 67 | # @param group [#stop, #retry] consumer group object to update state, ie. for retries 68 | def on_exception(exception, message, group) 69 | Sourced.config.error_strategy.call(exception, message, group) 70 | end 71 | end 72 | end 73 | -------------------------------------------------------------------------------- /lib/sourced/house_keeper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | class HouseKeeper 5 | attr_reader :name 6 | 7 | def initialize( 8 | logger: Sourced.config.logger, 9 | interval: 3, 10 | heartbeat_interval: 5, 11 | claim_ttl_seconds: 120, 12 | backend:, 13 | name:, 14 | worker_ids_provider: nil 15 | ) 16 | @logger = logger 17 | @interval = interval 18 | @heartbeat_interval = heartbeat_interval 19 | @claim_ttl_seconds = claim_ttl_seconds 20 | @backend = backend 21 | @name = name 22 | @running = false 23 | @worker_ids_provider = worker_ids_provider || -> { [] } 24 | end 25 | 26 | def work 27 | @running = true 28 | 29 | # On start wait for a random period 30 | # in order to space out multiple house-keepers 31 | sleep rand(5) 32 | logger.info "HouseKeeper #{name}: starting" 33 | 34 | # Reap stale claims on startup (from previous runs where workers were killed) 35 | released = 
backend.release_stale_claims(ttl_seconds: @claim_ttl_seconds) 36 | logger.info "HouseKeeper #{name}: startup cleanup released #{released} stale claims" if released && released > 0 37 | 38 | last_heartbeat = Time.at(0) 39 | last_stale_reaping = Time.now 40 | while @running 41 | sleep @interval 42 | 43 | # 1) Schedule due messages 44 | schcount = backend.update_schedule! 45 | logger.info "HouseKeeper #{name}: appended #{schcount} scheduled messages" if schcount > 0 46 | 47 | now = Time.now 48 | 49 | # 2) Heartbeat alive workers (bulk upsert) 50 | if now - last_heartbeat >= @heartbeat_interval 51 | ids = Array(@worker_ids_provider.call).uniq 52 | hb = backend.worker_heartbeat(ids) 53 | logger.debug "HouseKeeper #{name}: heartbeated #{hb} workers" if hb > 0 54 | last_heartbeat = now 55 | end 56 | 57 | # 3) Reap stale claims (only every claim_ttl_seconds, since claims can't be stale until then) 58 | if now - last_stale_reaping >= @claim_ttl_seconds 59 | released = backend.release_stale_claims(ttl_seconds: @claim_ttl_seconds) 60 | logger.info "HouseKeeper #{name}: released #{released} stale claims" if released && released > 0 61 | last_stale_reaping = now 62 | end 63 | end 64 | 65 | logger.info "HouseKeeper #{name}: stopped" 66 | end 67 | 68 | def stop 69 | @running = false 70 | end 71 | 72 | private 73 | 74 | attr_reader :logger, :backend, :interval 75 | end 76 | end 77 | -------------------------------------------------------------------------------- /lib/sourced/handler.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | module Handler 5 | PREFIX = 'handle' 6 | 7 | def self.included(base) 8 | base.send :extend, Consumer 9 | base.send :extend, ClassMethods 10 | end 11 | 12 | def handle(message, history:, replaying: false) 13 | handler_name = Sourced.message_method_name(PREFIX, message.class.name) 14 | return Actions::OK unless respond_to?(handler_name) 15 | 16 | args = 
{history:, replaying:} 17 | expected_args = self.class.__args_lookup[handler_name] 18 | handler_args = expected_args.each.with_object({}) do |key, memo| 19 | memo[key] = args[key] 20 | end 21 | 22 | send(handler_name, message, **handler_args) 23 | end 24 | 25 | module ClassMethods 26 | def handled_messages 27 | @handled_messsages ||= [] 28 | end 29 | 30 | def on(*args, &block) 31 | case args 32 | in [Symbol => msg_name, Hash => payload_schema] 33 | __register_named_message_handler(msg_name, payload_schema, &block) 34 | in [Symbol => msg_name] 35 | __register_named_message_handler(msg_name, &block) 36 | in [Class => msg_type] if msg_type < Sourced::Message 37 | __register_class_message_handler(msg_type, &block) 38 | else 39 | args.each do |arg| 40 | on(*arg, &block) 41 | end 42 | end 43 | end 44 | 45 | def handle(message, history: [], replaying: false) 46 | results = new.handle(message, history:, replaying:) 47 | Actions.build_for(results) 48 | end 49 | 50 | def __args_lookup 51 | @__args_lookup ||= {} 52 | end 53 | 54 | private 55 | 56 | def __register_named_message_handler(msg_name, payload_schema = nil, &block) 57 | msg_class = Sourced::Message.define(__message_type(msg_name), payload_schema:) 58 | klass_name = msg_name.to_s.split('_').map(&:capitalize).join 59 | const_set(klass_name, msg_class) 60 | __register_class_message_handler(msg_class, &block) 61 | end 62 | 63 | def __register_class_message_handler(msg_type, &block) 64 | handled_messages << msg_type 65 | handler_name = Sourced.message_method_name(PREFIX, msg_type.name) 66 | __args_lookup[handler_name] = Sourced::Injector.resolve_args(block) 67 | define_method(handler_name, &block) 68 | end 69 | 70 | # TODO: these are in Actor too 71 | def __message_type(msg_name) 72 | [__message_type_prefix, msg_name].join('.').downcase 73 | end 74 | 75 | def message_namespace 76 | Types::ModuleToMessageType.parse(name.to_s) 77 | end 78 | 79 | def __message_type_prefix 80 | @__message_type_prefix ||= message_namespace 81 
| end 82 | end 83 | end 84 | end 85 | -------------------------------------------------------------------------------- /lib/sourced/error_strategy.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | # Built-in configurable error strategy 5 | # for handling exceptions raised during processing messages (commands or events) 6 | # By default it stops the consumer group immediately. 7 | # It can be configured to retry a number of times with a delay between retries. 8 | # It can also register callbacks to be called on retry and on stop. 9 | # 10 | # @example retry with exponential back off and callbacks 11 | # strategy = Sourced::ErrorStrategy.new do |s| 12 | # s.retry(times: 3, after: 5, backoff: ->(retry_after, retry_count) { retry_after * retry_count }) 13 | # 14 | # s.on_retry do |n, exception, message, later| 15 | # LOGGER.info("Retrying #{n} times") 16 | # end 17 | # 18 | # s.on_stop do |exception, message| 19 | # Sentry.capture_exception(exception) 20 | # end 21 | # end 22 | class ErrorStrategy 23 | MAX_RETRIES = 0 24 | RETRY_AFTER = 3 # seconds 25 | BACKOFF = ->(retry_after, retry_count) { retry_after * retry_count } 26 | NOOP_CALLBACK = ->(*_) {} 27 | 28 | attr_reader :max_retries, :retry_after 29 | 30 | def initialize(&setup) 31 | @max_retries = MAX_RETRIES 32 | @retry_after = RETRY_AFTER 33 | @backoff = BACKOFF 34 | @on_retry = NOOP_CALLBACK 35 | @on_stop = NOOP_CALLBACK 36 | 37 | yield(self) if block_given? 38 | freeze 39 | end 40 | 41 | # @option times [Integer] number of retries. Default: 0 42 | # @option after [Integer] delay in seconds between retries. 
Default: 3 43 | # @option backoff [Proc] a callable that takes retry_after and retry_count and returns the delay for the next retry 44 | # @return [self] 45 | def retry(times: nil, after: nil, backoff: nil) 46 | @max_retries = times if times 47 | @retry_after = after if after 48 | @backoff = backoff if backoff 49 | self 50 | end 51 | 52 | def on_retry(callable = nil, &blk) 53 | @on_retry = callable || blk 54 | end 55 | 56 | def on_stop(callable = nil, &blk) 57 | @on_stop = callable || blk 58 | end 59 | 60 | # The Error Strategy interface 61 | # 62 | # @param exception [Exception] 63 | # @param message [Sourced::Message] 64 | # @param group [#retry, #stop] 65 | def call(exception, message, group) 66 | retry_count = group.error_context[:retry_count] || 1 67 | if retry_count <= max_retries 68 | now = Time.now 69 | later = now + (backoff.call(retry_after, retry_count)) 70 | @on_retry.call(retry_count, exception, message, later) 71 | retry_count += 1 72 | group.retry(later, retry_count:) 73 | else 74 | @on_stop.call(exception, message) 75 | group.stop(exception:, message:) 76 | end 77 | end 78 | 79 | private 80 | 81 | attr_reader :backoff 82 | end 83 | end 84 | -------------------------------------------------------------------------------- /lib/sourced/sync.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | # This mixin provides a .sync macro to registering blocks 5 | # that will run within a transaction. Ie. in Actors when appending events to the backend, 6 | # or projectors when persisting their state. 7 | # @example 8 | # 9 | # class CartActor < Sourced::Actor 10 | # # Run this block within a transaction 11 | # # when appending messages to storage 12 | # sync do |state:, command:, events:| 13 | # # Do something here, like updating a view, sending an email, etc. 
14 | # end 15 | # end 16 | # 17 | # When given a ReactorInterface, it will run that reactor synchronously 18 | # and ACK the offsets for the embedded reactor and consumed events. 19 | # This is so that reactors can be run in a strong consistency manner 20 | # within an Actor lifecycle. 21 | # ACKing the events will ensure that the events are not reprocessed 22 | # if the child reactor is later moved to eventually consistent processing. 23 | # @example 24 | # 25 | # class CartActor < Sourced::Actor 26 | # # The CartListings projector 27 | # # will be run synchronously when events are appended by this Actor 28 | # # Any error raised by the projector will cause the transaction to rollback 29 | # sync CartListings 30 | # end 31 | module Sync 32 | CallableInterface = Sourced::Types::Interface[:call] 33 | 34 | def self.included(base) 35 | super 36 | base.extend ClassMethods 37 | end 38 | 39 | def sync_blocks_with(**args) 40 | self.class.sync_blocks.map do |callable| 41 | case callable 42 | when Proc 43 | proc { instance_exec(**args, &callable) } 44 | when CallableInterface 45 | proc { callable.call(**args) } 46 | else 47 | raise ArgumentError, "Not a valid sync block: #{callable.inspect}" 48 | end 49 | end 50 | end 51 | 52 | def sync_actions_with(**args) 53 | sync_blocks_with(**args).map do |bl| 54 | Actions::Sync.new(bl) 55 | end 56 | end 57 | 58 | module ClassMethods 59 | def inherited(subclass) 60 | super 61 | sync_blocks.each do |blk| 62 | subclass.sync_blocks << blk 63 | end 64 | end 65 | 66 | def sync_blocks 67 | @sync_blocks ||= [] 68 | end 69 | 70 | # The .sync macro 71 | # @example 72 | # 73 | # sync do |state, command, events| 74 | # # Do something here, like updating a view, sending an email, etc. 
75 | # end 76 | # 77 | # sync CartListings 78 | # 79 | # @param callable [Nil, Proc, ReactorInterface, CallableInterface] the block to run 80 | # @yieldparam state [Object] the state of the host class 81 | # @yieldparam command [Object, Nil] the command being processed 82 | # @yieldparam events [Array] the events being appended 83 | def sync(callable = nil, &block) 84 | sync_blocks << (block || callable) 85 | end 86 | end 87 | end 88 | end 89 | -------------------------------------------------------------------------------- /lib/sourced/backends/test_backend/state.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | module Backends 5 | class TestBackend 6 | class State 7 | attr_reader :messages, :groups, :messages_by_correlation_id, :messages_by_stream_id, :stream_id_seq_index, :streams, :scheduled_messages 8 | 9 | def initialize( 10 | messages: [], 11 | groups: Hash.new { |h, k| h[k] = Group.new(k, self) }, 12 | messages_by_correlation_id: Hash.new { |h, k| h[k] = [] }, 13 | messages_by_stream_id: Hash.new { |h, k| h[k] = [] }, 14 | stream_id_seq_index: {}, 15 | streams: {}, 16 | scheduled_messages: [] 17 | ) 18 | 19 | @messages = messages 20 | @groups = groups 21 | @messages_by_correlation_id = messages_by_correlation_id 22 | @messages_by_stream_id = messages_by_stream_id 23 | @stream_id_seq_index = stream_id_seq_index 24 | @streams = streams 25 | @scheduled_messages = scheduled_messages 26 | end 27 | 28 | ScheduledMessageRecord = Data.define(:message, :at, :position) do 29 | def <=>(other) 30 | self.position <=> other.position 31 | end 32 | end 33 | 34 | Stream = Data.define(:stream_id, :seq, :updated_at) do 35 | def hash = stream_id 36 | def eql?(other) = other.is_a?(Stream) && stream_id == other.stream_id 37 | end 38 | 39 | def upsert_stream(stream_id, seq) 40 | str = Stream.new(stream_id, seq, Time.now) 41 | @streams[stream_id] = str 42 | end 43 | 44 | def 
schedule_messages(messages, at: Time.now) 45 | counter = @scheduled_messages.size 46 | records = messages.map do |a| 47 | counter += 1 48 | ScheduledMessageRecord.new(a, at, [at, counter]) 49 | end 50 | @scheduled_messages += records 51 | @scheduled_messages.sort! 52 | end 53 | 54 | def next_scheduled_messages(&) 55 | now = Time.now 56 | next_records, @scheduled_messages = @scheduled_messages.partition do |r| 57 | r.at <= now 58 | end 59 | next_messages = next_records.map(&:message) 60 | yield next_messages if next_messages.any? 61 | next_messages 62 | end 63 | 64 | def copy 65 | self.class.new( 66 | messages: messages.dup, 67 | groups: deep_dup(groups), 68 | messages_by_correlation_id: deep_dup(messages_by_correlation_id), 69 | messages_by_stream_id: deep_dup(messages_by_stream_id), 70 | stream_id_seq_index: deep_dup(stream_id_seq_index), 71 | streams: streams.dup, 72 | scheduled_messages: scheduled_messages.dup 73 | ) 74 | end 75 | 76 | private 77 | 78 | def deep_dup(hash) 79 | hash.each.with_object(hash.dup.clear) do |(k, v), new_hash| 80 | new_hash[k] = v.dup 81 | end 82 | end 83 | end 84 | end 85 | end 86 | end 87 | -------------------------------------------------------------------------------- /spec/worker_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | RSpec.describe Sourced::Worker do 6 | let(:router) { instance_double(Sourced::Router) } 7 | let(:logger) { instance_double('Logger', warn: nil, info: nil) } 8 | let(:reactor1) { double('Reactor1', handled_messages: ['event1']) } 9 | let(:reactor2) { double('Reactor2', handled_messages: ['event2']) } 10 | let(:reactor3) { double('Reactor3', handled_messages: []) } 11 | let(:reactors) { [reactor1, reactor2, reactor3] } 12 | 13 | before do 14 | allow(router).to receive(:async_reactors).and_return(reactors) 15 | allow(router).to receive(:handle_next_event_for_reactor).and_return(true) 16 | end 17 | 18 
| describe '#tick' do 19 | subject(:worker) do 20 | described_class.new( 21 | logger:, 22 | name: 'test-worker', 23 | shuffler: ->(arr) { arr }, # No shuffle for predictability 24 | router: 25 | ) 26 | end 27 | 28 | it 'delegates to Router#handle_next_event_for_reactor with current reactor and worker name' do 29 | expect(router).to receive(:handle_next_event_for_reactor) 30 | .with(reactor1, worker.name) 31 | .and_return(true) 32 | 33 | result = worker.tick 34 | 35 | expect(result).to eq(true) 36 | end 37 | 38 | it 'cycles through reactors in round-robin fashion when called repeatedly' do 39 | # First call should use reactor1 (index 0) 40 | expect(router).to receive(:handle_next_event_for_reactor) 41 | .with(reactor1, worker.name) 42 | .and_return(true) 43 | worker.tick 44 | 45 | # Second call should use reactor2 (index 1) 46 | expect(router).to receive(:handle_next_event_for_reactor) 47 | .with(reactor2, worker.name) 48 | .and_return(false) 49 | worker.tick 50 | 51 | # Fourth call should wrap around to reactor1 (index 0) 52 | expect(router).to receive(:handle_next_event_for_reactor) 53 | .with(reactor1, worker.name) 54 | .and_return(false) 55 | worker.tick 56 | end 57 | 58 | it 'returns the result from Router#handle_next_event_for_reactor' do 59 | allow(router).to receive(:handle_next_event_for_reactor).and_return(false) 60 | expect(worker.tick).to eq(false) 61 | 62 | allow(router).to receive(:handle_next_event_for_reactor).and_return(true) 63 | expect(worker.tick).to eq(true) 64 | end 65 | 66 | context 'when called with a specific reactor' do 67 | it 'uses the provided reactor instead of cycling' do 68 | expect(router).to receive(:handle_next_event_for_reactor) 69 | .with(reactor2, worker.name) 70 | .and_return(true) 71 | 72 | worker.tick(reactor2) 73 | end 74 | 75 | it 'does not advance the reactor index when using a specific reactor' do 76 | # Call with specific reactor 77 | worker.tick(reactor2) 78 | 79 | # Next call without reactor should still use reactor1 
(first in cycle) 80 | expect(router).to receive(:handle_next_event_for_reactor) 81 | .with(reactor1, worker.name) 82 | .and_return(true) 83 | worker.tick 84 | end 85 | end 86 | end 87 | end 88 | -------------------------------------------------------------------------------- /spec/backends/sequel_backend_postgres_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | require 'sourced/backends/sequel_backend' 5 | 6 | RSpec.describe 'Sourced::Backends::SequelBackend with Postgres', type: :backend do 7 | subject(:backend) { Sourced::Backends::SequelBackend.new(db) } 8 | 9 | let(:db) do 10 | Sequel.postgres('sourced_test') 11 | end 12 | 13 | before do 14 | backend.setup!(Sourced.config) 15 | backend.uninstall 16 | backend.install 17 | end 18 | 19 | it_behaves_like 'a backend' 20 | 21 | describe 'worker heartbeats and stale claim reaping' do 22 | it 'records heartbeats and releases stale claims' do 23 | # Prepare a consumer group and a stream with one event 24 | backend.register_consumer_group('group_hb') 25 | 26 | evt = BackendExamples::Tests::SomethingHappened1.parse(stream_id: 's-hb', seq: 1, payload: { account_id: 1 }) 27 | backend.append_to_stream('s-hb', [evt]) 28 | 29 | # First record a heartbeat for a worker that will become stale 30 | stale_time = Time.now - 3600 31 | backend.worker_heartbeat(['dead-worker-1'], at: stale_time) 32 | 33 | # Insert a stale claimed offset for the dead worker 34 | group_fk = db[:sourced_consumer_groups].where(group_id: 'group_hb').get(:id) 35 | stream_fk = db[:sourced_streams].where(stream_id: 's-hb').get(:id) 36 | 37 | off_id = db[:sourced_offsets].insert( 38 | group_id: group_fk, 39 | stream_id: stream_fk, 40 | global_seq: 0, 41 | created_at: stale_time, 42 | claimed: true, 43 | claimed_at: stale_time, 44 | claimed_by: 'dead-worker-1' 45 | ) 46 | 47 | # Heartbeat two workers 48 | backend.worker_heartbeat(['live-worker-1', 
'live-worker-2']) 49 | expect(db[:sourced_workers].where(id: 'live-worker-1').count).to eq(1) 50 | 51 | # Reap stale claims 52 | released = backend.release_stale_claims(ttl_seconds: 60) 53 | expect(released).to be >= 1 54 | 55 | row = db[:sourced_offsets].where(id: off_id).first 56 | expect(row[:claimed]).to eq(false) 57 | expect(row[:claimed_by]).to be_nil 58 | expect(row[:claimed_at]).to be_nil 59 | 60 | # Ensure live worker claims are not reaped 61 | # Create a fresh claimed offset for a live worker on a different stream 62 | evt2 = BackendExamples::Tests::SomethingHappened1.parse(stream_id: 's-hb-2', seq: 1, payload: { account_id: 2 }) 63 | backend.append_to_stream('s-hb-2', [evt2]) 64 | stream2_fk = db[:sourced_streams].where(stream_id: 's-hb-2').get(:id) 65 | 66 | fresh_time = Time.now 67 | off2_id = db[:sourced_offsets].insert( 68 | group_id: group_fk, 69 | stream_id: stream2_fk, 70 | global_seq: 0, 71 | created_at: fresh_time, 72 | claimed: true, 73 | claimed_at: fresh_time, 74 | claimed_by: 'live-worker-1' 75 | ) 76 | 77 | backend.worker_heartbeat(['live-worker-1']) 78 | not_released = backend.release_stale_claims(ttl_seconds: 60) 79 | expect(not_released).to be_a(Integer) 80 | 81 | row2 = db[:sourced_offsets].where(id: off2_id).first 82 | expect(row2[:claimed]).to eq(true) 83 | expect(row2[:claimed_by]).to eq('live-worker-1') 84 | end 85 | end 86 | end 87 | -------------------------------------------------------------------------------- /Gemfile.lock: -------------------------------------------------------------------------------- 1 | GIT 2 | remote: https://github.com/ismasan/docco.git 3 | revision: 032150bac85215da8470ca302003a63cc39632b2 4 | specs: 5 | docco (0.1.0) 6 | kramdown 7 | kramdown-parser-gfm 8 | 9 | PATH 10 | remote: . 
11 | specs: 12 | sourced (0.0.1) 13 | async 14 | plumb (>= 0.0.17) 15 | 16 | GEM 17 | remote: https://rubygems.org/ 18 | specs: 19 | ast (2.4.2) 20 | async (2.17.0) 21 | console (~> 1.26) 22 | fiber-annotation 23 | io-event (~> 1.6, >= 1.6.5) 24 | bigdecimal (3.1.8) 25 | concurrent-ruby (1.3.4) 26 | console (1.27.0) 27 | fiber-annotation 28 | fiber-local (~> 1.1) 29 | json 30 | debug (1.9.2) 31 | irb (~> 1.10) 32 | reline (>= 0.3.8) 33 | diff-lcs (1.5.1) 34 | dotenv (3.1.4) 35 | fiber-annotation (0.2.0) 36 | fiber-local (1.1.0) 37 | fiber-storage 38 | fiber-storage (1.0.0) 39 | io-console (0.7.2) 40 | io-event (1.6.5) 41 | irb (1.14.0) 42 | rdoc (>= 4.0.0) 43 | reline (>= 0.4.2) 44 | json (2.7.2) 45 | kramdown (2.5.1) 46 | rexml (>= 3.3.9) 47 | kramdown-parser-gfm (1.1.0) 48 | kramdown (~> 2.0) 49 | language_server-protocol (3.17.0.4) 50 | lint_roller (1.1.0) 51 | logger (1.7.0) 52 | parallel (1.26.3) 53 | parser (3.3.7.1) 54 | ast (~> 2.4.1) 55 | racc 56 | pg (1.5.8) 57 | plumb (0.0.17) 58 | bigdecimal 59 | concurrent-ruby 60 | psych (5.1.2) 61 | stringio 62 | racc (1.8.1) 63 | rainbow (3.1.1) 64 | rake (13.2.1) 65 | rdoc (6.7.0) 66 | psych (>= 4.0.0) 67 | regexp_parser (2.10.0) 68 | reline (0.5.10) 69 | io-console (~> 0.5) 70 | rexml (3.4.4) 71 | rspec (3.13.0) 72 | rspec-core (~> 3.13.0) 73 | rspec-expectations (~> 3.13.0) 74 | rspec-mocks (~> 3.13.0) 75 | rspec-core (3.13.1) 76 | rspec-support (~> 3.13.0) 77 | rspec-expectations (3.13.3) 78 | diff-lcs (>= 1.2.0, < 2.0) 79 | rspec-support (~> 3.13.0) 80 | rspec-mocks (3.13.1) 81 | diff-lcs (>= 1.2.0, < 2.0) 82 | rspec-support (~> 3.13.0) 83 | rspec-support (3.13.1) 84 | rubocop (1.72.2) 85 | json (~> 2.3) 86 | language_server-protocol (~> 3.17.0.2) 87 | lint_roller (~> 1.1.0) 88 | parallel (~> 1.10) 89 | parser (>= 3.3.0.2) 90 | rainbow (>= 2.2.2, < 4.0) 91 | regexp_parser (>= 2.9.3, < 3.0) 92 | rubocop-ast (>= 1.38.0, < 2.0) 93 | ruby-progressbar (~> 1.7) 94 | unicode-display_width (>= 2.4.0, < 4.0) 95 | 
rubocop-ast (1.38.0) 96 | parser (>= 3.3.1.0) 97 | ruby-progressbar (1.13.0) 98 | sequel (5.84.0) 99 | bigdecimal 100 | sqlite3 (2.8.0-arm64-darwin) 101 | stringio (3.1.1) 102 | timecop (0.9.10) 103 | unicode-display_width (3.1.4) 104 | unicode-emoji (~> 4.0, >= 4.0.4) 105 | unicode-emoji (4.0.4) 106 | 107 | PLATFORMS 108 | arm64-darwin-23 109 | arm64-darwin-24 110 | 111 | DEPENDENCIES 112 | debug 113 | docco! 114 | dotenv 115 | logger 116 | pg 117 | rake (~> 13.0) 118 | rspec (~> 3.0) 119 | rubocop 120 | sequel 121 | sourced! 122 | sqlite3 123 | timecop 124 | 125 | BUNDLED WITH 126 | 4.0.0.beta2 127 | -------------------------------------------------------------------------------- /spec/configuration_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'sequel' 4 | 5 | RSpec.describe Sourced::Configuration do 6 | subject(:config) { described_class.new } 7 | 8 | it 'has a test backend by default' do 9 | expect(config.backend).to be_a(Sourced::Backends::TestBackend) 10 | end 11 | 12 | it 'has a default #error_strategy' do 13 | expect(config.error_strategy).to be_a(Sourced::ErrorStrategy) 14 | end 15 | 16 | specify '#error_strategy=' do 17 | st = Sourced::ErrorStrategy.new 18 | config.error_strategy = st 19 | expect(config.error_strategy).to eq(st) 20 | end 21 | 22 | describe '#error_strategy(&block)' do 23 | it 'configures the error strategy with a block' do 24 | config.error_strategy do |s| 25 | s.retry(times: 30, after: 50) 26 | end 27 | 28 | expect(config.error_strategy).to be_a(Sourced::ErrorStrategy) 29 | expect(config.error_strategy.max_retries).to eq(30) 30 | expect(config.error_strategy.retry_after).to eq(50) 31 | end 32 | end 33 | 34 | describe '#backend=' do 35 | it 'can configure backend with a Sequel database' do 36 | config.backend = Sequel.sqlite 37 | expect(config.backend).to be_a(Sourced::Backends::SequelBackend) 38 | end 39 | 40 | it 'accepts anything with the 
Backend interface' do 41 | backend = Struct.new( 42 | :installed?, 43 | :reserve_next_for_reactor, 44 | :append_to_stream, 45 | :read_correlation_batch, 46 | :read_stream, 47 | :updating_consumer_group, 48 | :register_consumer_group, 49 | :start_consumer_group, 50 | :stop_consumer_group, 51 | :reset_consumer_group, 52 | :stats, 53 | :transaction 54 | ) 55 | 56 | config.backend = backend.new(nil, nil, nil, nil, nil, nil) 57 | 58 | expect(config.backend).to be_a(backend) 59 | end 60 | 61 | it 'fails loudly if the backend does not implement the Backend interface' do 62 | expect { config.backend = Object.new }.to raise_error(Plumb::ParseError) 63 | end 64 | end 65 | 66 | specify '#executor' do 67 | expect(config.executor).to be_a(Sourced::AsyncExecutor) 68 | end 69 | 70 | describe 'subscribers' do 71 | it 'triggers subscribers on #setup!' do 72 | executor_class = nil 73 | config.subscribe do |c| 74 | executor_class = c.executor.class 75 | end 76 | config.executor = :thread 77 | config.setup! 
78 | expect(executor_class).to eq(Sourced::ThreadExecutor) 79 | end 80 | end 81 | 82 | describe '#executor=()' do 83 | specify ':async' do 84 | config.executor = :async 85 | expect(config.executor).to be_a(Sourced::AsyncExecutor) 86 | end 87 | 88 | specify ':thread' do 89 | config.executor = :thread 90 | expect(config.executor).to be_a(Sourced::ThreadExecutor) 91 | end 92 | 93 | specify 'any #start interface' do 94 | custom = Class.new do 95 | def start; end 96 | end 97 | 98 | config.executor = custom.new 99 | expect(config.executor).to be_a(custom) 100 | end 101 | 102 | specify 'an invalid executor' do 103 | expect { 104 | config.executor = Object.new 105 | }.to raise_error(ArgumentError) 106 | end 107 | end 108 | end 109 | -------------------------------------------------------------------------------- /spec/backends/concurrent_projectors_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | require 'sourced/backends/sequel_backend' 5 | 6 | module ConcurrencyExamples 7 | SomethingHappened = Sourced::Message.define('concurrent.something_happened') do 8 | attribute :number, Integer 9 | end 10 | 11 | class Store 12 | attr_reader :data, :queue, :trace 13 | 14 | def initialize 15 | @mutex = Mutex.new 16 | @queue = Queue.new 17 | @data = {} 18 | end 19 | 20 | def get(stream_id) 21 | @data[stream_id] 22 | end 23 | 24 | def set(stream_id, state) 25 | @mutex.synchronize do 26 | @data[stream_id] = state 27 | @queue << 1 28 | end 29 | end 30 | end 31 | 32 | STORE = Store.new 33 | 34 | class Projector < Sourced::Projector::StateStored 35 | state do |id| 36 | STORE.get(id) || {id:, seq: 0, seqs: []} 37 | end 38 | 39 | sync do |state:, events:, replaying:| 40 | STORE.set(state[:id], state) 41 | end 42 | 43 | event ConcurrencyExamples::SomethingHappened do |state, event| 44 | state[:seq] = event.seq 45 | state[:seqs] << event.seq 46 | end 47 | end 48 | end 49 | 50 | 
RSpec.describe 'Processing events concurrently', type: :backend do 51 | subject(:backend) { Sourced::Backends::SequelBackend.new(db) } 52 | 53 | let(:db) do 54 | # Sequel.sqlite 55 | Sequel.postgres('sourced_test') 56 | end 57 | 58 | let(:router) { Sourced::Router.new(backend:) } 59 | 60 | before do 61 | backend.clear! 62 | backend.uninstall 63 | backend.install 64 | 65 | router.register ConcurrencyExamples::Projector 66 | 67 | stream1_events = 100.times.map do |i| 68 | seq = i + 1 69 | ConcurrencyExamples::SomethingHappened.parse(stream_id: 'stream1', seq:, payload: { number: seq }) 70 | end 71 | 72 | stream2_events = 120.times.map do |i| 73 | seq = i + 1 74 | ConcurrencyExamples::SomethingHappened.parse(stream_id: 'stream2', seq:, payload: { number: seq }) 75 | end 76 | 77 | all_events = (stream2_events + stream1_events).flatten.compact 78 | all_events.each do |event| 79 | backend.append_to_stream(event.stream_id, [event]) 80 | end 81 | end 82 | 83 | specify 'consumes streams concurrently, maintaining per-stream event ordering, consuming all available events for each stream' do 84 | workers = 3.times.map do |i| 85 | Sourced::Worker.new(name: "worker-#{i}", router:) 86 | end 87 | 88 | threads = workers.map do |worker| 89 | Thread.new do 90 | worker.poll 91 | end 92 | end 93 | 94 | count = 0 95 | while count < 220 96 | ConcurrencyExamples::STORE.queue.pop 97 | count += 1 98 | end 99 | 100 | workers.each(&:stop) 101 | threads.each(&:join) 102 | 103 | duplicates = ConcurrencyExamples::STORE.data['stream1'][:seqs].group_by(&:itself).select {|k,v| v.size > 1 } 104 | expect(ConcurrencyExamples::STORE.data['stream1'][:seq]).to eq(100) 105 | expect(duplicates).to be_empty 106 | expect(ConcurrencyExamples::STORE.data['stream1'][:seqs]).to eq((1..100).to_a) 107 | expect(ConcurrencyExamples::STORE.data['stream2'][:seq]).to eq(120) 108 | expect(ConcurrencyExamples::STORE.data['stream2'][:seqs]).to eq((1..120).to_a) 109 | end 110 | end 111 | 
-------------------------------------------------------------------------------- /spec/evolve_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | module EvolveTest 6 | class Reactor 7 | include Sourced::Evolve 8 | 9 | Event1 = Sourced::Message.define('evolvetest.reactor.event1') 10 | Event2 = Sourced::Message.define('evolvetest.reactor.event2') 11 | Event3 = Sourced::Message.define('evolvetest.reactor.event3') 12 | 13 | state do |_id| 14 | [] 15 | end 16 | 17 | event Event1 do |state, event| 18 | state << event 19 | end 20 | 21 | event Event2 do |state, event| 22 | state << event 23 | end 24 | end 25 | 26 | class ChildReactor < Reactor 27 | event Event3 28 | end 29 | 30 | class Noop 31 | include Sourced::Evolve 32 | 33 | state do |_id| 34 | [] 35 | end 36 | 37 | event Reactor::Event1 38 | end 39 | 40 | class EvolveAll 41 | include Sourced::Evolve 42 | 43 | state do |_id| 44 | [] 45 | end 46 | 47 | evolve_all Reactor do |state, event| 48 | state << event 49 | end 50 | end 51 | 52 | class WithBefore < EvolveAll 53 | before_evolve do |state, event| 54 | state << event.seq 55 | end 56 | end 57 | end 58 | 59 | RSpec.describe Sourced::Evolve do 60 | describe '#evolve' do 61 | it 'evolves instance' do 62 | evt1 = EvolveTest::Reactor::Event1.new(stream_id: '1', seq: 1) 63 | evt2 = EvolveTest::Reactor::Event2.new(stream_id: '1', seq: 2) 64 | state = EvolveTest::Reactor.new.evolve([evt1, evt2]) 65 | expect(state).to eq([evt1, evt2]) 66 | end 67 | 68 | it 'accepts single message' do 69 | evt1 = EvolveTest::Reactor::Event1.new(stream_id: '1', seq: 1) 70 | instance = EvolveTest::Reactor.new 71 | instance.evolve(evt1) 72 | expect(instance.state).to eq([evt1]) 73 | end 74 | end 75 | 76 | specify '.handled_messages_for_evolve' do 77 | expect(EvolveTest::Reactor.handled_messages_for_evolve).to eq([ 78 | EvolveTest::Reactor::Event1, 79 | EvolveTest::Reactor::Event2 80 | ]) 
81 | 82 | expect(EvolveTest::ChildReactor.handled_messages_for_evolve).to eq([ 83 | EvolveTest::Reactor::Event1, 84 | EvolveTest::Reactor::Event2, 85 | EvolveTest::Reactor::Event3, 86 | ]) 87 | end 88 | 89 | specify '.evolve handlers without a block' do 90 | expect(EvolveTest::Noop.handled_messages_for_evolve).to eq([EvolveTest::Reactor::Event1]) 91 | 92 | evt1 = EvolveTest::Reactor::Event1.new(stream_id: '1', seq: 1) 93 | new_state = EvolveTest::Noop.new.evolve([evt1]) 94 | expect(new_state).to eq([]) 95 | end 96 | 97 | specify '.evolve_all' do 98 | evt1 = EvolveTest::Reactor::Event1.new(stream_id: '1', seq: 1) 99 | evt2 = EvolveTest::Reactor::Event2.new(stream_id: '1', seq: 2) 100 | evolver = EvolveTest::EvolveAll.new 101 | expect(evolver.state).to eq([]) 102 | new_state = evolver.evolve([evt1, evt2]) 103 | expect(new_state).to eq([evt1, evt2]) 104 | expect(evolver.state).to eq([evt1, evt2]) 105 | end 106 | 107 | specify '.before_evolve' do 108 | evt1 = EvolveTest::Reactor::Event1.new(stream_id: '1', seq: 1) 109 | evt2 = EvolveTest::Reactor::Event2.new(stream_id: '1', seq: 2) 110 | # evt3 is not handled by the reactor 111 | evt3 = EvolveTest::Reactor::Event3.new(stream_id: '1', seq: 3) 112 | new_state = EvolveTest::WithBefore.new.evolve([evt1, evt2, evt3]) 113 | expect(new_state).to eq([1, evt1, 2, evt2]) 114 | end 115 | end 116 | -------------------------------------------------------------------------------- /spec/command_methods_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | require 'sourced/command_methods' 5 | 6 | module CMDMethodsTest 7 | class Actor < Sourced::Actor 8 | include Sourced::CommandMethods 9 | 10 | UpdateAge = Sourced::Message.define('cmdtest.update_age') do 11 | attribute :age, Integer 12 | end 13 | 14 | command :start, name: String do |_, cmd| 15 | event :started, cmd.payload 16 | end 17 | 18 | event :started, name: String 19 | 20 | 
command UpdateAge do |_, cmd| 21 | event :age_updated, cmd.payload 22 | end 23 | 24 | event :age_updated, age: Integer 25 | end 26 | end 27 | 28 | RSpec.describe Sourced::CommandMethods do 29 | describe 'in-memory version (#start, #update_age)' do 30 | it 'creates method for symbolised commands' do 31 | actor = CMDMethodsTest::Actor.new(id: 'aa') 32 | cmd, new_events = actor.start(name: 'Joe') 33 | expect(cmd.valid?).to be(true) 34 | expect(actor.seq).to eq(1) 35 | expect(new_events).to match_sourced_messages([ 36 | CMDMethodsTest::Actor::Started.build('aa', name: 'Joe') 37 | ]) 38 | end 39 | 40 | it 'creates method for command class' do 41 | actor = CMDMethodsTest::Actor.new(id: 'aa') 42 | cmd, new_events = actor.update_age(age: 10) 43 | expect(cmd.valid?).to be(true) 44 | expect(new_events).to match_sourced_messages([ 45 | CMDMethodsTest::Actor::AgeUpdated.build('aa', age: 10) 46 | ]) 47 | end 48 | 49 | it 'returns invalid command on invalid arguments' do 50 | actor = CMDMethodsTest::Actor.new(id: 'aa') 51 | cmd, new_events = actor.start(name: 20) 52 | expect(cmd.valid?).to be(false) 53 | expect(actor.seq).to eq(0) 54 | expect(new_events).to eq([]) 55 | end 56 | end 57 | 58 | describe 'durable version that saves messages to backend (#start!, #update_age!)' do 59 | before do 60 | Sourced.config.backend.clear! 
61 | end 62 | 63 | it 'appends messages to backend' do 64 | actor = CMDMethodsTest::Actor.new(id: 'aa') 65 | cmd, new_events = actor.start!(name: 'Joe') 66 | expect(cmd.valid?).to be(true) 67 | expect(actor.seq).to eq(1) 68 | events = Sourced.config.backend.read_stream(actor.id) 69 | expect(events).to eq(new_events) 70 | expect(new_events).to match_sourced_messages([ 71 | CMDMethodsTest::Actor::Started.build('aa', name: 'Joe') 72 | ]) 73 | end 74 | 75 | it 'works when command is a class' do 76 | actor = CMDMethodsTest::Actor.new(id: 'aa') 77 | cmd, new_events = actor.update_age!(age: 20) 78 | expect(cmd.valid?).to be(true) 79 | expect(actor.seq).to eq(1) 80 | events = Sourced.config.backend.read_stream(actor.id) 81 | expect(events).to eq(new_events) 82 | expect(new_events).to match_sourced_messages([ 83 | CMDMethodsTest::Actor::AgeUpdated.build('aa', age: 20) 84 | ]) 85 | end 86 | 87 | it 'validates command' do 88 | actor = CMDMethodsTest::Actor.new(id: 'aa') 89 | cmd, new_events = actor.update_age!(age: 'nope') 90 | expect(cmd.valid?).to be(false) 91 | expect(actor.seq).to eq(0) 92 | expect(new_events).to eq([]) 93 | end 94 | 95 | it 'raises an exception if backend fails to append' do 96 | allow(Sourced.config.backend).to receive(:append_to_stream).and_return(false) 97 | actor = CMDMethodsTest::Actor.new(id: 'aa') 98 | expect { 99 | actor.update_age!(age: 20) 100 | }.to raise_error(Sourced::CommandMethods::FailedToAppendMessagesError) 101 | end 102 | end 103 | end 104 | -------------------------------------------------------------------------------- /spec/load_actor_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | RSpec.describe 'Sourced.load' do 6 | let(:stream_id) { 'abc' } 7 | 8 | let(:actor_class) do 9 | Class.new(Sourced::Actor) do 10 | state do |id| 11 | { id:, name: nil, age: 0 } 12 | end 13 | 14 | event :start, name: String do |state, event| 15 
| state[:name] = event.payload.name 16 | end 17 | 18 | event :update_age, age: Integer do |state, event| 19 | state[:age] = event.payload.age 20 | end 21 | end 22 | end 23 | 24 | before do 25 | Sourced.config.backend.clear! 26 | end 27 | 28 | describe '.load' do 29 | it 'loads history and evolves actor' do 30 | actor = actor_class.new(id: stream_id) 31 | 32 | e1 = actor_class[:start].parse(stream_id:, seq: 1, payload: { name: 'Joe' }) 33 | e2 = actor_class[:update_age].parse(stream_id:, seq: 2, payload: { age: 40 }) 34 | 35 | Sourced.config.backend.append_to_stream(stream_id, [e1, e2]) 36 | actor, events = Sourced.load(actor) 37 | expect(actor.seq).to eq(2) 38 | expect(actor.state[:age]).to eq(40) 39 | expect(events.map(&:id)).to eq([e1.id, e2.id]) 40 | end 41 | 42 | it 'load from class and stream_id' do 43 | e1 = actor_class[:start].parse(stream_id:, seq: 1, payload: { name: 'Joe' }) 44 | e2 = actor_class[:update_age].parse(stream_id:, seq: 2, payload: { age: 40 }) 45 | 46 | Sourced.config.backend.append_to_stream(stream_id, [e1, e2]) 47 | 48 | actor, events = Sourced.load(actor_class, stream_id) 49 | expect(actor).to be_a(actor_class) 50 | expect(actor.seq).to eq(2) 51 | expect(actor.state[:age]).to eq(40) 52 | expect(events.map(&:id)).to eq([e1.id, e2.id]) 53 | end 54 | 55 | it 'catches up to latest history' do 56 | actor = actor_class.new(id: stream_id) 57 | 58 | e1 = actor_class[:start].parse(stream_id:, seq: 1, payload: { name: 'Joe' }) 59 | e2 = actor_class[:update_age].parse(stream_id:, seq: 2, payload: { age: 40 }) 60 | 61 | Sourced.config.backend.append_to_stream(stream_id, [e1]) 62 | actor, events = Sourced.load(actor) 63 | expect(actor.seq).to eq(1) 64 | expect(events.map(&:id)).to eq([e1.id]) 65 | 66 | Sourced.config.backend.append_to_stream(stream_id, [e2]) 67 | actor, events = Sourced.load(actor) 68 | expect(actor.seq).to eq(2) 69 | expect(events.map(&:id)).to eq([e2.id]) 70 | end 71 | 72 | it 'loads events up to a given sequence' do 73 | actor = 
actor_class.new(id: stream_id) 74 | 75 | e1 = actor_class[:start].parse(stream_id:, seq: 1, payload: { name: 'Joe' }) 76 | e2 = actor_class[:update_age].parse(stream_id:, seq: 2, payload: { age: 40 }) 77 | 78 | Sourced.config.backend.append_to_stream(stream_id, [e1, e2]) 79 | 80 | actor, events = Sourced.load(actor, upto: 1) 81 | expect(actor.seq).to eq(1) 82 | expect(events.map(&:id)).to eq([e1.id]) 83 | end 84 | end 85 | 86 | describe '.history_for' do 87 | it 'loads events for an #id interface' do 88 | actor = actor_class.new(id: stream_id) 89 | 90 | e1 = actor_class[:start].parse(stream_id:, seq: 1, payload: { name: 'Joe' }) 91 | e2 = actor_class[:update_age].parse(stream_id:, seq: 2, payload: { age: 40 }) 92 | 93 | Sourced.config.backend.append_to_stream(stream_id, [e1, e2]) 94 | 95 | history = Sourced.history_for(actor) 96 | expect(history.map(&:class)).to eq([actor_class[:start], actor_class[:update_age]]) 97 | expect(history.map(&:id)).to eq([e1.id, e2.id]) 98 | 99 | history = Sourced.history_for(actor, upto: 1) 100 | expect(history.map(&:class)).to eq([actor_class[:start]]) 101 | expect(history.map(&:id)).to eq([e1.id]) 102 | end 103 | end 104 | end 105 | -------------------------------------------------------------------------------- /spec/injector_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | require 'sourced/injector' 5 | 6 | RSpec.describe Sourced::Injector do 7 | describe '.resolve_args' do 8 | context 'with class method' do 9 | let(:target) do 10 | Class.new do 11 | def self.handle(backend:, logger:, clock: 'default-clock') 12 | [backend, logger, clock] 13 | end 14 | end 15 | end 16 | 17 | it 'returns a list of argument names for an object and method' do 18 | args = described_class.resolve_args(target, :handle) 19 | expect(args).to eq(%i[backend logger clock]) 20 | end 21 | end 22 | 23 | context 'with method having only required keyword 
arguments' do 24 | let(:target) do 25 | Class.new do 26 | def self.handle(replaying:, history:) 27 | [replaying, history] 28 | end 29 | end 30 | end 31 | 32 | it 'returns only required keyword argument names' do 33 | args = described_class.resolve_args(target, :handle) 34 | expect(args).to eq(%i[replaying history]) 35 | end 36 | end 37 | 38 | context 'with method having only optional keyword arguments' do 39 | let(:target) do 40 | Class.new do 41 | def self.handle(backend: nil, logger: 'default') 42 | [backend, logger] 43 | end 44 | end 45 | end 46 | 47 | it 'returns optional keyword argument names' do 48 | args = described_class.resolve_args(target, :handle) 49 | expect(args).to eq(%i[backend logger]) 50 | end 51 | end 52 | 53 | context 'with method having no keyword arguments' do 54 | let(:target) do 55 | Class.new do 56 | def self.handle(event) 57 | event 58 | end 59 | end 60 | end 61 | 62 | it 'returns empty array' do 63 | args = described_class.resolve_args(target, :handle) 64 | expect(args).to eq([]) 65 | end 66 | end 67 | 68 | context 'with method having mixed positional and keyword arguments' do 69 | let(:target) do 70 | Class.new do 71 | def self.handle(event, stream_id, replaying:, history: nil) 72 | [event, stream_id, replaying, history] 73 | end 74 | end 75 | end 76 | 77 | it 'returns only keyword argument names' do 78 | args = described_class.resolve_args(target, :handle) 79 | expect(args).to eq(%i[replaying history]) 80 | end 81 | end 82 | 83 | context 'with method having keyword splat arguments' do 84 | let(:target) do 85 | Class.new do 86 | def self.handle(event, replaying:, **kwargs) 87 | [event, replaying, kwargs] 88 | end 89 | end 90 | end 91 | 92 | it 'ignores keyword splat and returns explicit keywords' do 93 | args = described_class.resolve_args(target, :handle) 94 | expect(args).to eq(%i[replaying]) 95 | end 96 | end 97 | 98 | context 'with instance method via initialize' do 99 | let(:target) do 100 | Class.new do 101 | def 
initialize(backend:, logger: nil) 102 | @backend = backend 103 | @logger = logger 104 | end 105 | end 106 | end 107 | 108 | it 'resolves constructor arguments' do 109 | args = described_class.resolve_args(target, :new) 110 | expect(args).to eq(%i[backend logger]) 111 | end 112 | end 113 | 114 | context 'with proc argument' do 115 | let(:proc_target) do 116 | proc { |event, replaying:, history: nil| [event, replaying, history] } 117 | end 118 | 119 | it 'resolves proc parameters' do 120 | args = described_class.resolve_args(proc_target) 121 | expect(args).to eq(%i[replaying history]) 122 | end 123 | end 124 | end 125 | end 126 | -------------------------------------------------------------------------------- /lib/sourced/backends/test_backend/group.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | module Backends 5 | class TestBackend 6 | class Group 7 | attr_reader :group_id 8 | attr_accessor :status, :error_context, :retry_at 9 | 10 | Offset = Struct.new(:stream_id, :index, :locked) 11 | 12 | def initialize(group_id, backend) 13 | @group_id = group_id 14 | @backend = backend 15 | @status = :active 16 | @error_context = {} 17 | @retry_at = nil 18 | @highest_index = -1 19 | reset! 20 | end 21 | 22 | def active? = @status == :active 23 | 24 | def stop(reason = nil) 25 | @error_context[:reason] = reason if reason 26 | @status = :stopped 27 | end 28 | 29 | def reset! 
30 | @offsets = {} 31 | reindex 32 | end 33 | 34 | def retry(time, ctx = {}) 35 | @error_context.merge!(ctx) 36 | @retry_at = time 37 | end 38 | 39 | def to_h 40 | active_offsets = @offsets.values.select { |o| o.index >= 0 } 41 | oldest_processed = (active_offsets.min_by(&:index)&.index || -1) + 1 42 | newest_processed = (active_offsets.max_by(&:index)&.index || -1) + 1 43 | stream_count = active_offsets.size 44 | 45 | { 46 | group_id:, 47 | status: @status.to_s, 48 | oldest_processed:, 49 | newest_processed:, 50 | stream_count:, 51 | retry_at: 52 | } 53 | end 54 | 55 | def reindex 56 | backend.messages.each do |e| 57 | @offsets[e.stream_id] ||= Offset.new(e.stream_id, -1, false) 58 | end 59 | end 60 | 61 | def ack_on(message_id, &) 62 | global_seq = backend.messages.find_index { |e| e.id == message_id } 63 | return unless global_seq 64 | 65 | evt = backend.messages[global_seq] 66 | offset = @offsets[evt.stream_id] 67 | if offset.locked 68 | raise Sourced::ConcurrentAckError, "Stream for message #{message_id} is being concurrently processed by #{group_id}" 69 | else 70 | offset.locked = true 71 | yield if block_given? 72 | offset.index = global_seq 73 | @highest_index = global_seq if global_seq > @highest_index 74 | offset.locked = false 75 | end 76 | end 77 | 78 | NOOP_FILTER = ->(_) { true } 79 | 80 | def reserve_next(handled_messages, time_window, process_actions, &) 81 | time_filter = time_window.is_a?(Time) ? 
->(e) { e.created_at > time_window } : NOOP_FILTER 82 | evt = nil 83 | offset = nil 84 | index = -1 85 | 86 | backend.messages.each.with_index do |e, idx| 87 | offset = @offsets[e.stream_id] 88 | if offset.locked # stream locked by another consumer in the group 89 | next 90 | elsif idx > offset.index && handled_messages.include?(e.class) && time_filter.call(e) # new message for the stream 91 | evt = e 92 | offset.locked = true 93 | index = idx 94 | break 95 | else # messages already consumed 96 | end 97 | end 98 | 99 | if evt 100 | replaying = @highest_index >= index 101 | if block_given? 102 | actions = yield(evt, replaying) 103 | 104 | acker = -> { ack(offset, index) } 105 | process_actions.(group_id, actions, acker, evt, offset) 106 | end 107 | 108 | offset.locked = false 109 | end 110 | 111 | evt 112 | end 113 | 114 | private 115 | 116 | def ack(offset, index) 117 | # ACK reactor/message 118 | offset.index = index 119 | @highest_index = index if index > @highest_index 120 | end 121 | 122 | attr_reader :backend 123 | end 124 | 125 | end 126 | end 127 | end 128 | -------------------------------------------------------------------------------- /lib/sourced/worker.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'console' #  comes with Async 4 | require 'sourced/router' 5 | 6 | module Sourced 7 | # A Worker is responsible for processing messages in the background. 8 | # Workers poll registered async reactors for available messages and dispatch 9 | # them to reactors in a round-robin fashion. 10 | # 11 | # Workers use a polling model with configurable intervals and can be run 12 | # in multiple processes for horizontal scaling. Each worker maintains its 13 | # own set of reactors and processes them independently.
14 | # 15 | # @example Create and start a worker 16 | # worker = Sourced::Worker.new(name: 'worker-1') 17 | # worker.poll # Start polling for work 18 | # 19 | class Worker 20 | DEFAULT_SHUFFLER = ->(array) { array.shuffle } 21 | 22 | # Process one tick of work using a new worker instance. 23 | # @return [Boolean] true if work was processed, false otherwise 24 | def self.tick 25 | new.tick 26 | end 27 | 28 | # @!attribute [r] name 29 | # @return [String] Unique identifier for this worker instance 30 | attr_reader :name 31 | 32 | # Initialize a new worker instance. 33 | # The worker will automatically discover and shuffle async reactors 34 | # to ensure fair distribution of work across multiple worker instances. 35 | # 36 | # @param logger [Object] Logger instance for worker output (defaults to configured logger) 37 | # @param name [String] Unique name for this worker (defaults to random hex) 38 | # @param poll_interval [Float] Seconds to sleep between polling cycles (defaults to 0.01) 39 | def initialize( 40 | logger: Sourced.config.logger, 41 | name: SecureRandom.hex(4), 42 | poll_interval: 0.01, 43 | router: Sourced::Router, 44 | shuffler: DEFAULT_SHUFFLER 45 | ) 46 | @logger = logger 47 | @running = false 48 | @name = [Process.pid, name].join('-') 49 | @poll_interval = poll_interval 50 | @router = router 51 | # TODO: If reactors have a :weight, we can use that 52 | # to populate this array according to the weight 53 | # so that some reactors are picked more often than others 54 | @reactors = shuffler.call(@router.async_reactors.filter { |r| r.handled_messages.any? }.to_a) 55 | @reactor_index = 0 56 | end 57 | 58 | # Signal the worker to stop polling. 59 | # The worker will finish its current cycle and then stop. 60 | # 61 | # @return [void] 62 | def stop 63 | @running = false 64 | end 65 | 66 | # Start polling for work continuously until stopped. 67 | # This method will block and process messages in a loop 68 | # until {#stop} is called. 
69 | # 70 | # @return [void] 71 | def poll 72 | if @reactors.empty? 73 | logger.warn "Worker #{name}: No reactors to poll" 74 | return false 75 | end 76 | 77 | @running = true 78 | while @running 79 | tick 80 | # This sleep seems to be necessary or workers in differet processes will not be able to get the lock 81 | sleep @poll_interval 82 | end 83 | logger.info "Worker #{name}: Polling stopped" 84 | end 85 | 86 | # Process one tick of work for a specific reactor. 87 | # If no reactor is specified, uses the next reactor in round-robin order. 88 | # 89 | # @param reactor [Class, nil] Specific reactor to process (defaults to next in rotation) 90 | # @return [Boolean] true if an event was processed, false otherwise 91 | def tick(reactor = next_reactor) 92 | @router.handle_next_event_for_reactor(reactor, name) 93 | end 94 | 95 | # Get the next reactor in round-robin order. 96 | # Cycles through all available async reactors to ensure fair processing. 97 | # 98 | # @return [Class] Next reactor class to process 99 | def next_reactor 100 | @reactor_index = 0 if @reactor_index >= @reactors.size 101 | reactor = @reactors[@reactor_index] 102 | @reactor_index += 1 103 | reactor 104 | end 105 | 106 | private 107 | 108 | attr_reader :logger 109 | end 110 | end 111 | -------------------------------------------------------------------------------- /CLAUDE.md: -------------------------------------------------------------------------------- 1 | # CLAUDE.md 2 | 3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 4 | 5 | ## Project Overview 6 | 7 | Sourced is an Event Sourcing / CQRS library for Ruby built around the "Decide, Evolve, React" pattern. It provides eventual consistency by default with an actor-like execution model for building event-sourced applications. 
8 | 9 | ## Core Architecture 10 | 11 | ### Key Components 12 | - **Actors**: Classes that hold state, handle commands, produce events, and react to events (lib/sourced/actor.rb) 13 | - **Commands**: Intents to effect change in the system 14 | - **Events**: Facts describing state changes that have occurred 15 | - **Projectors**: React to events to build views, caches, or other representations (lib/sourced/projector.rb) 16 | - **Backends**: Storage adapters (ActiveRecord, Sequel, test backend) in lib/sourced/backends/ 17 | - **Router**: Routes commands and events to appropriate handlers (lib/sourced/router.rb) 18 | - **Supervisor**: Manages background worker processes (lib/sourced/supervisor.rb) 19 | 20 | ### Message Flow 21 | Commands → Actors (Decide) → Events → Storage → Reactors (React) → New Commands 22 | 23 | ### Concurrency Model 24 | Sourced processes events by acquiring locks on `[reactor_group_id][stream_id]` combinations, ensuring sequential processing within streams while allowing concurrent processing across different streams. 25 | 26 | ## Development Commands 27 | 28 | ### Testing 29 | ```bash 30 | # Run all tests (default rake task) 31 | rake 32 | 33 | # Run specific test file 34 | bundle exec rspec spec/actor_spec.rb 35 | 36 | # Run backend tests 37 | bundle exec rspec spec/backends/ 38 | 39 | # Run with specific database (PostgreSQL required for some tests) 40 | DATABASE_URL=postgres://localhost/sourced_test bundle exec rspec 41 | ``` 42 | 43 | ### Database Setup for Tests 44 | The gem supports multiple backends: 45 | - PostgreSQL (via Sequel or ActiveRecord) 46 | - SQLite (via Sequel or ActiveRecord) 47 | - In-memory test backend 48 | 49 | Test databases are automatically created/cleared by the test suite. 
50 | 51 | ### Console/IRB 52 | ```bash 53 | # Interactive console for experimentation 54 | bin/console 55 | ``` 56 | 57 | ## Configuration Patterns 58 | 59 | ### Backend Configuration 60 | ```ruby 61 | # PostgreSQL via Sequel (default production setup) 62 | Sourced.configure do |config| 63 | config.backend = Sequel.connect(ENV.fetch('DATABASE_URL')) 64 | end 65 | 66 | # Test backend (default, in-memory) 67 | Sourced.configure do |config| 68 | config.backend = Sourced::Backends::TestBackend.new 69 | end 70 | ``` 71 | 72 | ### Registering Components 73 | ```ruby 74 | # Register actors and projectors for background processing 75 | Sourced.register(SomeActor) 76 | Sourced.register(SomeProjector) 77 | ``` 78 | 79 | ## Key DSL Patterns 80 | 81 | ### Actor Definition 82 | ```ruby 83 | class SomeActor < Sourced::Actor 84 | # Initial state factory 85 | state do |id| 86 | { id: id, status: 'new' } 87 | end 88 | 89 | # Command handler 90 | command :create_something, name: String do |state, cmd| 91 | event :something_created, cmd.payload 92 | end 93 | 94 | # Event handler (state evolution) 95 | event :something_created, name: String do |state, event| 96 | state[:name] = event.payload.name 97 | end 98 | 99 | # Reaction (workflow orchestration) 100 | reaction :something_created do |event| 101 | stream_for(event).command :next_step 102 | end 103 | end 104 | ``` 105 | 106 | ### Message Definitions 107 | ```ruby 108 | # Expanded syntax for complex validation/coercion 109 | CreateLead = Sourced::Command.define('leads.create') do 110 | attribute :name, Types::String.present 111 | attribute :email, Types::Email.present 112 | end 113 | 114 | LeadCreated = Sourced::Event.define('leads.created') do 115 | attribute :name, String 116 | attribute :email, String 117 | end 118 | ``` 119 | 120 | ## Backend Implementation Notes 121 | 122 | - All backends must implement the BackendInterface defined in lib/sourced/configuration.rb 123 | - SequelBackend is the main production backend 
module Sourced
  # The Supervisor manages a pool of background workers that process events and commands.
  # It relies on the configured executor (Async by default) to coordinate multiple workers running concurrently
  # and handles graceful shutdown via signal handling.
  #
  # The supervisor automatically sets up signal handlers for INT and TERM signals
  # to ensure workers shut down cleanly when the process is terminated.
  #
  # @example Start a supervisor with 10 workers
  #   Sourced::Supervisor.start(count: 10)
  #
  # @example Create and start manually
  #   supervisor = Sourced::Supervisor.new(count: 5)
  #   supervisor.start # This will block until interrupted
  class Supervisor
    # Start a new supervisor instance with the given options.
    # This is a convenience method that creates and starts a supervisor.
    #
    # @param args [Hash] Arguments passed to {#initialize}
    # @return [void] This method blocks until the supervisor is stopped
    # @see #initialize
    def self.start(...)
      new(...).start
    end

    # Initialize a new supervisor instance.
    # Workers and house-keepers are created when {#start} is called, not during initialization.
    #
    # @param logger [Object] Logger instance for supervisor output (defaults to configured logger)
    # @param count [Integer] Number of worker fibers to spawn (defaults to 2)
    # @param housekeeping_count [Integer] Number of HouseKeeper instances to spawn (defaults to 1)
    # @param housekeeping_interval [Numeric] Seconds between house-keeping ticks (defaults to configured value)
    # @param housekeeping_heartbeat_interval [Numeric] Seconds between worker heartbeats (defaults to configured value)
    # @param housekeeping_claim_ttl_seconds [Numeric] Seconds before a worker's claims are considered stale (defaults to configured value)
    # @param executor [Object] Executor instance for running concurrent workers (defaults to configured executor)
    # @param router [Object] Router providing the backend and reactor registry (defaults to Sourced::Router)
    def initialize(
      logger: Sourced.config.logger,
      count: 2,
      housekeeping_count: 1,
      housekeeping_interval: Sourced.config.housekeeping_interval,
      housekeeping_heartbeat_interval: Sourced.config.housekeeping_heartbeat_interval,
      housekeeping_claim_ttl_seconds: Sourced.config.housekeeping_claim_ttl_seconds,
      executor: Sourced.config.executor,
      router: Sourced::Router
    )
      @logger = logger
      @count = count
      @housekeeping_count = housekeeping_count
      @housekeeping_interval = housekeeping_interval
      @housekeeping_heartbeat_interval = housekeeping_heartbeat_interval
      @housekeeping_claim_ttl_seconds = housekeeping_claim_ttl_seconds
      @executor = executor
      @router = router
      # Populated by #start; kept as an ivar so the house-keepers'
      # worker_ids_provider lambda always sees the live worker list.
      @workers = []
    end

    # Start the supervisor and all worker fibers.
    # This method blocks until the supervisor receives a shutdown signal.
    # Workers are spawned as concurrent tasks using the configured executor
    # and will begin polling for events and commands immediately.
    # TODO: consistently inject config, defaulting to Sourced.config values
    #
    # @return [void] Blocks until interrupted by signal
    def start
      logger.info("Starting sync supervisor with #{@count} workers and #{@executor} executor")
      set_signal_handlers

      @housekeepers = @housekeeping_count.times.map do |i|
        HouseKeeper.new(
          logger:,
          backend: router.backend,
          name: "HouseKeeper-#{i}",
          interval: @housekeeping_interval,
          heartbeat_interval: @housekeeping_heartbeat_interval,
          claim_ttl_seconds: @housekeeping_claim_ttl_seconds,
          # Provide live worker IDs for heartbeats
          worker_ids_provider: -> { @workers.map(&:name) }
        )
      end

      @workers = @count.times.map do |i|
        # TODO: worker names using Process.pid, current thread and fiber id
        Worker.new(logger:, router:, name: "worker-#{i}")
      end

      # Each house-keeper and worker runs as its own concurrent task.
      # The executor's #start block is expected to return when all tasks finish.
      @executor.start do |task|
        @housekeepers.each do |hk|
          task.spawn do
            hk.work
          end
        end

        @workers.each do |wrk|
          task.spawn do
            wrk.poll
          end
        end
      end
    end

    # Stop all workers gracefully.
    # Sends stop signals to all workers and house-keepers; each finishes
    # its current cycle before its loop exits.
    # NOTE(review): 'All workers stopped' is logged right after signalling,
    # i.e. before in-flight ticks necessarily complete — confirm intent.
    #
    # @return [void]
    def stop
      logger.info("Stopping #{@workers.size} workers and #{@housekeepers.size} house-keepers")
      @workers.each(&:stop)
      @housekeepers.each(&:stop)
      logger.info('All workers stopped')
    end

    # Set up signal handlers for graceful shutdown.
    # Traps INT (Ctrl+C) and TERM signals to call {#stop}.
    # NOTE(review): #stop logs from inside the trap context; some logger
    # implementations use mutexes, which Ruby forbids in trap handlers —
    # verify the configured logger is trap-safe.
    #
    # @return [void]
    # @api private
    def set_signal_handlers
      Signal.trap('INT') { stop }
      Signal.trap('TERM') { stop }
    end

    private

    attr_reader :logger, :router
  end
end
| end 55 | 56 | def handle(message, replaying:) 57 | raise NotImplementedError, 'implement me in subclasses' 58 | end 59 | 60 | private 61 | 62 | attr_reader :logger 63 | 64 | # Override Evolve#__update_on_evolve 65 | def __update_on_evolve(event) 66 | @seq = event.seq 67 | end 68 | 69 | # A StateStored projector fetches initial state from 70 | # storage somewhere (DB, files, API) 71 | # And then after reacting to events and updating state, 72 | # it can save it back to the same or different storage. 73 | # @example 74 | # 75 | # class CartListings < Sourced::Projector::StateStored 76 | # # Fetch listing record from DB, or new one. 77 | # state do |id| 78 | # CartListing.find_or_initialize(id) 79 | # end 80 | # 81 | # # Evolve listing record from events 82 | # evolve Carts::ItemAdded do |listing, event| 83 | # listing.total += event.payload.price 84 | # end 85 | # 86 | # # Sync listing record back to DB 87 | # sync do |state:, events:, replaying:| 88 | # state.save! 89 | # end 90 | # end 91 | class StateStored < self 92 | class << self 93 | # State-stored version doesn't load :history 94 | def handle(message, replaying: false) 95 | new(id: identity_from(message)).handle(message, replaying:) 96 | end 97 | end 98 | 99 | def handle(message, replaying:) 100 | # Load state from storage 101 | state 102 | # Evolve new message 103 | evolve(message) 104 | # Collect sync actions 105 | actions = sync_actions_with(state:, events: [message], replaying:) 106 | # Replaying? Just return sync action 107 | return actions if replaying 108 | 109 | # Not replaying. Also run reactions 110 | if reacts_to?(message) 111 | actions += Actions.build_for(react(message)) 112 | end 113 | 114 | actions 115 | end 116 | end 117 | 118 | # An EventSourced projector fetches initial state from 119 | # past events in the event store. 120 | # And then after reacting to events and updating state, 121 | # it can save it to a DB table, a file, etc. 
122 | # @example 123 | # 124 | # class CartListings < Sourced::Projector::EventSourced 125 | # # Initial in-memory state 126 | # state do |id| 127 | # { id:, total: 0 } 128 | # end 129 | # 130 | # # Evolve listing record from events 131 | # evolve Carts::ItemAdded do |listing, event| 132 | # listing[:total] += event.payload.price 133 | # end 134 | # 135 | # # Sync listing record to a file 136 | # sync do |state:, events:, replaying:| 137 | # File.write("/listings/#{state[:id]}.json", JSON.dump(state)) 138 | # end 139 | # end 140 | class EventSourced < self 141 | def handle(message, replaying:, history:) 142 | # Evolve new message from history 143 | evolve(history) 144 | # Collect sync actions 145 | actions = sync_actions_with(state:, events: [message], replaying:) 146 | # Replaying? Just return sync action 147 | return actions if replaying 148 | 149 | # Not replaying. Also run reactions 150 | if reacts_to?(message) 151 | actions += Actions.build_for(react(message)) 152 | end 153 | 154 | actions 155 | end 156 | end 157 | end 158 | end 159 | -------------------------------------------------------------------------------- /spec/react_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | class ReactTestReactor 6 | include Sourced::React 7 | 8 | Event1 = Sourced::Message.define('reacttest.event1') 9 | Event2 = Sourced::Message.define('reacttest.event2') 10 | Event3 = Sourced::Message.define('reacttest.event3') 11 | Event4 = Sourced::Message.define('reacttest.event4') 12 | Event5 = Sourced::Message.define('reacttest.event5') 13 | Event6 = Sourced::Message.define('reacttest.event6') 14 | Event7 = Sourced::Message.define('reacttest.event7') 15 | Nope = Sourced::Message.define('reacttest.nope') 16 | 17 | Cmd1 = Sourced::Message.define('reacttest.cmd1') do 18 | attribute :name, String 19 | end 20 | Cmd2 = Sourced::Message.define('reacttest.cmd2') 21 | Cmd3 = 
Sourced::Message.define('reacttest.cmd3') 22 | NotifyWildcardReaction = Sourced::Message.define('reacttest.NotifyWildcardReaction') do 23 | attribute :state 24 | attribute :event 25 | end 26 | NotifyVariableReaction = Sourced::Message.define('reacttest.NotifyVariableReaction') do 27 | attribute :event 28 | end 29 | 30 | def state = { name: 'test' } 31 | 32 | def self.handled_messages_for_evolve = [Event1, Event4, Event5] 33 | 34 | reaction Event1 do |state, event| 35 | dispatch(Cmd1, name: state[:name]).to(event) 36 | end 37 | 38 | reaction Event2 do |_state, event| 39 | dispatch(Cmd2) 40 | dispatch(Cmd3) 41 | .with_metadata(greeting: 'Hi!') 42 | .at(Time.now + 10) 43 | end 44 | 45 | reaction Event3 do |_state, _event| 46 | nil 47 | end 48 | 49 | # This wildcard reaction will be registered 50 | # for all events present in .handled_messages_for_evolve 51 | # that do not have custom reactions 52 | reaction do |state, event| 53 | dispatch NotifyWildcardReaction, state:, event: 54 | end 55 | 56 | # This one will register handlers for multiple events 57 | reaction Event6, Event7 do |state, event| 58 | dispatch NotifyVariableReaction, event: 59 | end 60 | end 61 | 62 | RSpec.describe Sourced::React do 63 | specify '.handled_messages_for_react' do 64 | expect(ReactTestReactor.handled_messages_for_react).to eq([ 65 | ReactTestReactor::Event1, 66 | ReactTestReactor::Event2, 67 | ReactTestReactor::Event3, 68 | ReactTestReactor::Event4, 69 | ReactTestReactor::Event5, 70 | ReactTestReactor::Event6, 71 | ReactTestReactor::Event7, 72 | ]) 73 | end 74 | 75 | specify '#reacts_to?(message)' do 76 | reactor = ReactTestReactor.new 77 | evt1 = ReactTestReactor::Event1.new(stream_id: '1') 78 | evt2 = ReactTestReactor::Nope.new(stream_id: '1') 79 | expect(reactor.reacts_to?(evt1)).to be(true) 80 | expect(reactor.reacts_to?(evt2)).to be(false) 81 | end 82 | 83 | describe '#react' do 84 | it 'returns messages to append or schedule' do 85 | now = Time.now 86 | Timecop.freeze(now) do 87 | 
evt1 = ReactTestReactor::Event1.new(stream_id: '1', seq: 1) 88 | evt2 = ReactTestReactor::Event2.new(stream_id: '1', seq: 2) 89 | commands = ReactTestReactor.new.react([evt1, evt2]) 90 | expect(commands.map(&:class)).to eq([ 91 | ReactTestReactor::Cmd1, 92 | ReactTestReactor::Cmd2, 93 | ReactTestReactor::Cmd3 94 | ]) 95 | expect(commands.map { |e| e.metadata[:producer] }).to eq(%w[ReactTestReactor ReactTestReactor ReactTestReactor]) 96 | expect(commands.first.causation_id).to eq(evt1.id) 97 | expect(commands.first.created_at).to eq(now) 98 | expect(commands.first.payload.name).to eq('test') 99 | expect(commands.last.causation_id).to eq(evt2.id) 100 | expect(commands.last.metadata[:greeting]).to eq('Hi!') 101 | expect(commands.last.created_at).to eq(now + 10) 102 | end 103 | end 104 | 105 | it 'accepts single message' do 106 | evt1 = ReactTestReactor::Event1.new(stream_id: '1') 107 | commands = ReactTestReactor.new.react(evt1) 108 | expect(commands.first).to be_a(ReactTestReactor::Cmd1) 109 | end 110 | 111 | it 'returns an empty array if the message is not supported' do 112 | evt1 = ReactTestReactor::Nope.new(stream_id: '1') 113 | commands = ReactTestReactor.new.react(evt1) 114 | expect(commands.empty?).to be(true) 115 | end 116 | 117 | it 'runs wildcard reactions' do 118 | evt4 = ReactTestReactor::Event4.new(stream_id: '1', seq: 1) 119 | commands = ReactTestReactor.new.react(evt4) 120 | expect(commands.map(&:class)).to eq([ReactTestReactor::NotifyWildcardReaction]) 121 | expect(commands.first.payload.state[:name]).to eq('test') 122 | expect(commands.first.payload.event).to eq(evt4) 123 | end 124 | 125 | it 'runs reactions to multiple events' do 126 | evt6 = ReactTestReactor::Event6.new(stream_id: '1', seq: 1) 127 | commands = ReactTestReactor.new.react(evt6) 128 | expect(commands.map(&:class)).to eq([ReactTestReactor::NotifyVariableReaction]) 129 | expect(commands.first.payload.event).to eq(evt6) 130 | 131 | evt7 = ReactTestReactor::Event7.new(stream_id: '1', seq: 
1) 132 | commands = ReactTestReactor.new.react(evt7) 133 | expect(commands.map(&:class)).to eq([ReactTestReactor::NotifyVariableReaction]) 134 | expect(commands.first.payload.event).to eq(evt7) 135 | end 136 | end 137 | end 138 | -------------------------------------------------------------------------------- /lib/sourced/configuration.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'console' #  comes with async gem 4 | require 'sourced/types' 5 | require 'sourced/backends/test_backend' 6 | require 'sourced/error_strategy' 7 | require 'sourced/async_executor' 8 | 9 | module Sourced 10 | # Configure a Sourced app. 11 | # @example 12 | # 13 | # Sourced.configure do |config| 14 | # config.backend = Sequel.Postgres('postgres://localhost/mydb') 15 | # config.logger = Logger.new(STDOUT) 16 | # end 17 | # 18 | class Configuration 19 | #  Backends must expose these methods 20 | BackendInterface = Types::Interface[ 21 | :installed?, 22 | :reserve_next_for_reactor, 23 | :append_to_stream, 24 | :read_correlation_batch, 25 | :read_stream, 26 | :transaction, 27 | :stats, 28 | :updating_consumer_group, 29 | :register_consumer_group, 30 | :start_consumer_group, 31 | :stop_consumer_group, 32 | :reset_consumer_group 33 | ] 34 | 35 | # Interface that all executors must implement 36 | # @see AsyncExecutor 37 | # @see ThreadExecutor 38 | ExecutorInterface = Types::Interface[ 39 | :start 40 | ] 41 | 42 | attr_accessor :logger 43 | # House-keeping configuration 44 | # interval: main loop tick for housekeeping (seconds) 45 | # heartbeat interval: how often to record heartbeats (seconds) 46 | # claim_ttl_seconds: how long before a worker is considered dead for claim release 47 | attr_accessor( 48 | :housekeeping_interval, 49 | :housekeeping_heartbeat_interval, 50 | :housekeeping_claim_ttl_seconds 51 | ) 52 | 53 | attr_reader :backend, :executor 54 | 55 | def initialize 56 | @logger = Console 57 | 
@backend = Backends::TestBackend.new 58 | @error_strategy = ErrorStrategy.new 59 | @executor = AsyncExecutor.new 60 | @setup = false 61 | # Defaults for house-keeping 62 | @housekeeping_interval = 3 63 | @housekeeping_heartbeat_interval = 5 64 | @housekeeping_claim_ttl_seconds = 120 65 | @subscribers = [] 66 | end 67 | 68 | def subscribe(callable = nil, &block) 69 | callable ||= block 70 | @subscribers << callable 71 | self 72 | end 73 | 74 | def setup! 75 | return if @setup 76 | 77 | @backend.setup!(self) if @backend.respond_to?(:setup!) 78 | @subscribers.each { |s| s.call(self) } 79 | @setup = true 80 | end 81 | 82 | # Configure the backend for the app. 83 | # Defaults to in-memory TestBackend 84 | # @param bnd [BackendInterface] 85 | def backend=(bnd) 86 | @backend = case bnd.class.name 87 | when 'Sequel::Postgres::Database', 'Sequel::SQLite::Database' 88 | require 'sourced/backends/sequel_backend' 89 | Sourced::Backends::SequelBackend.new(bnd) 90 | else 91 | BackendInterface.parse(bnd) 92 | end 93 | end 94 | 95 | # Configure the executor for the app. 96 | # Supports both symbol shortcuts and executor instances. 97 | # Defaults to AsyncExecutor for fiber-based concurrency. 
98 | # 99 | # @param ex [Symbol, Object] The executor to use 100 | # @option ex [Symbol] :async Use AsyncExecutor (fiber-based, default) 101 | # @option ex [Symbol] :thread Use ThreadExecutor (thread-based) 102 | # @option ex [ExecutorInterface] Custom executor instance 103 | # @raise [ArgumentError] if executor doesn't implement ExecutorInterface 104 | # 105 | # @example Using symbol shortcuts 106 | # config.executor = :async # Default fiber-based 107 | # config.executor = :thread # Thread-based for CPU-intensive work 108 | # 109 | # @example Using custom executor 110 | # config.executor = MyCustomExecutor.new 111 | def executor=(ex) 112 | @executor = case ex 113 | when :async 114 | AsyncExecutor.new 115 | when :thread 116 | require 'sourced/thread_executor' 117 | ThreadExecutor.new 118 | when ExecutorInterface 119 | ex 120 | else 121 | raise ArgumentError, "executor=(e) must support interface #{ExecutorInterface.inspect}" 122 | end 123 | end 124 | 125 | # Assign an error strategy 126 | # @param strategy [ErrorStrategy, #call(Exception, Sourced::Message, Group)] 127 | # @raise [ArgumentError] if strategy does not respond to #call 128 | def error_strategy=(strategy) 129 | raise ArgumentError, 'Must respond to #call(Exception, Sourced::Message, Group)' unless strategy.respond_to?(:call) 130 | 131 | @error_strategy = strategy 132 | end 133 | 134 | # Configure a built-in Sourced::ErrorStrategy 135 | # @example 136 | # config.error_strategy do |s| 137 | # s.retry(times: 30, after: 50, backoff: ->(retry_after, retry_count) { retry_after * retry_count }) 138 | # 139 | # s.on_retry do |n, exception, message, later| 140 | # puts "Retrying #{n} times" } 141 | # end 142 | # 143 | # s.on_stop do |exception, message| 144 | # Sentry.capture_exception(exception) 145 | # end 146 | # end 147 | # 148 | # @yieldparam s [ErrorStrategy] 149 | def error_strategy(&blk) 150 | return @error_strategy unless block_given? 
module Sourced
  # This mixin provides an .event macro
  # to register event handlers for a class
  # These event handlers are "evolvers", ie. they evolve
  # a piece of state based on events.
  # More here: https://ismaelcelis.com/posts/decide-evolve-react-pattern-in-ruby/#2-evolve
  #
  # Example:
  #
  #   class Projector
  #     include Sourced::Evolve
  #
  #     state do
  #       { status: 'new' }
  #     end
  #
  #     event SomethingHappened do |state, event|
  #       state[:status] = 'done'
  #     end
  #   end
  #
  #   pr = Projector.new
  #   state = pr.evolve([SomethingHappened.new])
  #   state[:status] # => 'done'
  #
  # From the outside, this mixin exposes .handled_messages_for_evolve
  #
  #   .handled_messages_for_evolve() Array
  #
  # It also provides a .before_evolve and .evolve_all macros
  # See comments in code for details.
  module Evolve
    # Prefix used to build per-event-class handler method names.
    PREFIX = 'evolution'
    # Fallback handler used by .event when no block is given, so the event
    # type is still registered (and seq still advances) without side effects.
    NOOP_HANDLER = ->(*_) { nil }

    def self.included(base)
      super
      base.extend ClassMethods
    end

    # Initialize in-memory state for this evolver.
    # Override this method (or use the .state macro) to provide a custom initial state.
    # @param _id [Object] the evolver's identity, as returned by #id
    # @return [Any] the initial state
    def init_state(_id)
      nil
    end

    # Memoized current state, lazily built from #init_state on first access.
    # @return [Any]
    def state
      @state ||= init_state(id)
    end

    # Override this in host class
    def id = nil

    # Apply a list of events to a piece of state
    # by running event handlers registered in this class
    # via the .event macro.
    #
    # @param events [Array, Sourced::Message] a single event or a list of events
    # @return [Object] the evolved state
    def evolve(events)
      Array(events).each do |event|
        method_name = Sourced.message_method_name(Evolve::PREFIX, event.class.to_s)
        # We might be evolving old events in history
        # even if we don't have handlers for them anymore
        # we still need to increment seq
        __update_on_evolve(event)
        if respond_to?(method_name)
          before_evolve(state, event)
          send(method_name, state, event)
        end
      end

      state
    end

    # Default no-op hook; replaced by the .before_evolve macro.
    private def before_evolve(*_)
      nil
    end

    # Per-event bookkeeping hook (e.g. tracking seq); hosts may override.
    private def __update_on_evolve(event)
      # Noop
    end

    module ClassMethods
      # Copy registered event types down to subclasses so inherited
      # evolvers handle the same messages as their parent.
      def inherited(subclass)
        super
        handled_messages_for_evolve.each do |evt_type|
          subclass.handled_messages_for_evolve << evt_type
        end
      end

      # @return [Array<Class>] event classes registered via .event
      def handled_messages_for_evolve
        @handled_messages_for_evolve ||= []
      end

      # Define an initial state factory for this evolver.
      # @example
      #
      #   state do
      #     { status: 'new' }
      #   end
      #
      def state(&blk)
        define_method(:init_state, &blk)
      end

      # This module only accepts registering event handlers
      # with qualified event classes
      # Decider overrides this method to allow
      # defining event handlers with symbols
      # which are registered as Event classes in the decider namespace.
      # @example
      #
      #   event SomethingHappened do |state, event|
      #     state[:status] = 'done'
      #   end
      #
      # @param event_class [Class] a Sourced::Message subclass
      # @raise [ArgumentError] if event_class is not a Sourced::Message subclass
      # @return [void]
      def event(event_class, &block)
        unless event_class.is_a?(Class) && event_class < Sourced::Message
          raise ArgumentError,
                "Invalid argument #{event_class.inspect} for #{self}.event"
        end

        handled_messages_for_evolve << event_class
        block = NOOP_HANDLER unless block_given?
        define_method(Sourced.message_method_name(Evolve::PREFIX, event_class.to_s), &block)
      end

      # Run this block before any of the registered event handlers
      # Example:
      #   before_evolve do |state, event|
      #     state.updated_at = event.created_at
      #   end
      def before_evolve(&block)
        define_method(:before_evolve, &block)
      end

      # Register the same handler for many event types at once.
      # Example:
      #   # With an Array of event types
      #   evolve_all [:event_type1, :event_type2] do |state, event|
      #     state.updated_at = event.created_at
      #   end
      #
      #   # From another Evolver that responds to #handled_messages_for_evolve
      #   evolve_all CartAggregate do |state, event|
      #     state.updated_at = event.created_at
      #   end
      #
      # @param event_list [Array<Class>, #handled_messages_for_evolve] event classes,
      #   or any object exposing #handled_messages_for_evolve => Array<Class>
      def evolve_all(event_list, &block)
        event_list = event_list.handled_messages_for_evolve if event_list.respond_to?(:handled_messages_for_evolve)
        event_list.each do |event_type|
          event(event_type, &block)
        end
      end
    end
  end
end
receive(:new).and_return(worker1, worker2) 16 | allow(Sourced::HouseKeeper).to receive(:new).and_return(housekeeper) 17 | allow(executor).to receive(:start).and_yield(task) 18 | allow(Signal).to receive(:trap) 19 | end 20 | 21 | describe '#start' do 22 | subject(:supervisor) do 23 | described_class.new( 24 | logger:, 25 | count: 2, 26 | housekeeping_count: 1, 27 | executor: executor, 28 | router: router 29 | ) 30 | end 31 | 32 | it 'sets up signal handlers' do 33 | expect(Signal).to receive(:trap).with('INT') 34 | expect(Signal).to receive(:trap).with('TERM') 35 | supervisor.start 36 | end 37 | 38 | it 'creates the correct number of housekeepers with proper configuration' do 39 | expect(Sourced::HouseKeeper).to receive(:new).with( 40 | hash_including( 41 | logger:, 42 | backend: router.backend, 43 | name: 'HouseKeeper-0' 44 | ) 45 | ).and_return(housekeeper) 46 | 47 | supervisor.start 48 | end 49 | 50 | it 'creates the correct number of workers with proper configuration' do 51 | expect(Sourced::Worker).to receive(:new).with( 52 | hash_including( 53 | logger:, 54 | router:, 55 | name: 'worker-0' 56 | ) 57 | ).and_return(worker1) 58 | 59 | expect(Sourced::Worker).to receive(:new).with( 60 | hash_including( 61 | logger:, 62 | router:, 63 | name: 'worker-1' 64 | ) 65 | ).and_return(worker2) 66 | 67 | supervisor.start 68 | end 69 | 70 | it 'spawns tasks for housekeepers and workers via executor' do 71 | expect(executor).to receive(:start).and_yield(task) 72 | expect(task).to receive(:spawn).exactly(3).times # 1 housekeeper + 2 workers 73 | 74 | supervisor.start 75 | end 76 | 77 | it 'calls work on housekeepers and poll on workers in spawned tasks' do 78 | allow(task).to receive(:spawn).and_yield 79 | 80 | expect(housekeeper).to receive(:work) 81 | expect(worker1).to receive(:poll) 82 | expect(worker2).to receive(:poll) 83 | 84 | supervisor.start 85 | end 86 | 87 | it 'provides worker_ids_provider proc to housekeepers that returns live worker names' do 88 | 
worker_ids_provider = nil 89 | allow(Sourced::HouseKeeper).to receive(:new) do |args| 90 | worker_ids_provider = args[:worker_ids_provider] 91 | housekeeper 92 | end 93 | 94 | supervisor.start 95 | 96 | expect(worker_ids_provider.call).to eq(['worker-0', 'worker-1']) 97 | end 98 | end 99 | 100 | describe '#stop' do 101 | subject(:supervisor) do 102 | described_class.new( 103 | logger:, 104 | count: 2, 105 | housekeeping_count: 1, 106 | executor:, 107 | router: 108 | ) 109 | end 110 | 111 | before do 112 | supervisor.start # Create workers and housekeepers 113 | end 114 | 115 | it 'logs shutdown information' do 116 | expect(logger).to receive(:info).with(/Stopping 2 workers and 1 house-keepers/) 117 | expect(logger).to receive(:info).with('All workers stopped') 118 | supervisor.stop 119 | end 120 | 121 | it 'calls stop on all workers' do 122 | expect(worker1).to receive(:stop) 123 | expect(worker2).to receive(:stop) 124 | supervisor.stop 125 | end 126 | 127 | it 'calls stop on all housekeepers' do 128 | expect(housekeeper).to receive(:stop) 129 | supervisor.stop 130 | end 131 | end 132 | 133 | describe '.start' do 134 | it 'creates a new supervisor instance and starts it' do 135 | supervisor_instance = instance_double(described_class) 136 | expect(described_class).to receive(:new).with( 137 | logger:, 138 | count: 3 139 | ).and_return(supervisor_instance) 140 | expect(supervisor_instance).to receive(:start) 141 | 142 | described_class.start(logger:, count: 3) 143 | end 144 | end 145 | 146 | describe 'signal handling' do 147 | subject(:supervisor) do 148 | described_class.new( 149 | logger:, 150 | executor:, 151 | router: 152 | ) 153 | end 154 | 155 | it 'traps INT and TERM signals to call stop' do 156 | int_handler = nil 157 | term_handler = nil 158 | 159 | allow(Signal).to receive(:trap) do |signal, &block| 160 | int_handler = block if signal == 'INT' 161 | term_handler = block if signal == 'TERM' 162 | end 163 | 164 | supervisor.start 165 | 166 | 
expect(supervisor).to receive(:stop) 167 | int_handler.call 168 | 169 | expect(supervisor).to receive(:stop) 170 | term_handler.call 171 | end 172 | end 173 | end 174 | -------------------------------------------------------------------------------- /lib/sourced/command_methods.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Sourced 4 | # Provides command invocation methods for actors. 5 | # 6 | # CommandMethods automatically generates instance methods from command definitions, 7 | # allowing you to invoke commands in two ways: 8 | # 9 | # 1. **In-memory version** (e.g., `actor.start(name: 'Joe')`) 10 | # - Validates the command and executes the decision handler 11 | # - Returns a tuple of [cmd, new_events] 12 | # - Does NOT persist events to backend 13 | # 14 | # 2. **Durable version** (e.g., `actor.start!(name: 'Joe')`) 15 | # - Same as in-memory, but also appends events to backend 16 | # - Raises {FailedToAppendMessagesError} if backend fails 17 | # 18 | # ## Usage 19 | # 20 | # Include the module in an Actor and define commands normally: 21 | # 22 | # class MyActor < Sourced::Actor 23 | # include Sourced::CommandMethods 24 | # 25 | # command :create_item, name: String do |state, cmd| 26 | # event :item_created, cmd.payload 27 | # end 28 | # end 29 | # 30 | # actor = MyActor.new(id: 'actor-123') 31 | # cmd, events = actor.create_item(name: 'Widget') # In-memory 32 | # cmd, events = actor.create_item!(name: 'Widget') # Persists to backend 33 | # 34 | # ## Method Naming 35 | # 36 | # Methods are created based on your command name: 37 | # - Symbol commands like `:create_item` → `create_item` and `create_item!` methods 38 | # - Class commands like `CreateItem` → `create_item` and `create_item!` methods 39 | # 40 | module CommandMethods 41 | # Raised when the durable command (bang method) fails to append events to the backend. 
42 | # 43 | # @see #__issue_command 44 | class FailedToAppendMessagesError < StandardError 45 | def initialize(cmd, events) 46 | super <<~MSG 47 | Failed to append events to backend. 48 | Command is #{cmd.inspect} 49 | Events are #{events.inspect} 50 | MSG 51 | end 52 | end 53 | 54 | def self.included(base) 55 | base.send :extend, ClassMethods 56 | end 57 | 58 | # Issues a command without persisting to the backend. 59 | # 60 | # This is the core logic used by the generated command methods. It validates 61 | # the command and executes the decision handler if valid. 62 | # 63 | # @private 64 | # @param cmd_class [Class] A subclass of {Sourced::Message} representing the command 65 | # @param payload [Hash] The command payload data 66 | # @return [Array<(Sourced::Message, Array)>] A tuple of [command, events] 67 | # where command is the validated command object and events are the produced events 68 | # (empty array if command was invalid) 69 | # 70 | # @example 71 | # cmd, events = __issue_command(MyActor::CreateItem, name: 'Widget') 72 | # puts cmd.valid? # => true 73 | # puts events.length # => 1 74 | private def __issue_command(cmd_class, payload = {}) 75 | cmd = cmd_class.new(stream_id: id, payload:) 76 | return [cmd, React::EMPTY_ARRAY] unless cmd.valid? 77 | 78 | [cmd, decide(cmd)] 79 | end 80 | 81 | # @private 82 | # Class methods that hook into the command definition and create instance methods. 83 | module ClassMethods 84 | # Hooks into the command definition to automatically generate instance methods. 85 | # 86 | # This method is called automatically when you define a command in your actor. 87 | # It creates both in-memory and durable (bang) versions of the command method. 
88 | # 89 | # @see Sourced::Actor.command 90 | def command(*args, &block) 91 | super.tap do |_| 92 | case args 93 | in [Symbol => cmd_name, *_] 94 | klass_name = cmd_name.to_s.split('_').map(&:capitalize).join 95 | cmd_class = const_get(klass_name) 96 | __command_methods_define(cmd_name, cmd_class) 97 | in [Class => cmd_class] if cmd_class < Sourced::Message 98 | cmd_name = Sourced::Types::ModuleToMethodName.parse(cmd_class.name.split('::').last) 99 | __command_methods_define(cmd_name, cmd_class) 100 | end 101 | end 102 | end 103 | 104 | # Creates in-memory and durable command invocation methods. 105 | # 106 | # Defines two methods on the actor instance: 107 | # - `method_name(**payload)` - In-memory version, validates and decides without persisting 108 | # - `method_name!(**payload)` - Durable version, also appends events to backend 109 | # 110 | # @private 111 | # @param cmd_name [Symbol] The command method name (e.g., :create_item) 112 | # @param cmd_class [Class] The command message class 113 | # 114 | # @example Generated methods 115 | # class MyActor < Sourced::Actor 116 | # include Sourced::CommandMethods 117 | # command :create_item, name: String do |state, cmd| 118 | # event :item_created, cmd.payload 119 | # end 120 | # end 121 | # 122 | # actor = MyActor.new(id: 'a1') 123 | # 124 | # # In-memory version 125 | # cmd, events = actor.create_item(name: 'Widget') 126 | # # => Returns [cmd, events] without touching backend 127 | # # => If invalid: [invalid_cmd, []] 128 | # 129 | # # Durable version 130 | # cmd, events = actor.create_item!(name: 'Widget') 131 | # # => Returns [cmd, events] if valid and appended successfully 132 | # # => Raises FailedToAppendMessagesError if backend fails 133 | # # => If invalid: [invalid_cmd, []] 134 | private def __command_methods_define(cmd_name, cmd_class) 135 | define_method(cmd_name) do |**payload| 136 | __issue_command(cmd_class, payload) 137 | end 138 | 139 | define_method("#{cmd_name}!") do |**payload| 140 | cmd_events 
= __issue_command(cmd_class, payload) 141 | return cmd_events unless cmd_events.first.valid? 142 | 143 | success = Sourced.config.backend.append_to_stream(id, cmd_events.last) 144 | raise FailedToAppendMessagesError.new(*cmd_events) unless success 145 | 146 | cmd_events 147 | end 148 | end 149 | end 150 | end 151 | end 152 | -------------------------------------------------------------------------------- /spec/message_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | module TestMessages 6 | Command = Class.new(Sourced::Message) 7 | 8 | Add = Command.define('test.add') do 9 | attribute :value, Integer 10 | end 11 | 12 | Added = Sourced::Message.define('test.added') do 13 | attribute :value, Integer 14 | end 15 | end 16 | 17 | RSpec.describe Sourced::Message do 18 | it 'requires a stream_id' do 19 | msg = TestMessages::Add.new(payload: { value: 1 }) 20 | expect(msg.valid?).to be false 21 | expect(msg.errors[:stream_id]).not_to be(nil) 22 | 23 | msg = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }) 24 | expect(msg.valid?).to be true 25 | end 26 | 27 | it 'validates payload' do 28 | msg = TestMessages::Add.new(stream_id: '123', payload: { value: 'aaa' }) 29 | expect(msg.valid?).to be false 30 | expect(msg.errors[:payload][:value]).not_to be(nil) 31 | end 32 | 33 | it 'defines Payload#fetch and Payload#[]' do 34 | msg = TestMessages::Add.new(stream_id: '123', payload: { value: 'aaa' }) 35 | expect(msg.payload[:value]).to eq('aaa') 36 | expect(msg.payload.fetch(:value)).to eq('aaa') 37 | 38 | msg = TestMessages::Add.new(stream_id: '123') 39 | expect(msg.payload[:value]).to be(nil) 40 | expect(msg.payload.fetch(:value)).to be(nil) 41 | expect do 42 | msg.payload.fetch(:nope) 43 | end.to raise_error(KeyError) 44 | end 45 | 46 | it 'initializes an empty payload if the class defines one' do 47 | msg = TestMessages::Add.new 48 | 
expect(msg.payload).not_to be(nil) 49 | end 50 | 51 | it 'sets #type' do 52 | msg = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }) 53 | expect(msg.type).to eq('test.add') 54 | end 55 | 56 | it 'sets #causation_id and #correlation_id' do 57 | msg = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }) 58 | expect(msg.id).not_to be(nil) 59 | expect(msg.causation_id).to eq(msg.id) 60 | expect(msg.correlation_id).to eq(msg.id) 61 | end 62 | 63 | describe '.build' do 64 | it 'builds instance with stream_id and payload' do 65 | msg = TestMessages::Add.build('aaa', value: 2) 66 | expect(msg).to be_a(TestMessages::Add) 67 | expect(msg.stream_id).to eq('aaa') 68 | expect(msg.payload.value).to eq(2) 69 | end 70 | end 71 | 72 | describe '.from' do 73 | it 'creates a message from a hash' do 74 | msg = Sourced::Message.from(stream_id: '123', type: 'test.add', payload: { value: 1 }) 75 | expect(msg).to be_a(TestMessages::Add) 76 | expect(msg.valid?).to be(true) 77 | end 78 | 79 | it 'raises a known exception if no type found' do 80 | expect do 81 | Sourced::Message.from(stream_id: '123', type: 'test.unknown', payload: { value: 1 }) 82 | end.to raise_error(Sourced::UnknownMessageError, 'Unknown event type: test.unknown') 83 | end 84 | 85 | it 'scopes message registries by sub-class' do 86 | msg = TestMessages::Command.from(stream_id: '123', type: 'test.add', payload: { value: 1 }) 87 | expect(msg).to be_a(TestMessages::Add) 88 | 89 | expect do 90 | TestMessages::Command.from(stream_id: '123', type: 'test.added', payload: { value: 1 }) 91 | end.to raise_error(Sourced::UnknownMessageError, 'Unknown event type: test.added') 92 | end 93 | end 94 | 95 | describe '#follow' do 96 | it 'creates a new message with causation_id and correlation_id' do 97 | add = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }) 98 | added = add.follow(TestMessages::Added, value: 2) 99 | expect(added.causation_id).to eq(add.id) 100 | expect(added.correlation_id).to 
eq(add.id) 101 | end 102 | 103 | it 'copies payload attributes' do 104 | add = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }) 105 | added = add.follow(TestMessages::Added, add.payload) 106 | expect(added.payload.value).to eq(1) 107 | end 108 | 109 | it 'copies metadata' do 110 | add = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }, metadata: { user_id: 10 }) 111 | added = add.follow(TestMessages::Added, add.payload) 112 | expect(added.metadata[:user_id]).to eq(10) 113 | end 114 | end 115 | 116 | describe '#follow_with_seq' do 117 | it 'creates a new message with custom seq, causation_id and correlation_id' do 118 | add = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }) 119 | added = add.follow_with_seq(TestMessages::Added, 2, value: 2) 120 | expect(added.seq).to eq(2) 121 | expect(added.causation_id).to eq(add.id) 122 | expect(added.correlation_id).to eq(add.id) 123 | end 124 | end 125 | 126 | describe '#follow_with_stream_id' do 127 | it 'creates a new message with custom stream_id, causation_id and correlation_id' do 128 | add = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }) 129 | added = add.follow_with_stream_id(TestMessages::Added, 'foo', value: 2) 130 | expect(added.stream_id).to eq('foo') 131 | expect(added.causation_id).to eq(add.id) 132 | expect(added.correlation_id).to eq(add.id) 133 | end 134 | end 135 | 136 | describe '#at' do 137 | it 'creates a message with a created_at date in the future' do 138 | add = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }) 139 | delayed = add.at(add.created_at + 10) 140 | expect(delayed.created_at).to eq(add.created_at + 10) 141 | end 142 | 143 | it 'does not allow setting a date lower than current' do 144 | add = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }) 145 | expect do 146 | add.at(add.created_at - 10) 147 | end.to raise_error(Sourced::PastMessageDateError) 148 | end 149 | end 150 | 151 | describe '#to' do 152 | it 'creates a 
message with a new #stream_id' do 153 | add = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }) 154 | add2 = add.to('222') 155 | expect(add.stream_id).to eq('123') 156 | expect(add2.stream_id).to eq('222') 157 | end 158 | 159 | it 'accepts a #stream_id interface' do 160 | add = TestMessages::Add.new(stream_id: '123', payload: { value: 1 }) 161 | streamable = double('Streamable', stream_id: '222') 162 | add2 = add.to(streamable) 163 | expect(add2.stream_id).to eq('222') 164 | end 165 | end 166 | end 167 | -------------------------------------------------------------------------------- /lib/sourced/backends/sequel_backend/installer.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'sequel' 4 | module Sourced 5 | module Backends 6 | class SequelBackend 7 | class Installer 8 | def initialize( 9 | db, 10 | logger:, 11 | workers_table:, 12 | scheduled_messages_table:, 13 | streams_table:, 14 | offsets_table:, 15 | consumer_groups_table:, 16 | messages_table: 17 | ) 18 | @db = db 19 | @logger = logger 20 | @scheduled_messages_table = scheduled_messages_table 21 | @workers_table = workers_table 22 | @streams_table = streams_table 23 | @offsets_table = offsets_table 24 | @consumer_groups_table = consumer_groups_table 25 | @messages_table = messages_table 26 | end 27 | 28 | def installed? 29 | db.table_exists?(messages_table) \ 30 | && db.table_exists?(streams_table) \ 31 | && db.table_exists?(consumer_groups_table) \ 32 | && db.table_exists?(offsets_table) \ 33 | && db.table_exists?(scheduled_messages_table) \ 34 | && db.table_exists?(workers_table) 35 | end 36 | 37 | def uninstall 38 | return unless installed? 
39 | 40 | raise 'Not in test environment' unless ENV['ENVIRONMENT'] == 'test' 41 | 42 | [offsets_table, scheduled_messages_table, messages_table, consumer_groups_table, streams_table, workers_table].each do |table| 43 | db.drop_table?(table) 44 | end 45 | end 46 | 47 | def install 48 | _streams_table = streams_table 49 | _consumer_groups_table = consumer_groups_table 50 | 51 | db.create_table?(streams_table) do 52 | primary_key :id 53 | String :stream_id, null: false, unique: true 54 | Time :updated_at, null: false, default: Sequel.function(:now) 55 | Bignum :seq, null: false 56 | 57 | # Index for recent_streams query performance 58 | index :updated_at, name: "idx_#{_streams_table}_updated_at" 59 | end 60 | 61 | logger.info("Created table #{streams_table}") 62 | 63 | db.create_table?(consumer_groups_table) do 64 | primary_key :id 65 | String :group_id, null: false, unique: true 66 | Bignum :highest_global_seq, null: false, default: 0 67 | String :status, null: false, default: ACTIVE, index: true 68 | column :error_context, :jsonb 69 | Time :retry_at, null: true, index: true 70 | Time :created_at, null: false, default: Sequel.function(:now) 71 | Time :updated_at, null: false, default: Sequel.function(:now) 72 | 73 | index :group_id, unique: true 74 | end 75 | 76 | logger.info("Created table #{consumer_groups_table}") 77 | 78 | _offsets_table = offsets_table 79 | 80 | db.create_table?(offsets_table) do 81 | primary_key :id 82 | foreign_key :group_id, _consumer_groups_table, on_delete: :cascade 83 | foreign_key :stream_id, _streams_table, on_delete: :cascade 84 | Bignum :global_seq, null: false 85 | Time :created_at, null: false, default: Sequel.function(:now) 86 | TrueClass :claimed, null: false, default: false 87 | Time :claimed_at, null: true 88 | String :claimed_by, null: true 89 | 90 | # Unique constraint for business logic 91 | index %i[group_id stream_id], unique: true 92 | index :claimed, where: { claimed: false }, name: "idx_#{_offsets_table}_unclaimed" 93 | 
index [:claimed, :claimed_by, :claimed_at], where: { claimed: true }, name: "idx_#{_offsets_table}_claimed_claimer" 94 | 95 | # Coverage index for aggregation queries (sql_for_consumer_stats) 96 | # Covers: GROUP BY group_id + MIN/MAX(global_seq) aggregations 97 | index %i[group_id global_seq], name: "idx_#{_offsets_table}_group_seq_covering" 98 | end 99 | 100 | logger.info("Created table #{offsets_table}") 101 | 102 | db.create_table?(messages_table) do 103 | primary_key :global_seq, type: :Bignum 104 | column :id, :uuid, unique: true 105 | foreign_key :stream_id, _streams_table 106 | Bignum :seq, null: false 107 | String :type, null: false 108 | Time :created_at, null: false 109 | column :causation_id, :uuid, index: true 110 | column :correlation_id, :uuid, index: true 111 | column :metadata, :jsonb 112 | column :payload, :jsonb 113 | 114 | # Existing indexes 115 | index %i[stream_id seq], unique: true 116 | 117 | # Performance indexes for common query patterns 118 | index :type # For event type filtering 119 | index :created_at # For time-based queries 120 | index %i[type global_seq] # For filtered ordering (composite) 121 | index %i[stream_id global_seq] # For stream + sequence queries 122 | end 123 | 124 | logger.info("Created table #{messages_table}") 125 | 126 | _scheduled_messages_table = scheduled_messages_table 127 | 128 | db.create_table?(scheduled_messages_table) do 129 | primary_key :id 130 | Time :created_at, null: false 131 | Time :available_at, null: false 132 | column :message, :jsonb 133 | 134 | index :available_at 135 | end 136 | 137 | logger.info("Created table #{scheduled_messages_table}") 138 | 139 | db.create_table?(workers_table) do 140 | String :id, primary_key: true, null: false 141 | Time :last_seen, null: false, index: true 142 | String :pid, null: true 143 | String :host, null: true 144 | column :info, :jsonb 145 | end 146 | 147 | logger.info("Created table #{workers_table}") 148 | end 149 | 150 | private 151 | 152 | attr_reader( 153 | 
:db, 154 | :logger, 155 | :scheduled_messages_table, 156 | :workers_table, 157 | :streams_table, 158 | :offsets_table, 159 | :consumer_groups_table, 160 | :messages_table 161 | ) 162 | end 163 | end 164 | end 165 | end 166 | -------------------------------------------------------------------------------- /examples/cart.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'bundler' 4 | Bundler.setup(:test) 5 | 6 | require 'sourced' 7 | require 'sequel' 8 | 9 | # ActiveRecord::Base.establish_connection(adapter: 'postgresql', database: 'decider') 10 | unless ENV['backend_configured'] 11 | puts 'aggregate config' 12 | Sourced.configure do |config| 13 | config.backend = Sequel.postgres('sourced_development') 14 | end 15 | Sourced.config.backend.install 16 | ENV['backend_configured'] = 'true' 17 | end 18 | 19 | # A cart Actor/Aggregate 20 | # Example: 21 | # cart = Cart.new('cart-1') 22 | # cart.add_item(name: 'item1', price: 100) 23 | # cart.place 24 | # cart.events 25 | # 26 | # The above sends a Cart::Place command 27 | # which produces a Cart::Placed event 28 | class Cart < Sourced::Actor 29 | State = Struct.new(:status, :notified, :items, :mailer_id) do 30 | def total = items.sum(&:price) 31 | end 32 | 33 | state do |id| 34 | State.new(:open, false, [], nil) 35 | end 36 | 37 | ItemAdded = Sourced::Message.define('cart.item_added') do 38 | attribute :name, String 39 | attribute :price, Integer 40 | end 41 | 42 | Placed = Sourced::Message.define('cart.placed') 43 | Notified = Sourced::Message.define('cart.notified') do 44 | attribute :mailer_id, String 45 | end 46 | 47 | # Defines a Cart::AddItem command struct 48 | command :add_item, name: String, price: Integer do |cart, cmd| 49 | event(ItemAdded, cmd.payload.to_h) 50 | end 51 | 52 | # Defines a Cart::Place command struct 53 | command :place do |_, cmd| 54 | event(Placed) 55 | end 56 | 57 | # Defines a Cart::Notify command struct 58 | 
command :notify, mailer_id: String do |_, cmd| 59 | puts "#{self.class.name} #{cmd.stream_id} NOTIFY" 60 | event(Notified, mailer_id: cmd.payload.mailer_id) 61 | end 62 | 63 | def self.on_exception(exception, _message, group) 64 | if group.error_context[:retry_count] < 3 65 | later = 5 + 5 * group.error_context[:retry_count] 66 | group.retry(later) 67 | else 68 | group.stop(exception) 69 | end 70 | end 71 | 72 | event ItemAdded do |cart, event| 73 | cart.items << event.payload 74 | end 75 | 76 | event Placed do |cart, _event| 77 | cart.status = :placed 78 | end 79 | 80 | event Notified do |cart, event| 81 | cart.notified = true 82 | cart.mailer_id = event.payload.mailer_id 83 | end 84 | 85 | # This block will run 86 | # in the same transaction as appending 87 | # new events to the store. 88 | # So if either fails, everything is rolled back. 89 | # ergo, strong consistency. 90 | sync do |command:, events:, state:| 91 | puts "#{self.class.name} #{events.last.seq} SYNC" 92 | end 93 | 94 | # Or register a Reactor interface to react to events 95 | # synchronously 96 | # sync CartListings 97 | end 98 | 99 | class Mailer < Sourced::Actor 100 | EmailSent = Sourced::Message.define('mailer.email_sent') do 101 | attribute :cart_id, String 102 | end 103 | 104 | state do |id| 105 | [] 106 | end 107 | 108 | command :send_email, cart_id: String do |_, cmd| 109 | # Send email here, emit EmailSent if successful 110 | event(EmailSent, cart_id: cmd.payload.cart_id) 111 | end 112 | 113 | event EmailSent do |list, event| 114 | list << event 115 | end 116 | end 117 | 118 | # A Saga that orchestrates the flow between Cart and Mailer 119 | class CartEmailsSaga < Sourced::Actor 120 | # Listen for Cart::Placed events and 121 | # send command to Mailer 122 | reaction Cart::Placed do |event| 123 | dispatch(Mailer::SendEmail, cart_id: event.stream_id).to("mailer-#{event.stream_id}") 124 | end 125 | 126 | # Listen for Mailer::EmailSent events and 127 | # send command to Cart 128 | reaction
Mailer::EmailSent do |event| 129 | dispatch(Cart::Notify, mailer_id: event.stream_id).to(event.payload.cart_id) 130 | end 131 | end 132 | 133 | # A projector 134 | # "reacts" to events registered with .evolve 135 | class CartListings < Sourced::Actor 136 | class << self 137 | def handled_events = self.handled_events_for_evolve 138 | 139 | # The Reactor interface 140 | # @param events [Array] 141 | def handle_events(events) 142 | # For this type of event sourced projections 143 | # that load current state from events 144 | # then apply "new" events 145 | # TODO: the current state already includes 146 | # the new events, so we need to load upto events.first.seq 147 | instance = load(events.first.stream_id, upto: events.first.seq - 1) 148 | instance.handle_events(events) 149 | end 150 | end 151 | 152 | def handle_events(events) 153 | evolve(state, events) 154 | save 155 | [] # no commands 156 | end 157 | 158 | def initialize(id, **_args) 159 | super 160 | FileUtils.mkdir_p('examples/carts') 161 | @path = "./examples/carts/#{id}.json" 162 | end 163 | 164 | private def save 165 | backend.transaction do 166 | run_sync_blocks(state, nil, []) 167 | end 168 | end 169 | 170 | def init_state(id) 171 | { id:, items: [], status: :open, seq: 0, seqs: [] } 172 | end 173 | 174 | sync do |cart, _command, _events| 175 | File.write(@path, JSON.pretty_generate(cart)) 176 | end 177 | 178 | # Register all events from Cart 179 | # So that before_evolve runs before all cart events 180 | evolve_all Cart.handled_commands 181 | evolve_all Cart 182 | 183 | before_evolve do |cart, event| 184 | cart[:seq] = event.seq 185 | cart[:seqs] << event.seq 186 | end 187 | 188 | event Cart::Placed do |cart, event| 189 | cart[:status] = :placed 190 | end 191 | 192 | event Cart::ItemAdded do |cart, event| 193 | cart[:items] << event.payload.to_h 194 | end 195 | end 196 | 197 | class LoggingReactor 198 | extend Sourced::Consumer 199 | 200 | class << self 201 | # Register as a Reactor that cares about these 
events 202 | # The workers will use this to fetch the right events 203 | # and ACK offsets after processing 204 | # 205 | # @return [Array] 206 | def handled_events = [Cart::Placed, Cart::ItemAdded] 207 | 208 | # Workers pass available events to this method 209 | # in order, with exactly-once semantics 210 | # If a list of commands is returned, 211 | # workers will send them to the router 212 | # to be dispatched to the appropriate command handlers. 213 | # 214 | # @param events [Array] 215 | # @option replaying [Boolean] whether this is a replay of events 216 | # @return [Array] 45 | # @return [Array] 46 | def react(events) 47 | __handling_reactions(Array(events)) do |event| 48 | method_name = Sourced.message_method_name(React::PREFIX, event.class.to_s) 49 | if respond_to?(method_name) 50 | Array(send(method_name, state, event)).compact 51 | else 52 | EMPTY_ARRAY 53 | end 54 | end 55 | end 56 | 57 | # TODO: O(1) lookup 58 | def reacts_to?(message) 59 | self.class.handled_messages_for_react.include?(message.class) 60 | end 61 | 62 | private 63 | 64 | def __handling_reactions(events, &) 65 | @__stream_dispatchers = [] 66 | events.each do |event| 67 | @__event_for_reaction = event 68 | yield event 69 | end 70 | cmds = @__stream_dispatchers.map(&:message) 71 | @__stream_dispatchers.clear 72 | cmds 73 | end 74 | 75 | class Dispatcher 76 | attr_reader :message 77 | 78 | def initialize(msg) 79 | @message = msg 80 | end 81 | 82 | def inspect = %(<#{self.class} #{@message}>) 83 | 84 | def to(stream_id) 85 | @message = @message.to(stream_id) 86 | self 87 | end 88 | 89 | def at(datetime) 90 | @message = @message.at(datetime) 91 | self 92 | end 93 | 94 | def with_metadata(attrs = {}) 95 | @message = @message.with_metadata(attrs) 96 | self 97 | end 98 | end 99 | 100 | def dispatch(command_class, payload = {}) 101 | command_class = self.class[command_class] if command_class.is_a?(Symbol) 102 | cmd = @__event_for_reaction 103 | .follow(command_class, payload) 104 | 
.with_metadata(producer: self.class.consumer_info.group_id) 105 | 106 | dispatcher = Dispatcher.new(cmd) 107 | @__stream_dispatchers << dispatcher 108 | dispatcher 109 | end 110 | 111 | module ClassMethods 112 | def inherited(subclass) 113 | super 114 | handled_messages_for_react.each do |evt_type| 115 | subclass.handled_messages_for_react << evt_type 116 | end 117 | end 118 | 119 | # Override this with extend Sourced::Consumer 120 | def consumer_info 121 | Sourced::Consumer::ConsumerInfo.new(group_id: name) 122 | end 123 | 124 | def handled_messages_for_react 125 | @handled_messages_for_react ||= [] 126 | end 127 | 128 | # Define a reaction to an event 129 | # @example 130 | # reaction SomethingHappened do |state, event| 131 | # stream = stream_for(event) 132 | # # stream = stream_for("new-stream-id") 133 | # stream.command DoSomethingElse 134 | # end 135 | # 136 | # The host class is expected to define a #state method 137 | # These handlers will load the decider's state from past events, and yield the state and the event to the block. 
138 | # @example 139 | # reaction SomethingHappened do |state, event| 140 | # if state[:count] % 3 == 0 141 | # stream_for(event).command DoSomething 142 | # end 143 | # end 144 | # 145 | # If no event class given, the handler is registered for all events 146 | # set to evolve in .handled_messages_for_evolve, unless 147 | # specific reactions have already been registered for them 148 | # The host class is expected to support .handled_messages_for_evolve 149 | # see Evolve mixin 150 | # @example 151 | # reaction do |state, event| 152 | # LOGGER.info state 153 | # end 154 | # 155 | # @overload reaction do |state, event| 156 | # @overload reaction(event_symbol) do |state, event| 157 | # @param event_symbol [Symbol] Symbolised message name 158 | # @overload reaction(event_class) do |state, event| 159 | # @param event_class [Class] Must be subclass of Sourced::Message 160 | # @overload reaction(*events) do |state, event| 161 | # @param *events [Array] List of event classes or symbols 162 | # @return [void] 163 | def reaction(*args, &block) 164 | case args 165 | in [] 166 | handled_messages_for_evolve.each do |e| 167 | method_name = Sourced.message_method_name(React::PREFIX, e.to_s) 168 | if !instance_methods.include?(method_name.to_sym) 169 | reaction e, &block 170 | end 171 | end 172 | 173 | in [Symbol => message_symbol] 174 | message_class = __resolve_message_class(message_symbol) 175 | reaction(message_class, &block) 176 | 177 | in [Class => message_class] if message_class < Sourced::Message 178 | __validate_message_for_reaction!(message_class) 179 | unless message_class.is_a?(Class) && message_class < Sourced::Message 180 | raise ArgumentError, 181 | "Invalid argument #{message_class.inspect} for #{self}.reaction" 182 | end 183 | 184 | self.handled_messages_for_react << message_class 185 | define_method(Sourced.message_method_name(React::PREFIX, message_class.to_s), &block) if block_given?
186 | 187 | else 188 | args.each do |k| 189 | reaction k, &block 190 | end 191 | end 192 | end 193 | 194 | # Run this hook before registering a reaction 195 | # Actor can override this to make sure that the same message is not 196 | # also handled as a command 197 | def __validate_message_for_reaction!(event_class) 198 | # no-op. 199 | end 200 | 201 | private 202 | 203 | def __resolve_message_class(message_symbol) 204 | raise ArgumentError, "#{self} doesn't support resolving #{message_symbol.inspect} into a message class" 205 | end 206 | end 207 | end 208 | end 209 | -------------------------------------------------------------------------------- /spec/testing/rspec_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | module Testing 6 | Start = Sourced::Message.define('sourced.testing.start') do 7 | attribute :name, String 8 | end 9 | 10 | Started = Sourced::Message.define('sourced.testing.started') do 11 | attribute :name, String 12 | end 13 | 14 | class Reactor 15 | extend Sourced::Consumer 16 | 17 | def self.handled_messages = [Start] 18 | 19 | def self.handle(message, history: []) 20 | actions = [] 21 | if Start === message && history.none? { |m| Started === m } 22 | actions << Sourced::Actions::AppendNext.new([message.follow(Started, name: message.payload.name)]) 23 | end 24 | actions 25 | end 26 | end 27 | 28 | class Order < Sourced::Actor 29 | state do |id| 30 | { id:, name: nil} 31 | end 32 | 33 | command Start do |state, cmd| 34 | if state[:name].nil? 
35 | event Started, cmd.payload 36 | end 37 | end 38 | 39 | event Started do |state, evt| 40 | state[:name] = evt.payload.name 41 | end 42 | 43 | command :start_payment do |_, cmd| 44 | if state[:name] 45 | event :payment_started 46 | end 47 | end 48 | 49 | event :payment_started 50 | 51 | reaction :payment_started do |_, evt| 52 | dispatch(Payment::Process).to("#{evt.stream_id}-payment") 53 | end 54 | end 55 | 56 | class Payment < Sourced::Actor 57 | command :process do |_, cmd| 58 | event :processed 59 | end 60 | 61 | event :processed 62 | end 63 | 64 | class Telemetry 65 | STREAM_ID = 'telemetry-stream' 66 | include Sourced::Handler 67 | 68 | Logged = Sourced::Message.define('test-telemetry.logged') do 69 | attribute :source_stream, String 70 | attribute :message_type, String 71 | end 72 | 73 | consumer do |c| 74 | c.group_id = 'test-telemetry' 75 | end 76 | 77 | on Order::PaymentStarted, Payment::Processed do |event| 78 | logged = Logged.build(STREAM_ID, source_stream: event.stream_id, message_type: event.type) 79 | [logged] 80 | end 81 | end 82 | end 83 | 84 | RSpec.describe Sourced::Testing::RSpec do 85 | describe 'with_reactor' do 86 | context 'with Reactor interface' do 87 | it 'works' do 88 | with_reactor(Testing::Reactor, 'a') 89 | .when(Testing::Start, name: 'Joe') 90 | .then(Testing::Started.build('a', name: 'Joe')) 91 | 92 | with_reactor(Testing::Reactor, 'a') 93 | .given(Testing::Started, name: 'Joe') 94 | .when(Testing::Start, name: 'Joe') 95 | .then([]) 96 | 97 | # If supports any .handle() interface, including u classes 98 | with_reactor(Testing::Order, 'a') 99 | .when(Testing::Start, name: 'Joe') 100 | .then(Testing::Started.build('a', name: 'Joe')) 101 | end 102 | end 103 | 104 | context 'with Actor instance' do 105 | it 'works' do 106 | with_reactor(Testing::Order.new(id: 'a')) 107 | .when(Testing::Start, name: 'Joe') 108 | .then(Testing::Started.build('a', name: 'Joe')) 109 | 110 | with_reactor(Testing::Order.new(id: 'a')) 111 | 
.when(Testing::Start, name: 'Joe') 112 | .then(Testing::Started, name: 'Joe') 113 | 114 | with_reactor(Testing::Order.new(id: 'a')) 115 | .when(Testing::Start, name: 'Joe') 116 | .then([Testing::Started.build('a', name: 'Joe')]) 117 | 118 | with_reactor(Testing::Order.new(id: 'a')) 119 | .given(Testing::Started, name: 'Joe') 120 | .when(Testing::Start, name: 'Joe') 121 | .then([]) 122 | end 123 | end 124 | 125 | specify 'it raises when adding events after assertion' do 126 | expect { 127 | with_reactor(Testing::Reactor, 'a') 128 | .given(Testing::Started, name: 'Joe') 129 | .when(Testing::Start, name: 'Joe') 130 | .then([]) 131 | .given(Testing::Started, name: 'Joe') # <= can't add more state after .then() assertion 132 | }.to raise_error(Sourced::Testing::RSpec::FinishedTestCase) 133 | end 134 | 135 | context 'with block given to #then' do 136 | it 'evaluates block' do 137 | received = [] 138 | 139 | klass = Class.new do 140 | extend Sourced::Consumer 141 | def self.handled_messages = [Testing::Start] 142 | end 143 | klass.define_singleton_method(:handle) do |message, history:| 144 | received << message 145 | [] 146 | end 147 | 148 | with_reactor(klass, 'abc') 149 | .when(Testing::Start, name: 'Joe') 150 | .then do |actions| 151 | expect(actions).to eq([]) 152 | expect(received).to match_sourced_messages(Testing::Start.build('abc', name: 'Joe')) 153 | end 154 | end 155 | end 156 | 157 | describe '.then!' 
do 158 | it 'evaluates sync blocks' do 159 | received = [] 160 | 161 | klass = Class.new do 162 | extend Sourced::Consumer 163 | def self.handled_messages = [Testing::Start] 164 | end 165 | klass.define_singleton_method(:handle) do |message, history:| 166 | sync = proc do 167 | received << 10 168 | end 169 | started = message.follow(Testing::Started, message.payload) 170 | [ 171 | Sourced::Actions::Sync.new(sync), 172 | Sourced::Actions::AppendNext.new([started]) 173 | ] 174 | end 175 | 176 | with_reactor(klass, 'abc') 177 | .when(Testing::Start, name: 'Joe') 178 | .then! do |actions| 179 | expect(actions.first).to be_a(Sourced::Actions::Sync) 180 | expect(received).to eq([10]) 181 | end 182 | .then(Testing::Started.build('abc', name: 'Joe')) 183 | end 184 | end 185 | end 186 | 187 | describe 'with_reactors' do 188 | it 'tests collaboration of reactors' do 189 | order_stream = 'actor-1' 190 | payment_stream = 'actor-1-payment' 191 | telemetry_stream = Testing::Telemetry::STREAM_ID 192 | 193 | # With these reactors 194 | with_reactors(Testing::Order, Testing::Payment, Testing::Telemetry) 195 | # GIVEN that these events exist in history 196 | .given(Testing::Started.build(order_stream, name: 'foo')) 197 | # WHEN I dispatch this new command 198 | .when(Testing::Order::StartPayment.build(order_stream)) 199 | # Then I expect 200 | .then do |_, new_messages| 201 | # The different reactors collaborated and 202 | # left this message trail behind 203 | # Backend#messages is only available in the TestBackend 204 | expect(new_messages).to match_sourced_messages([ 205 | Testing::Started.build(order_stream, name: 'foo'), 206 | Testing::Order::StartPayment.build(order_stream), 207 | Testing::Order::PaymentStarted.build(order_stream), 208 | Testing::Telemetry::Logged.build(telemetry_stream, source_stream: order_stream, message_type: 'testing.order.payment_started'), 209 | Testing::Payment::Process.build(payment_stream), 210 | Testing::Payment::Processed.build(payment_stream), 211 
| Testing::Telemetry::Logged.build(telemetry_stream, source_stream: payment_stream, message_type: 'testing.payment.processed'), 212 | ]) 213 | end 214 | end 215 | end 216 | end 217 | -------------------------------------------------------------------------------- /spec/projector_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | module ProjectorTest 6 | STORE = {} 7 | 8 | State = Struct.new(:id, :total) 9 | 10 | Added = Sourced::Event.define('prtest.added') do 11 | attribute :amount, Integer 12 | end 13 | 14 | Probed = Sourced::Event.define('prtest.probed') 15 | 16 | NextCommand = Sourced::Command.define('prtest.next_command') do 17 | attribute :amount, Integer 18 | end 19 | 20 | NextCommand2 = Sourced::Command.define('prtest.next_command2') do 21 | attribute :amount, Integer 22 | end 23 | 24 | class StateStored < Sourced::Projector::StateStored 25 | state do |id| 26 | STORE[id] || State.new(id, 0) 27 | end 28 | 29 | event Added do |state, event| 30 | state.total += event.payload.amount 31 | end 32 | 33 | sync do |state:, events:, replaying:| 34 | STORE[state.id] = state 35 | end 36 | end 37 | 38 | class EventSourced < Sourced::Projector::EventSourced 39 | state do |id| 40 | State.new(id, 0) 41 | end 42 | 43 | event Added do |state, event| 44 | state.total += event.payload.amount 45 | end 46 | 47 | sync do |state:, events:, replaying:| 48 | STORE[state.id] = [state, events.last.type] 49 | end 50 | end 51 | 52 | class StateStoredWithReactions < Sourced::Projector::StateStored 53 | state do |id| 54 | STORE[id] || State.new(id, 0) 55 | end 56 | 57 | event Added do |state, event| 58 | state.total += event.payload.amount 59 | end 60 | 61 | event Probed # register so that it's handled by .reaction 62 | 63 | # React to a specific event 64 | reaction Added do |state, event| 65 | if state.total > 20 66 | dispatch(NextCommand, amount: state.total).to(event) 67 | end 
68 | end 69 | 70 | # React to any event 71 | reaction do |state, event| 72 | if state.total > 10 73 | dispatch(NextCommand2, amount: state.total) 74 | end 75 | end 76 | 77 | sync do |state:, events:, replaying:| 78 | STORE[state.id] = state 79 | end 80 | end 81 | end 82 | 83 | RSpec.describe Sourced::Projector do 84 | before do 85 | ProjectorTest::STORE.clear 86 | end 87 | 88 | describe Sourced::Projector::StateStored do 89 | it 'has consumer info' do 90 | expect(ProjectorTest::StateStored.consumer_info.group_id).to eq('ProjectorTest::StateStored') 91 | end 92 | 93 | specify 'with new state' do 94 | e1 = ProjectorTest::Added.parse(stream_id: '111', payload: { amount: 10 }) 95 | 96 | actions = ProjectorTest::StateStored.handle(e1, replaying: false) 97 | expect(actions.map(&:class)).to eq([Sourced::Actions::Sync]) 98 | # Run actions. Normally the backend runs these 99 | run_sync_blocks(actions) 100 | 101 | expect(ProjectorTest::STORE['111'].total).to eq(10) 102 | end 103 | 104 | specify 'with existing state' do 105 | ProjectorTest::STORE['111'] = ProjectorTest::State.new('111', 10) 106 | 107 | e1 = ProjectorTest::Added.parse(stream_id: '111', payload: { amount: 10 }) 108 | 109 | actions = ProjectorTest::StateStored.handle(e1, replaying: false) 110 | expect(actions.map(&:class)).to eq([Sourced::Actions::Sync]) 111 | # Run actions. 
Normally the backend runs these 112 | run_sync_blocks(actions) 113 | 114 | expect(ProjectorTest::STORE['111'].total).to eq(20) 115 | end 116 | 117 | it 'increments @seq' do 118 | e1 = ProjectorTest::Added.parse(stream_id: '222', seq: 1, payload: { amount: 12 }) 119 | e2 = ProjectorTest::Probed.parse(stream_id: '222', seq: 2) 120 | projector = ProjectorTest::StateStored.new(id: '222') 121 | projector.evolve([e1, e2]) 122 | expect(projector.seq).to eq(2) 123 | end 124 | end 125 | 126 | describe 'Sourced::Projector::StateStored with reactions' do 127 | it 'reacts to events based on projected state, and returns commands' do 128 | e1 = ProjectorTest::Added.parse(stream_id: '222', payload: { amount: 10 }) 129 | e2 = ProjectorTest::Added.parse(stream_id: '222', payload: { amount: 5 }) 130 | e3 = ProjectorTest::Added.parse(stream_id: '222', payload: { amount: 6 }) 131 | 132 | actions = ProjectorTest::StateStoredWithReactions.handle(e1, replaying: false) 133 | expect(actions.map(&:class)).to eq([Sourced::Actions::Sync]) 134 | run_sync_blocks(actions) 135 | 136 | actions = ProjectorTest::StateStoredWithReactions.handle(e1, replaying: false) 137 | expect(actions.map(&:class)).to eq([Sourced::Actions::Sync]) 138 | run_sync_blocks(actions) 139 | 140 | actions = ProjectorTest::StateStoredWithReactions.handle(e2, replaying: false) 141 | expect(actions.map(&:class)).to eq([Sourced::Actions::Sync, Sourced::Actions::AppendNext]) 142 | run_sync_blocks(actions) 143 | 144 | actions = ProjectorTest::StateStoredWithReactions.handle(e3, replaying: false) 145 | expect(actions.map(&:class)).to eq([Sourced::Actions::Sync, Sourced::Actions::AppendNext]) 146 | run_sync_blocks(actions) 147 | expect(ProjectorTest::STORE['222'].total).to eq(31) 148 | expect(actions.last.messages.map(&:class)).to eq([ProjectorTest::NextCommand]) 149 | expect(actions.last.messages.map(&:stream_id)).to eq(['222']) 150 | expect(actions.last.messages.first.payload.amount).to eq(31) 151 | end 152 | 153 | it 'reacts to 
wildcard events, if it evolves from them' do 154 | e1 = ProjectorTest::Added.parse(stream_id: '222', payload: { amount: 12 }) 155 | e2 = ProjectorTest::Probed.parse(stream_id: '222') 156 | 157 | actions = ProjectorTest::StateStoredWithReactions.handle(e1, replaying: false) 158 | expect(actions.map(&:class)).to eq([Sourced::Actions::Sync]) 159 | run_sync_blocks(actions) 160 | actions = ProjectorTest::StateStoredWithReactions.handle(e2, replaying: false) 161 | expect(actions.map(&:class)).to eq([Sourced::Actions::Sync, Sourced::Actions::AppendNext]) 162 | expect(actions.last.messages.map(&:class)).to eq([ProjectorTest::NextCommand2]) 163 | end 164 | 165 | it 'does not react if replaying' do 166 | e1 = ProjectorTest::Added.parse(stream_id: '222', payload: { amount: 12 }) 167 | e2 = ProjectorTest::Probed.parse(stream_id: '222') 168 | 169 | actions = ProjectorTest::StateStoredWithReactions.handle(e1, replaying: false) 170 | expect(actions.map(&:class)).to eq([Sourced::Actions::Sync]) 171 | run_sync_blocks(actions) 172 | actions = ProjectorTest::StateStoredWithReactions.handle(e2, replaying: true) 173 | expect(actions.map(&:class)).to eq([Sourced::Actions::Sync]) 174 | end 175 | 176 | it 'rejects reactions to events not handled by .event handlers' do 177 | expect { 178 | Class.new(Sourced::Projector::StateStored) do 179 | reaction ProjectorTest::Added do |_state, _event| 180 | end 181 | end 182 | }.to raise_error(ArgumentError) 183 | end 184 | end 185 | 186 | describe Sourced::Projector::EventSourced do 187 | it 'has consumer info' do 188 | expect(ProjectorTest::EventSourced.consumer_info.group_id).to eq('ProjectorTest::EventSourced') 189 | end 190 | 191 | specify 'it builds state from history, returns sync action to persist it' do 192 | e1 = ProjectorTest::Added.parse(stream_id: '111', payload: { amount: 10 }) 193 | e2 = ProjectorTest::Added.parse(stream_id: '111', payload: { amount: 5 }) 194 | 195 | # In Sourced's arch, the new message is included in the history 196 | 
actions = ProjectorTest::EventSourced.handle(e2, replaying: false, history: [e1, e2]) 197 | run_sync_blocks(actions) 198 | 199 | obj, last_event_type = ProjectorTest::STORE['111'] 200 | expect(obj.total).to eq(15) 201 | expect(last_event_type).to eq('prtest.added') 202 | end 203 | end 204 | 205 | private def run_sync_blocks(actions) 206 | actions.filter{ |a| a.is_a?(Sourced::Actions::Sync) }.each(&:call) 207 | end 208 | end 209 | -------------------------------------------------------------------------------- /lib/sourced/message.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'sourced/types' 4 | 5 | # A superclass and registry to define event types 6 | # for example for an event-driven or event-sourced system. 7 | # All events have an "envelope" set of attributes, 8 | # including unique ID, stream_id, type, timestamp, causation ID, 9 | # event subclasses have a type string (ex. 'users.name.updated') and an optional payload 10 | # This class provides a `.define` method to create new event types with a type and optional payload struct, 11 | # a `.from` method to instantiate the correct subclass from a hash, ex. when deserializing from JSON or a web request. 12 | # and a `#follow` method to produce new events based on a previous event's envelope, where the #causation_id and #correlation_id 13 | # are set to the parent event 14 | # @example 15 | # 16 | # # Define event struct with type and payload 17 | # UserCreated = Message.define('users.created') do 18 | # attribute :name, Types::String 19 | # attribute :email, Types::Email 20 | # end 21 | # 22 | # # Instantiate a full event with .new 23 | # user_created = UserCreated.new(stream_id: 'user-1', payload: { name: 'Joe', email: '...' 
}) 24 | # 25 | # # Use the `.from(Hash) => Message` factory to lookup event class by `type` and produce the right instance 26 | # user_created = Message.from(type: 'users.created', stream_id: 'user-1', payload: { name: 'Joe', email: '...' }) 27 | # 28 | # # Use #follow(payload Hash) => Message to produce events following a command or parent event 29 | # create_user = CreateUser.new(...) 30 | # user_created = create_user.follow(UserCreated, name: 'Joe', email: '...') 31 | # user_created.causation_id == create_user.id 32 | # user_created.correlation_id == create_user.correlation_id 33 | # user_created.stream_id == create_user.stream_id 34 | # 35 | # ## Message registries 36 | # Each Message class has its own registry of sub-classes. 37 | # You can use the top-level Sourced::Message.from(hash) to instantiate all message types. 38 | # You can also scope the lookup by sub-class. 39 | # 40 | # @example 41 | # 42 | # class PublicCommand < Sourced::Message; end 43 | # 44 | # DoSomething = PublicCommand.define('commands.do_something') 45 | # 46 | # # Use .from scoped to PublicCommand subclass 47 | # # to ensure that only PublicCommand subclasses are accessible. 48 | # cmd = PublicCommand.from(type: 'commands.do_something', payload: { ... }) 49 | # 50 | # ## JSON Schemas 51 | # Plumb data structs support `.to_json_schema`, so you can document all events in the registry with something like 52 | # 53 | # Message.subclasses.map(&:to_json_schema) 54 | # 55 | module Sourced 56 | UnknownMessageError = Class.new(ArgumentError) 57 | PastMessageDateError = Class.new(ArgumentError) 58 | 59 | class Message < Types::Data 60 | attribute :id, Types::AutoUUID 61 | attribute :stream_id, Types::String.present 62 | attribute :type, Types::String.present 63 | attribute :created_at, Types::Forms::Time.default { Time.now } #Types::JSON::AutoUTCTime 64 | attribute? :causation_id, Types::UUID::V4 65 | attribute? 
:correlation_id, Types::UUID::V4 66 | attribute :seq, Types::Integer.default(1) 67 | attribute :metadata, Types::Hash.default(Plumb::BLANK_HASH) 68 | attribute :payload, Types::Static[nil] 69 | 70 | class Registry 71 | def initialize(message_class) 72 | @message_class = message_class 73 | @lookup = {} 74 | end 75 | 76 | def keys = @lookup.keys 77 | def subclasses = message_class.subclasses 78 | 79 | def []=(key, klass) 80 | @lookup[key] = klass 81 | end 82 | 83 | def [](key) 84 | klass = lookup[key] 85 | return klass if klass 86 | 87 | subclasses.each do |c| 88 | klass = c.registry[key] 89 | return klass if klass 90 | end 91 | nil 92 | end 93 | 94 | def inspect 95 | %(<#{self.class}:#{object_id} #{lookup.size} keys, #{subclasses.size} child registries>) 96 | end 97 | 98 | private 99 | 100 | attr_reader :lookup, :message_class 101 | end 102 | 103 | def self.registry 104 | @registry ||= Registry.new(self) 105 | end 106 | 107 | class Payload < Types::Data 108 | def [](key) = attributes[key] 109 | def fetch(...) = to_h.fetch(...) 110 | end 111 | 112 | def self.define(type_str, payload_schema: nil, &payload_block) 113 | type_str.freeze unless type_str.frozen? 114 | if registry[type_str] 115 | Sourced.config.logger.warn("Message '#{type_str}' already defined") 116 | end 117 | 118 | registry[type_str] = Class.new(self) do 119 | def self.node_name = :data 120 | define_singleton_method(:type) { type_str } 121 | 122 | attribute :type, Types::Static[type_str] 123 | if payload_schema 124 | attribute :payload, Payload[payload_schema] 125 | elsif block_given? 126 | attribute :payload, Payload, &payload_block if block_given? 
127 | end 128 | end 129 | end 130 | 131 | def self.from(attrs) 132 | klass = registry[attrs[:type]] 133 | raise UnknownMessageError, "Unknown event type: #{attrs[:type]}" unless klass 134 | 135 | klass.new(attrs) 136 | end 137 | 138 | def self.build(stream_id, payload = nil) 139 | attrs = {stream_id:} 140 | attrs[:payload] = payload if payload 141 | parse(attrs) 142 | end 143 | 144 | def initialize(attrs = {}) 145 | unless attrs[:payload] 146 | attrs = attrs.merge(payload: {}) 147 | end 148 | super(attrs) 149 | end 150 | 151 | def with_metadata(meta = {}) 152 | return self if meta.empty? 153 | 154 | attrs = metadata.merge(meta) 155 | with(metadata: attrs) 156 | end 157 | 158 | def follow(event_class, payload_attrs = nil) 159 | follow_with_attributes( 160 | event_class, 161 | payload: payload_attrs 162 | ) 163 | end 164 | 165 | def follow_with_seq(event_class, seq, payload_attrs = nil) 166 | follow_with_attributes( 167 | event_class, 168 | attrs: { seq: }, 169 | payload: payload_attrs 170 | ) 171 | end 172 | 173 | def follow_with_stream_id(event_class, stream_id, payload_attrs = nil) 174 | follow_with_attributes( 175 | event_class, 176 | attrs: { stream_id: }, 177 | payload: payload_attrs 178 | ) 179 | end 180 | 181 | def follow_with_attributes(event_class, attrs: {}, payload: nil, metadata: nil) 182 | meta = self.metadata 183 | meta = meta.merge(metadata) if metadata 184 | attrs = { stream_id:, causation_id: id, correlation_id:, metadata: meta }.merge(attrs) 185 | attrs[:payload] = payload.to_h if payload 186 | event_class.parse(attrs) 187 | end 188 | 189 | def correlate(message) 190 | attrs = { 191 | causation_id: id, 192 | correlation_id:, 193 | metadata: metadata.merge(message.metadata || Plumb::BLANK_HASH) 194 | } 195 | message.with(attrs) 196 | end 197 | 198 | # A copy of a message with a new stream_id 199 | # @param stream_id [String, #stream_id] 200 | # @return [Message] 201 | def to(stream_id) 202 | stream_id = stream_id.stream_id if 
stream_id.respond_to?(:stream_id) 203 | with(stream_id:) 204 | end 205 | 206 | def at(datetime) 207 | if datetime < created_at 208 | raise PastMessageDateError, "Message #{type} can't be delayed to a date in the past" 209 | end 210 | with(created_at: datetime) 211 | end 212 | 213 | def to_json(*) 214 | to_h.to_json(*) 215 | end 216 | 217 | private 218 | 219 | def prepare_attributes(attrs) 220 | attrs[:correlation_id] = attrs[:id] unless attrs[:correlation_id] 221 | attrs[:causation_id] = attrs[:id] unless attrs[:causation_id] 222 | attrs 223 | end 224 | end 225 | 226 | class Command < Message; end 227 | class Event < Message; end 228 | end 229 | -------------------------------------------------------------------------------- /lib/sourced/backends/pg_pub_sub.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'sequel' 4 | require 'thread' 5 | require 'json' 6 | 7 | module Sourced 8 | module Backends 9 | # a PubSub implementation using Postgres' LISTEN/NOTIFY 10 | # 11 | # This class provides a publish-subscribe mechanism using Postgres LISTEN/NOTIFY, 12 | # with connection pooling - each channel name reuses a single database 13 | # listener per process, allowing multiple subscribers to share the same listener thread / fiber. 
14 | # Relies on Sourced's configured Executor to use Threads or Fibers for concurrency 15 | # 16 | # @example Publishing a message 17 | # backend = Sourced.config.backend 18 | # event = MyEvent.new(stream_id: '111', payload: 'hello') 19 | # backend.pub_sub.publish('my_channel', event) 20 | # 21 | # @example Subscribing to messages 22 | # backend = Sourced.config.backend 23 | # channel = backend.pub_sub.subscribe('my_channel') 24 | # channel.start do |event, ch| 25 | # case event 26 | # when MyEvent 27 | # puts "Received: #{event}" 28 | # end 29 | # end 30 | # 31 | # @example Multiple subscribers to the same channel 32 | # # Both threads listen to the same channel but receive all messages 33 | # Thread.new do 34 | # ch1 = backend.pub_sub.subscribe('events') 35 | # ch1.start { |event| puts "Subscriber 1: #{event}" } 36 | # end 37 | # 38 | # Thread.new do 39 | # ch2 = backend.pub_sub.subscribe('events') 40 | # ch2.start { |event| puts "Subscriber 2: #{event}" } 41 | # end 42 | # 43 | class PGPubSub 44 | # Listener holds a single DB connection per channel name/process 45 | # Channel instances with the same name instantiated in different threads/fibers but the same process 46 | # share a Listener instance 47 | # The Listener receives messages and dispatches them to all channels. 
48 | class Listener 49 | # @param db [Sequel::Database] the database connection for listening 50 | # @param channel_name [String] the name of the Postgres channel to listen on 51 | # @param timeout [Integer] the timeout in seconds for listen operations (default: 2) 52 | # @param logger [Logger] 53 | def initialize(db:, channel_name:, timeout: 2, logger:) 54 | @db = db 55 | @channel_name = channel_name 56 | @logger = logger 57 | @channels = {} 58 | @timeout = timeout 59 | @queue = Sourced.config.executor.new_queue 60 | @running = true 61 | @info = "[#{[self.class.name, @channel_name, Process.pid, object_id].join(' ')}]" 62 | end 63 | 64 | # Subscribe a channel to this listener 65 | # 66 | # @param channel [Channel] the channel to subscribe 67 | # @return [self] 68 | def subscribe(channel) 69 | start 70 | @queue << [:subscribe, channel] 71 | self 72 | end 73 | 74 | # Unsubscribe a channel from this listener 75 | # 76 | # @param channel [Channel] the channel to unsubscribe 77 | # @return [self] 78 | def unsubscribe(channel) 79 | @queue << [:unsubscribe, channel] 80 | self 81 | end 82 | 83 | # Start the listener threads for database listening and message dispatching 84 | # 85 | # @return [self] 86 | def start 87 | return if @control 88 | 89 | @control = Sourced.config.executor.start(wait: false) do |t| 90 | t.spawn do 91 | while (msg = @queue.pop) 92 | case msg 93 | in :stop 94 | @running = false 95 | # Stop all channels? 96 | @logger.info "#{@info} stopping" 97 | @channels.values.each(&:stop) 98 | in [:unsubscribe, channel] 99 | if @channels.delete(channel.object_id) 100 | @logger.info { "#{@info} unsubscribe channel #{channel.object_id}" } 101 | if @channels.empty? 102 | @logger.info { "#{@info} all channels unsubscribed." 
} 103 | end 104 | end 105 | in [:subscribe, channel] 106 | @logger.info { "#{@info} subscribe channel #{channel.object_id}" } 107 | @channels[channel.object_id] ||= channel 108 | in [:dispatch, message] 109 | @channels.values.each { |ch| ch << message } 110 | end 111 | end 112 | 113 | @logger.info "#{@info} Stopped" 114 | end 115 | 116 | t.spawn do 117 | @db.listen(@channel_name, timeout: @timeout, loop: true) do |_channel, _pid, payload| 118 | break unless @running 119 | 120 | message = parse(payload) 121 | @queue << [:dispatch, message] 122 | end 123 | end 124 | end 125 | 126 | @logger.info { "#{@info} Started" } 127 | end 128 | 129 | # Stop the listener threads and unsubscribe all channels 130 | # 131 | # @return [self] 132 | def stop 133 | @queue << :stop 134 | @control&.wait 135 | @control = nil 136 | @logger.info "#{@info} Stopped" 137 | self 138 | end 139 | 140 | private def parse(payload) 141 | data = JSON.parse(payload, symbolize_names: true) 142 | Sourced::Message.from(data) 143 | end 144 | end 145 | 146 | # Initialize a new PubSub instance with database and logger 147 | # 148 | # @param db [Sequel::Database] the database connection for publishing and listening 149 | # @param logger [Logger] the logger instance for recording events 150 | def initialize(db:, logger:) 151 | @db = db 152 | @logger = logger 153 | @mutex = Mutex.new 154 | @listeners ||= {} 155 | end 156 | 157 | # Subscribe to messages on a channel 158 | # 159 | # Creates or reuses a listener for the given channel name and returns a new channel object 160 | # that can be used to receive messages. Multiple subscribers to the same channel share a 161 | # single database listener per process. 
162 | # 163 | # @param channel_name [String] the name of the channel to subscribe to 164 | # @return [Channel] a new channel object for receiving messages 165 | def subscribe(channel_name) 166 | @mutex.synchronize do 167 | listener = @listeners[channel_name] ||= Listener.new(db: @db, channel_name:, logger: @logger) 168 | ch = Channel.new(name: channel_name, listener:) 169 | listener.subscribe(ch) 170 | ch 171 | end 172 | end 173 | 174 | # Publish a message to a channel 175 | # 176 | # Sends an event to all subscribers on the given channel via Postgres NOTIFY. 177 | # 178 | # @param channel_name [String] the name of the channel to publish to 179 | # @param event [Sourced::Message] the message to publish 180 | # @return [self] 181 | def publish(channel_name, event) 182 | event_data = JSON.dump(event.to_h) 183 | @db.run(Sequel.lit('SELECT pg_notify(?, ?)', channel_name, event_data)) 184 | self 185 | end 186 | end 187 | 188 | class Channel 189 | NOTIFY_CHANNEL = 'sourced-scheduler-ch' 190 | 191 | attr_reader :name 192 | 193 | # Initialize a new channel for receiving messages 194 | # 195 | # @param name [String] the name of the channel (default: NOTIFY_CHANNEL) 196 | # @param listener [Listener] the listener managing this channel 197 | def initialize(name: NOTIFY_CHANNEL, listener:) 198 | @name = name 199 | @running = false 200 | @listener = listener 201 | @queue = Sourced.config.executor.new_queue 202 | end 203 | 204 | # Add a message to this channel's queue 205 | # 206 | # @param message [Sourced::Message] the message to queue 207 | # @return [self] 208 | def <<(message) 209 | @queue << message 210 | self 211 | end 212 | 213 | # Start listening to incoming events on this channel 214 | # 215 | # Blocks until the channel is stopped. Messages are passed to either the provided 216 | # handler or the given block. 
217 | # 218 | # @param handler [#call, nil] a callable object to use as an event handler 219 | # @yieldparam message [Sourced::Message] the incoming message 220 | # @yieldparam channel [Channel] this channel instance 221 | # @return [self] 222 | def start(handler: nil, &block) 223 | return self if @running 224 | 225 | @running = true 226 | 227 | handler ||= block 228 | 229 | while (msg = @queue.pop) 230 | handler.call(msg, self) 231 | end 232 | 233 | @running = false 234 | 235 | self 236 | end 237 | 238 | # Stop listening on this channel 239 | # 240 | # Unsubscribes from the listener and marks the channel to stop processing messages. 241 | # The stop takes effect on the next iteration of the message loop. 242 | # 243 | # @return [self] 244 | def stop 245 | return self unless @running 246 | 247 | @listener.unsubscribe self 248 | @queue << nil 249 | self 250 | end 251 | end 252 | end 253 | end 254 | -------------------------------------------------------------------------------- /spec/actor_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | module TestDomain 6 | TodoList = Struct.new(:archive_status, :seq, :id, :status, :items) 7 | 8 | AddItem = Sourced::Message.define('actor.todos.add') do 9 | attribute :name, String 10 | end 11 | 12 | Notify = Sourced::Message.define('actor.todos.notify') do 13 | attribute :item_count, Integer 14 | end 15 | 16 | ListStarted = Sourced::Message.define('actor.todos.started') 17 | ArchiveRequested = Sourced::Message.define('actor.todos.archive_requested') 18 | ConfirmArchive = Sourced::Message.define('actor.todos.archive_confirm') 19 | ArchiveConfirmed = Sourced::Message.define('actor.todos.archive_confirmed') 20 | 21 | class Tracer 22 | attr_reader :calls 23 | 24 | def initialize 25 | @calls = [] 26 | end 27 | end 28 | 29 | SyncTracer = Tracer.new 30 | 31 | class TodoListActor < Sourced::Actor 32 | state do |id| 33 | 
TodoList.new(nil, 0, id, :new, []) 34 | end 35 | 36 | command AddItem do |list, cmd| 37 | event ListStarted if list.status == :new 38 | event :item_added, name: cmd.payload.name 39 | end 40 | 41 | command :add_one, name: String do |_list, cmd| 42 | event :item_added, name: cmd.payload.name 43 | end 44 | 45 | event ListStarted do |list, _event| 46 | list.status = :active 47 | end 48 | 49 | # Test that this block is returned after #decide 50 | # wrapperd as a Sourced::Actions::Sync 51 | # to be run within the same transaction as the append action 52 | sync do |command:, events:, state:| 53 | SyncTracer.calls << [command, events, state] 54 | end 55 | 56 | event :item_added, name: String do |list, event| 57 | list.items << event.payload 58 | end 59 | 60 | reaction ListStarted do |list, event| 61 | dispatch(Notify, item_count: list.items.size).to('different-stream') 62 | dispatch(Notify, item_count: list.items.size).at(Time.now + 30) 63 | end 64 | 65 | reaction :item_added do |list, event| 66 | dispatch(Notify, item_count: list.items.size) 67 | end 68 | end 69 | end 70 | 71 | RSpec.describe Sourced::Actor do 72 | describe '.handled_messages' do 73 | it 'returns commands and events to react to' do 74 | expect(TestDomain::TodoListActor.handled_messages).to match_array([ 75 | TestDomain::AddItem, 76 | TestDomain::TodoListActor::AddOne, 77 | TestDomain::ListStarted, 78 | TestDomain::TodoListActor::ItemAdded, 79 | ]) 80 | end 81 | end 82 | 83 | describe '.command' do 84 | it 'raises if the message is also reacted to' do 85 | klass = Class.new(described_class) 86 | klass.reaction TestDomain::ListStarted do |_| 87 | end 88 | expect { 89 | klass.command TestDomain::ListStarted do |_, _| 90 | end 91 | }.to raise_error(Sourced::Actor::DualMessageRegistrationError) 92 | end 93 | end 94 | 95 | describe '.reaction' do 96 | it 'raises if the message is also registered as a command' do 97 | klass = Class.new(described_class) 98 | klass.command TestDomain::ListStarted do |_, _| 99 | 
end 100 | expect { 101 | klass.reaction TestDomain::ListStarted do |_| 102 | end 103 | }.to raise_error(Sourced::Actor::DualMessageRegistrationError) 104 | end 105 | 106 | it 'supports multiple events as symbols' do 107 | klass = Class.new(described_class) do 108 | event(:e1) 109 | event(:e2) 110 | reaction :e1, :e2 do |_, _| 111 | end 112 | end 113 | 114 | expect(klass.handled_messages).to match_array([ 115 | klass::E1, 116 | klass::E2, 117 | ]) 118 | end 119 | end 120 | 121 | describe '#evolve' do 122 | it 'evolves internal state' do 123 | actor = TestDomain::TodoListActor.new 124 | e1 = TestDomain::ListStarted.parse(stream_id: actor.id, seq: 1) 125 | e2 = TestDomain::TodoListActor::ItemAdded.parse( 126 | stream_id: actor.id, 127 | seq: 2, 128 | payload: { name: 'Shoes' } 129 | ) 130 | state = actor.evolve([e1, e2]) 131 | expect(state).to eq(actor.state) 132 | expect(actor.state.status).to eq(:active) 133 | expect(actor.seq).to eq(2) 134 | end 135 | end 136 | 137 | describe '#decide' do 138 | it 'returns events with the right sequence, updates state' do 139 | actor = TestDomain::TodoListActor.new 140 | cmd = TestDomain::AddItem.parse( 141 | stream_id: actor.id, 142 | payload: { name: 'Shoes' } 143 | ) 144 | events = actor.decide(cmd) 145 | expect(events.map(&:class)).to eq([TestDomain::ListStarted, TestDomain::TodoListActor::ItemAdded]) 146 | expect(events.map(&:seq)).to eq([1, 2]) 147 | expect(actor.seq).to eq(2) 148 | expect(actor.state.items.size).to eq(1) 149 | cmd2 = cmd.with(seq: 1) 150 | events = actor.decide(cmd2) 151 | expect(actor.seq).to eq(3) 152 | expect(events.map(&:class)).to eq([TestDomain::TodoListActor::ItemAdded]) 153 | expect(events.map(&:seq)).to eq([3]) 154 | expect(actor.state.items.size).to eq(2) 155 | end 156 | end 157 | 158 | describe '#react' do 159 | it 'reacts to events and return commands' do 160 | now = Time.now 161 | Timecop.freeze(now) do 162 | actor = TestDomain::TodoListActor.new 163 | event = 
TestDomain::ListStarted.parse(stream_id: actor.id) 164 | commands = actor.react(event) 165 | expect(commands.map(&:class)).to eq([TestDomain::Notify, TestDomain::Notify]) 166 | expect(commands.first.metadata[:producer]).to eq('TestDomain::TodoListActor') 167 | expect(commands.map(&:created_at)).to eq([now, now + 30]) 168 | expect(commands.map(&:stream_id)).to eq(['different-stream', actor.id]) 169 | end 170 | end 171 | end 172 | 173 | describe '.handle' do 174 | context 'with a command to decide on' do 175 | let(:cmd) do 176 | TestDomain::AddItem.parse( 177 | stream_id: Sourced.new_stream_id, 178 | seq: 1, 179 | metadata: { foo: 'bar' }, 180 | payload: { name: 'Shoes' } 181 | ) 182 | end 183 | 184 | it 'returns an array with Sourced::Actions::AppendAfter Sourced::Actions::Sync actions' do 185 | result = TestDomain::TodoListActor.handle(cmd, history: [cmd]) 186 | expect(result.map(&:class)).to eq([Sourced::Actions::AppendAfter, Sourced::Actions::Sync]) 187 | end 188 | 189 | specify 'the AppendAfter action contains messages to append' do 190 | result = TestDomain::TodoListActor.handle(cmd, history: [cmd]) 191 | append_action = result[0] 192 | expect(append_action.stream_id).to eq(cmd.stream_id) 193 | # two new events, seq 2, and 3 194 | expect(append_action.messages.map(&:seq)).to eq([2, 3]) 195 | append_action.messages[0].tap do |msg| 196 | expect(msg).to be_a(TestDomain::ListStarted) 197 | expect(msg.stream_id).to eq(cmd.stream_id) 198 | expect(msg.metadata[:foo]).to eq('bar') 199 | end 200 | append_action.messages[1].tap do |msg| 201 | expect(msg).to be_a(TestDomain::TodoListActor::ItemAdded) 202 | expect(msg.stream_id).to eq(cmd.stream_id) 203 | expect(msg.metadata[:foo]).to eq('bar') 204 | end 205 | end 206 | 207 | specify 'the Sync action contains a side-effect to run' do 208 | result = TestDomain::TodoListActor.handle(cmd, history: [cmd]) 209 | append_action = result[0] 210 | sync_action = result[1] 211 | 212 | expect(TestDomain::SyncTracer.calls).to eq([]) 
213 | sync_action.call 214 | expect(TestDomain::SyncTracer.calls.size).to eq(1) 215 | TestDomain::SyncTracer.calls.first.tap do |(command, events, state)| 216 | expect(command).to eq(cmd) 217 | expect(events).to eq(append_action.messages) 218 | expect(state).to be_a(TestDomain::TodoList) 219 | expect(state.items.size).to eq(1) 220 | end 221 | end 222 | end 223 | 224 | context 'with an event to react to' do 225 | let(:stream_id) { Sourced.new_stream_id } 226 | 227 | let(:history) do 228 | [ 229 | TestDomain::AddItem.parse(stream_id:, seq: 1, payload: { name: 'test' }), 230 | TestDomain::ListStarted.parse(stream_id:, seq: 2), 231 | TestDomain::TodoListActor::ItemAdded.parse(stream_id:, seq: 3, payload: { name: 'test' }) 232 | ] 233 | end 234 | 235 | it 'returns new commands to append' do 236 | result = TestDomain::TodoListActor.handle(history.last, history:) 237 | expect(result).to be_a(Array) 238 | expect(result.first).to be_a(Sourced::Actions::AppendNext) 239 | expect(result.first.messages.map(&:stream_id)).to eq([stream_id]) 240 | expect(result.first.messages.map(&:class)).to eq([TestDomain::Notify]) 241 | expect(result.first.messages.first.payload.item_count).to eq(1) 242 | end 243 | 244 | it 'returns multiple commands to append or schedule' do 245 | now = Time.now 246 | Timecop.freeze(now) do 247 | result = TestDomain::TodoListActor.handle(history[1], history:) 248 | expect(result).to be_a(Array) 249 | expect(result.map(&:class)).to eq [Sourced::Actions::AppendNext, Sourced::Actions::Schedule] 250 | expect(result[0].messages.size).to eq(1) 251 | result[0].messages[0].tap do |msg| 252 | expect(msg).to be_a TestDomain::Notify 253 | expect(msg.stream_id).to eq('different-stream') 254 | expect(msg.payload.item_count).to eq(1) 255 | end 256 | expect(result[1].messages.size).to eq(1) 257 | expect(result[1].at).to eq(now + 30) 258 | result[1].messages[0].tap do |msg| 259 | expect(msg).to be_a TestDomain::Notify 260 | expect(msg.stream_id).to eq(stream_id) 261 | 
expect(msg.payload.item_count).to eq(1) 262 | end 263 | end 264 | end 265 | end 266 | end 267 | end 268 | -------------------------------------------------------------------------------- /lib/sourced/backends/test_backend.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'thread' 4 | 5 | module Sourced 6 | module Backends 7 | class TestBackend 8 | ACTIVE = 'active' 9 | STOPPED = 'stopped' 10 | 11 | attr_reader :pubsub 12 | 13 | def initialize 14 | clear! 15 | @mutex = Mutex.new 16 | @in_tx = false 17 | @tx_id = nil 18 | end 19 | 20 | def messages = @state.messages 21 | 22 | def inspect 23 | %(<#{self.class} messages:#{messages.size} streams:#{@state.messages_by_stream_id.size}>) 24 | end 25 | 26 | def clear! 27 | @state = State.new 28 | @pubsub = TestPubSub.new 29 | end 30 | 31 | def installed? = true 32 | 33 | def handling_reactor_exceptions(_reactor, &) 34 | yield 35 | end 36 | 37 | def reserve_next_for_reactor(reactor, worker_id: Process.pid.to_s, &) 38 | group_id = reactor.consumer_info.group_id 39 | start_from = reactor.consumer_info.start_from.call 40 | transaction do 41 | group = @state.groups[group_id] 42 | if group.active? && (group.retry_at.nil? || group.retry_at <= Time.now) 43 | group.reserve_next(reactor.handled_messages, start_from, method(:process_actions), &) 44 | end 45 | end 46 | end 47 | 48 | private def process_actions(group_id, actions, ack, event, offset) 49 | should_ack = false 50 | actions = [actions] unless actions.is_a?(Array) 51 | actions = actions.compact 52 | # Empty actions is assumed to be an ACK 53 | return ack.() if actions.empty? 
54 | 55 | actions.each do |action| 56 | case action 57 | when Actions::OK 58 | should_ack = true 59 | 60 | when Actions::Ack 61 | offset.locked = false 62 | ack_on(group_id, action.message_id) 63 | 64 | when Actions::AppendNext 65 | messages = correlate(event, action.messages) 66 | messages.group_by(&:stream_id).each do |stream_id, stream_messages| 67 | append_next_to_stream(stream_id, stream_messages) 68 | end 69 | should_ack = true 70 | 71 | when Actions::AppendAfter 72 | append_to_stream(action.stream_id, correlate(event, action.messages)) 73 | should_ack = true 74 | 75 | when Actions::Schedule 76 | schedule_messages correlate(event, action.messages), at: action.at 77 | should_ack = true 78 | 79 | when Actions::Sync 80 | action.call 81 | should_ack = true 82 | 83 | when Actions::RETRY 84 | # Don't ack 85 | 86 | else 87 | raise ArgumentError, "Expected Sourced::Actions type, but got: #{action.class}" 88 | end 89 | end 90 | 91 | ack.() if should_ack 92 | end 93 | 94 | private def correlate(source_message, messages) 95 | messages.map { |e| source_message.correlate(e) } 96 | end 97 | 98 | def ack_on(group_id, event_id, &) 99 | transaction do 100 | group = @state.groups[group_id] 101 | group.ack_on(event_id, &) 102 | end 103 | end 104 | 105 | def register_consumer_group(group_id) 106 | transaction do 107 | @state.groups[group_id] 108 | end 109 | end 110 | 111 | def updating_consumer_group(group_id, &) 112 | transaction do 113 | group = @state.groups[group_id] 114 | yield group 115 | end 116 | end 117 | 118 | # @param group_id [String] 119 | def start_consumer_group(group_id) 120 | group_id = group_id.consumer_info.group_id if group_id.respond_to?(:consumer_info) 121 | transaction do 122 | group = @state.groups[group_id] 123 | group.error_context = {} 124 | group.status = ACTIVE 125 | group.retry_at = nil 126 | end 127 | end 128 | 129 | def stop_consumer_group(group_id, error = nil) 130 | group_id = group_id.consumer_info.group_id if 
group_id.respond_to?(:consumer_info) 131 | transaction do 132 | group = @state.groups[group_id] 133 | group.stop(error) 134 | end 135 | end 136 | 137 | def reset_consumer_group(group_id) 138 | group_id = group_id.consumer_info.group_id if group_id.respond_to?(:consumer_info) 139 | transaction do 140 | group = @state.groups[group_id] 141 | group.reset! 142 | end 143 | true 144 | end 145 | 146 | def schedule_messages(messages, at: Time.now) 147 | @state.schedule_messages(messages, at:) 148 | true 149 | end 150 | 151 | def update_schedule! 152 | count = 0 153 | transaction do 154 | @state.next_scheduled_messages do |scheduled_messages| 155 | scheduled_messages.group_by(&:stream_id).each do |stream_id, stream_messages| 156 | append_next_to_stream(stream_id, stream_messages) 157 | end 158 | count = scheduled_messages.size 159 | end 160 | count 161 | end 162 | end 163 | 164 | Stats = Data.define(:stream_count, :max_global_seq, :groups) 165 | 166 | def stats 167 | stream_count = @state.messages_by_stream_id.size 168 | max_global_seq = messages.size 169 | groups = @state.groups.values.map(&:to_h) 170 | Stats.new(stream_count, max_global_seq, groups) 171 | end 172 | 173 | # Retrieve a list of recently active streams, ordered by most recent activity. 174 | # This is the in-memory implementation that maintains stream metadata during testing. 
175 | # 176 | # @param limit [Integer] Maximum number of streams to return (defaults to 10) 177 | # @return [Array] Array of Stream objects ordered by updated_at descending 178 | # @see SequelBackend#recent_streams 179 | def recent_streams(limit: 10) 180 | # Input validation (consistent with SequelBackend) 181 | return [] if limit == 0 182 | raise ArgumentError, "limit must be a positive integer" if limit < 0 183 | 184 | @state.streams.values.sort_by(&:updated_at).reverse.take(limit) 185 | end 186 | 187 | def transaction(&) 188 | if @in_tx 189 | yield 190 | else 191 | @mutex.synchronize do 192 | @in_tx = true 193 | @state_snapshot = @state.copy 194 | result = yield 195 | @in_tx = false 196 | @state_snapshot = nil 197 | result 198 | end 199 | end 200 | rescue StandardError => e 201 | @in_tx = false 202 | @state = @state_snapshot if @state_snapshot 203 | raise 204 | end 205 | 206 | # @param stream_id [String] Unique identifier for the event stream 207 | # @param messages [Sourced::Message, Array] Event(s) to append to the stream 208 | # @option max_retries [Integer] Not used in this backend, but kept for interface compatibility 209 | def append_next_to_stream(stream_id, messages, max_retries: 3) 210 | # Handle both single event and array of messages 211 | messages_array = Array(messages) 212 | return true if messages_array.empty? 213 | 214 | transaction do 215 | last_message = @state.messages_by_stream_id[stream_id].last 216 | last_seq = last_message ? last_message.seq : 0 217 | 218 | messages_with_seq = messages_array.map.with_index do |message, index| 219 | message.with(seq: last_seq + index + 1) 220 | end 221 | 222 | append_to_stream(stream_id, messages_with_seq) 223 | end 224 | end 225 | 226 | def append_to_stream(stream_id, messages) 227 | # Handle both single event and array of events 228 | messages_array = Array(messages) 229 | return false if messages_array.empty? 
230 | 231 | transaction do 232 | check_unique_seq!(messages_array) 233 | 234 | messages_array.each do |message| 235 | @state.messages_by_correlation_id[message.correlation_id] << message 236 | @state.messages_by_stream_id[stream_id] << message 237 | @state.messages << message 238 | @state.stream_id_seq_index[seq_key(stream_id, message)] = true 239 | @state.upsert_stream(stream_id, message.seq) 240 | end 241 | end 242 | @state.groups.each_value(&:reindex) 243 | true 244 | end 245 | 246 | def read_correlation_batch(message_id) 247 | message = @state.messages.find { |e| e.id == message_id } 248 | return [] unless message 249 | @state.messages_by_correlation_id[message.correlation_id] 250 | end 251 | 252 | def read_stream(stream_id, after: nil, upto: nil) 253 | messages = @state.messages_by_stream_id[stream_id] 254 | messages = messages.select { |e| e.seq > after } if after 255 | messages = messages.select { |e| e.seq <= upto } if upto 256 | messages 257 | end 258 | 259 | # No-op heartbeats for test backend 260 | def worker_heartbeat(worker_ids, at: Time.now) 261 | Array(worker_ids).size 262 | end 263 | 264 | # No-op stale claim release for test backend 265 | def release_stale_claims(ttl_seconds: 120) 266 | 0 267 | end 268 | 269 | private 270 | 271 | def check_unique_seq!(messages) 272 | duplicate = messages.find do |message| 273 | @state.stream_id_seq_index[seq_key(message.stream_id, message)] 274 | end 275 | if duplicate 276 | raise Sourced::ConcurrentAppendError, "Duplicate stream_id/seq: #{duplicate.stream_id}/#{duplicate.seq}" 277 | end 278 | end 279 | 280 | def seq_key(stream_id, message) 281 | [stream_id, message.seq] 282 | end 283 | end 284 | end 285 | end 286 | 287 | require 'sourced/backends/test_backend/group' 288 | require 'sourced/backends/test_backend/state' 289 | require 'sourced/backends/test_backend/test_pub_sub' 290 | --------------------------------------------------------------------------------