├── lib ├── actioncable-enhanced-postgresql-adapter.rb ├── railtie.rb └── action_cable │ └── subscription_adapter │ └── enhanced_postgresql.rb ├── Gemfile ├── CHANGELOG.md ├── test ├── channel_prefix.rb ├── test_helper.rb ├── common.rb └── postgresql_test.rb ├── LICENSE ├── actioncable-enhanced-postgresql-adapter.gemspec └── README.md /lib/actioncable-enhanced-postgresql-adapter.rb: -------------------------------------------------------------------------------- 1 | require_relative "action_cable/subscription_adapter/enhanced_postgresql" 2 | require_relative "railtie" if defined? Rails::Railtie 3 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source "https://rubygems.org" 4 | 5 | gemspec 6 | 7 | group :development, :test do 8 | gem "activerecord" 9 | gem "debug" 10 | gem "minitest-reporters" 11 | end 12 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | 1.0.2 2 | 3 | - Fix incorrect escaping of large payloads (https://github.com/reclaim-the-stack/actioncable-enhanced-postgresql-adapter/pull/5 and https://github.com/reclaim-the-stack/actioncable-enhanced-postgresql-adapter/pull/6), thanks @chriscz and @bensheldon 4 | 5 | 1.0.1 6 | 7 | - Fix gemspec metadata 8 | 9 | 1.0.0 10 | 11 | - Support > 8000 byte payloads 12 | - Remove hard dependency on ActiveRecord 13 | -------------------------------------------------------------------------------- /lib/railtie.rb: -------------------------------------------------------------------------------- 1 | class ActionCable::SubscriptionAdapter::EnhancedPostgresql 2 | class Railtie < ::Rails::Railtie 3 | initializer "action_cable.enhanced_postgresql_adapter" do 4 | ActiveSupport.on_load(:active_record) do 5 | large_payloads_table = ActionCable::SubscriptionAdapter::EnhancedPostgresql::LARGE_PAYLOADS_TABLE 6 | ActiveRecord::SchemaDumper.ignore_tables << large_payloads_table 7 | end 8 | end 9 | end 10 | end 11 | -------------------------------------------------------------------------------- /test/channel_prefix.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module ChannelPrefixTest 4 | def test_channel_prefix 5 | server2 = ActionCable::Server::Base.new(config: ActionCable::Server::Configuration.new) 6 | server2.config.cable = alt_cable_config.with_indifferent_access 7 | server2.config.logger = Logger.new(StringIO.new).tap { |l| l.level = Logger::UNKNOWN } 8 | 9 | adapter_klass = server2.config.pubsub_adapter 10 | 11 | rx_adapter2 = adapter_klass.new(server2) 12 | tx_adapter2 = adapter_klass.new(server2) 13 | 14 | subscribe_as_queue("channel") do |queue| 15 | subscribe_as_queue("channel", rx_adapter2) do |queue2| 16 | @tx_adapter.broadcast("channel", "hello world") 17 | tx_adapter2.broadcast("channel", "hello world 2") 18 | 19 | assert_equal "hello world", queue.pop 20 | assert_equal "hello world 2", queue2.pop 21 | end 22 | end 23 | end 24 | 25 | def alt_cable_config 26 | cable_config.merge(channel_prefix: "foo") 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /test/test_helper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "debug" 4 | 5 | 
require "action_cable" 6 | require "active_support/testing/autorun" 7 | require "active_support/testing/method_call_assertions" 8 | 9 | require "minitest/reporters" 10 | Minitest::Reporters.use! 11 | 12 | # Set test adapter and logger 13 | ActionCable.server.config.cable = { "adapter" => "test" } 14 | ActionCable.server.config.logger = Logger.new(nil) 15 | 16 | class ActionCable::TestCase < ActiveSupport::TestCase 17 | include ActiveSupport::Testing::MethodCallAssertions 18 | 19 | def wait_for_async 20 | wait_for_executor Concurrent.global_io_executor 21 | end 22 | 23 | def run_in_eventmachine 24 | yield 25 | wait_for_async 26 | end 27 | 28 | def wait_for_executor(executor) 29 | # do not wait forever, wait 2s 30 | timeout = 2 31 | until executor.completed_task_count == executor.scheduled_task_count 32 | sleep 0.1 33 | timeout -= 0.1 34 | raise "Executor could not complete all tasks in 2 seconds" unless timeout > 0 35 | end 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Reclaim the Stack 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /actioncable-enhanced-postgresql-adapter.gemspec: -------------------------------------------------------------------------------- 1 | Gem::Specification.new do |spec| 2 | spec.name = "actioncable-enhanced-postgresql-adapter" 3 | spec.version = "1.0.2" 4 | spec.authors = ["David Backeus"] 5 | spec.email = ["david.backeus@mynewsdesk.com"] 6 | 7 | spec.summary = "ActionCable adapter for PostgreSQL that enhances the default." 8 | spec.description = "Enhances ActionCable's built in Postgres adapter with handling of the 8000 byte limit for PostgreSQL NOTIFY payloads and allows usage without ActiveRecord." 
9 | spec.homepage = "https://github.com/reclaim-the-stack/actioncable-enhanced-postgresql-adapter" 10 | spec.license = "MIT" 11 | spec.required_ruby_version = ">= 2.7.0" 12 | 13 | spec.metadata = { 14 | "homepage_uri" => spec.homepage, 15 | "source_code_uri" => spec.homepage, 16 | "changelog_uri" => "#{spec.homepage}/blob/master/CHANGELOG.md" 17 | } 18 | 19 | spec.files = %w[README.md CHANGELOG.md actioncable-enhanced-postgresql-adapter.gemspec] + Dir["lib/**/*"] 20 | 21 | spec.add_dependency "actioncable", ">= 6.0" 22 | spec.add_dependency "connection_pool", ">= 2.2.5" # Ruby 2.7 compatible version 23 | spec.add_dependency "pg", "~> 1.5" 24 | end 25 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # actioncable-enhanced-postgresql-adapter 2 | 3 | This gem provides an enhanced PostgreSQL adapter for ActionCable. It is based on the original PostgreSQL adapter, but includes the following enhancements: 4 | - Ability to broadcast payloads larger than 8000 bytes 5 | - Not dependent on ActiveRecord (but can still integrate with it if available) 6 | 7 | ### Approach 8 | 9 | To overcome the 8000-byte limit, we temporarily store large payloads in an [unlogged](https://www.crunchydata.com/blog/postgresl-unlogged-tables) database table named `action_cable_large_payloads`. The table is lazily created on first broadcast. 10 | 11 | We then broadcast a payload in the style of `__large_payload:<encrypted-payload-id>`. The listener then decrypts each incoming ID, fetches the original payload from the database, and substitutes it for the placeholder before invoking the subscriber callback. 12 | 13 | The ID is encrypted to prevent spoofing of large payloads by manually broadcasting messages prefixed with `__large_payload:` followed by a guessed auto-incrementing integer. 14 | 15 | Note that payloads smaller than 8000 bytes are sent directly via NOTIFY, as per the original adapter. 16 | 17 | ## Installation 18 | 19 | Add this line to your application's Gemfile: 20 | 21 | ```ruby 22 | gem "actioncable-enhanced-postgresql-adapter" 23 | ``` 24 | 25 | ## Usage 26 | 27 | In your `config/cable.yml` file, change the adapter for relevant environments to `enhanced_postgresql`: 28 | 29 | ```yaml 30 | development: 31 | adapter: enhanced_postgresql 32 | 33 | production: 34 | adapter: enhanced_postgresql 35 | ``` 36 | 37 | ## Configuration 38 | 39 | The following configuration options are available: 40 | 41 | - `payload_encryptor_secret` - The secret used to encrypt large payload IDs. Defaults to `Rails.application.secret_key_base` or the `SECRET_KEY_BASE` environment variable unless explicitly specified. 42 | - `url` - Set this if you want to use a different database than the one provided by ActiveRecord. Must be a valid PostgreSQL connection string. 43 | - `connection_pool_size` - Set this in conjunction with `url` to set the size of the PostgreSQL connection pool used for broadcasts. Defaults to the `RAILS_MAX_THREADS` environment variable or falls back to 5. 44 | 45 | ## Performance 46 | 47 | For payloads smaller than 8000 bytes, which should cover the majority of cases, performance is identical to the original adapter. 48 | 49 | When broadcasting large payloads, one has to consider the overhead of storing and fetching the payload from the database. For low-frequency broadcasting, this overhead is likely negligible. But take care if you're doing very high-frequency broadcasting.
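For illustration, a broadcast like the sketch below (the channel name and payload are made up for this example) transparently takes the database round-trip path because its serialized form exceeds the NOTIFY limit; nothing changes at the call site:

```ruby
# Hypothetical broadcast from a Rails app running the enhanced_postgresql adapter.
# Because the serialized payload is larger than 8000 bytes, the adapter stores it in
# action_cable_large_payloads and sends an encrypted row ID over NOTIFY instead.
payload = { event: "report.ready", body: "x" * 10_000 }
ActionCable.server.broadcast("reports", payload)
```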
50 | 51 | Note that whichever ActionCable adapter you're using, sending large payloads with high frequency is an anti-pattern. Even Redis pub/sub has [limitations](https://redis.io/docs/reference/clients/#output-buffer-limits) to be aware of. 52 | 53 | ### Cleanup of large payloads 54 | 55 | Deletion of stale payloads (2 minutes or older) is triggered every 100 large payload inserts. We do this by looking at the incremental ID generated on insert and checking whether it is evenly divisible by 100. This approach avoids having to manually schedule cleanup jobs while striking a balance between performance and cleanup frequency. 56 | 57 | ## Development 58 | 59 | - Clone the repo 60 | - `bundle install` to install dependencies 61 | - `bundle exec ruby test/postgresql_test.rb` to run tests 62 | -------------------------------------------------------------------------------- /test/common.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "concurrent" 4 | require "active_support/core_ext/hash/indifferent_access" 5 | require "pathname" 6 | 7 | module CommonSubscriptionAdapterTest 8 | WAIT_WHEN_EXPECTING_EVENT = 3 9 | WAIT_WHEN_NOT_EXPECTING_EVENT = 0.2 10 | 11 | def setup 12 | server = ActionCable::Server::Base.new 13 | server.config.cable = cable_config.with_indifferent_access 14 | server.config.logger = Logger.new(StringIO.new).tap { |l| l.level = Logger::UNKNOWN } 15 | 16 | adapter_klass = server.config.pubsub_adapter 17 | 18 | @rx_adapter = adapter_klass.new(server) 19 | @tx_adapter = adapter_klass.new(server) 20 | end 21 | 22 | def teardown 23 | [@rx_adapter, @tx_adapter].uniq.compact.each(&:shutdown) 24 | end 25 | 26 | def subscribe_as_queue(channel, adapter = @rx_adapter) 27 | queue = Queue.new 28 | 29 | callback = -> data { queue << data } 30 | subscribed = Concurrent::Event.new 31 | adapter.subscribe(channel, callback, Proc.new { subscribed.set }) 32 | subscribed.wait(WAIT_WHEN_EXPECTING_EVENT) 33 | assert_predicate subscribed, :set? 34 | 35 | yield queue 36 | 37 | sleep WAIT_WHEN_NOT_EXPECTING_EVENT 38 | assert_empty queue 39 | ensure 40 | adapter.unsubscribe(channel, callback) if subscribed.set?
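# Only attempt to unsubscribe if the subscription was confirmed within the timeout above.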
41 | end 42 | 43 | def test_subscribe_and_unsubscribe 44 | subscribe_as_queue("channel") do |queue| 45 | end 46 | end 47 | 48 | def test_basic_broadcast 49 | subscribe_as_queue("channel") do |queue| 50 | @tx_adapter.broadcast("channel", "hello world") 51 | 52 | assert_equal "hello world", queue.pop 53 | end 54 | end 55 | 56 | def test_broadcast_after_unsubscribe 57 | keep_queue = nil 58 | subscribe_as_queue("channel") do |queue| 59 | keep_queue = queue 60 | 61 | @tx_adapter.broadcast("channel", "hello world") 62 | 63 | assert_equal "hello world", queue.pop 64 | end 65 | 66 | @tx_adapter.broadcast("channel", "hello void") 67 | 68 | sleep WAIT_WHEN_NOT_EXPECTING_EVENT 69 | assert_empty keep_queue 70 | end 71 | 72 | def test_multiple_broadcast 73 | subscribe_as_queue("channel") do |queue| 74 | @tx_adapter.broadcast("channel", "bananas") 75 | @tx_adapter.broadcast("channel", "apples") 76 | 77 | received = [] 78 | 2.times { received << queue.pop } 79 | assert_equal ["apples", "bananas"], received.sort 80 | end 81 | end 82 | 83 | def test_identical_subscriptions 84 | subscribe_as_queue("channel") do |queue| 85 | subscribe_as_queue("channel") do |queue_2| 86 | @tx_adapter.broadcast("channel", "hello") 87 | 88 | assert_equal "hello", queue_2.pop 89 | end 90 | 91 | assert_equal "hello", queue.pop 92 | end 93 | end 94 | 95 | def test_simultaneous_subscriptions 96 | subscribe_as_queue("channel") do |queue| 97 | subscribe_as_queue("other channel") do |queue_2| 98 | @tx_adapter.broadcast("channel", "apples") 99 | @tx_adapter.broadcast("other channel", "oranges") 100 | 101 | assert_equal "apples", queue.pop 102 | assert_equal "oranges", queue_2.pop 103 | end 104 | end 105 | end 106 | 107 | def test_channel_filtered_broadcast 108 | subscribe_as_queue("channel") do |queue| 109 | @tx_adapter.broadcast("other channel", "one") 110 | @tx_adapter.broadcast("channel", "two") 111 | 112 | assert_equal "two", queue.pop 113 | end 114 | end 115 | 116 | def test_long_identifiers 117 | channel_1 = "a" * 100 + "1" 118 | channel_2 = "a" * 100 + "2" 119 | subscribe_as_queue(channel_1) do |queue| 120 | subscribe_as_queue(channel_2) do |queue_2| 121 | @tx_adapter.broadcast(channel_1, "apples") 122 | @tx_adapter.broadcast(channel_2, "oranges") 123 | 124 | assert_equal "apples", queue.pop 125 | assert_equal "oranges", queue_2.pop 126 | end 127 | end 128 | end 129 | end 130 | -------------------------------------------------------------------------------- /lib/action_cable/subscription_adapter/enhanced_postgresql.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "action_cable/subscription_adapter/postgresql" 4 | require "connection_pool" 5 | 6 | module ActionCable 7 | module SubscriptionAdapter 8 | class EnhancedPostgresql < PostgreSQL 9 | MAX_NOTIFY_SIZE = 7997 # documented as 8000 bytes, but there appears to be some overhead in transit 10 | LARGE_PAYLOAD_PREFIX = "__large_payload:" 11 | INSERTS_PER_DELETE = 100 # execute DELETE query every N inserts 12 | 13 | LARGE_PAYLOADS_TABLE = "action_cable_large_payloads" 14 | CREATE_LARGE_TABLE_QUERY = <<~SQL 15 | CREATE UNLOGGED TABLE IF NOT EXISTS #{LARGE_PAYLOADS_TABLE} ( 16 | id SERIAL PRIMARY KEY, 17 | payload TEXT NOT NULL, 18 | created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP 19 | ) 20 | SQL 21 | CREATE_CREATED_AT_INDEX_QUERY = <<~SQL 22 | CREATE INDEX IF NOT EXISTS index_action_cable_large_payloads_on_created_at 23 | ON #{LARGE_PAYLOADS_TABLE} (created_at) 24 | SQL 25 |
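# Single-row queries used for the large payload path. The INSERT returns the generated id,
# which is sent (encrypted) in the NOTIFY payload and also drives the periodic DELETE
# every INSERTS_PER_DELETE inserts.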
INSERT_LARGE_PAYLOAD_QUERY = "INSERT INTO #{LARGE_PAYLOADS_TABLE} (payload, created_at) VALUES ($1, CURRENT_TIMESTAMP) RETURNING id" 26 | SELECT_LARGE_PAYLOAD_QUERY = "SELECT payload FROM #{LARGE_PAYLOADS_TABLE} WHERE id = $1" 27 | DELETE_LARGE_PAYLOAD_QUERY = "DELETE FROM #{LARGE_PAYLOADS_TABLE} WHERE created_at < CURRENT_TIMESTAMP - INTERVAL '2 minutes'" 28 | 29 | def initialize(*) 30 | super 31 | 32 | @url = @server.config.cable[:url] 33 | @connection_pool_size = @server.config.cable[:connection_pool_size] || ENV["RAILS_MAX_THREADS"] || 5 34 | end 35 | 36 | def broadcast(channel, payload) 37 | channel = channel_with_prefix(channel) 38 | 39 | with_broadcast_connection do |pg_conn| 40 | channel = pg_conn.escape_identifier(channel_identifier(channel)) 41 | 42 | if payload.bytesize > MAX_NOTIFY_SIZE 43 | payload_id = insert_large_payload(pg_conn, payload) 44 | 45 | if payload_id % INSERTS_PER_DELETE == 0 46 | pg_conn.exec(DELETE_LARGE_PAYLOAD_QUERY) 47 | end 48 | 49 | # Encrypt payload_id to prevent simple integer ID spoofing 50 | encrypted_payload_id = payload_encryptor.encrypt_and_sign(payload_id) 51 | 52 | payload = "#{LARGE_PAYLOAD_PREFIX}#{encrypted_payload_id}" 53 | end 54 | 55 | pg_conn.exec("NOTIFY #{channel}, #{pg_conn.escape_literal(payload)}") 56 | end 57 | end 58 | 59 | def payload_encryptor 60 | @payload_encryptor ||= begin 61 | secret = @server.config.cable[:payload_encryptor_secret] 62 | secret ||= Rails.application.secret_key_base if defined? Rails 63 | secret ||= ENV["SECRET_KEY_BASE"] 64 | 65 | raise ArgumentError, "Missing payload_encryptor_secret configuration for ActionCable EnhancedPostgresql adapter. You need to either explicitly configure it in cable.yml or set the SECRET_KEY_BASE environment variable." unless secret 66 | 67 | secret_32_byte = Digest::SHA256.digest(secret) 68 | ActiveSupport::MessageEncryptor.new(secret_32_byte) 69 | end 70 | end 71 | 72 | def with_broadcast_connection(&block) 73 | return super unless @url 74 | 75 | connection_pool.with do |pg_conn| 76 | yield pg_conn 77 | end 78 | end 79 | 80 | # Called from the Listener thread 81 | def with_subscriptions_connection(&block) 82 | return super unless @url 83 | 84 | pg_conn = PG::Connection.new(@url) 85 | pg_conn.exec("SET application_name = #{pg_conn.escape_identifier(identifier)}") 86 | yield pg_conn 87 | ensure 88 | pg_conn&.close 89 | end 90 | 91 | private 92 | 93 | def connection_pool 94 | @connection_pool ||= ConnectionPool.new(size: @connection_pool_size, timeout: 5) do 95 | PG::Connection.new(@url) 96 | end 97 | end 98 | 99 | def insert_large_payload(pg_conn, payload) 100 | result = pg_conn.exec_params(INSERT_LARGE_PAYLOAD_QUERY, [payload]) 101 | result.first.fetch("id").to_i 102 | rescue PG::UndefinedTable 103 | pg_conn.exec(CREATE_LARGE_TABLE_QUERY) 104 | pg_conn.exec(CREATE_CREATED_AT_INDEX_QUERY) 105 | retry 106 | end 107 | 108 | # Override needed to ensure we reference our local Listener class 109 | def listener 110 | @listener || @server.mutex.synchronize { @listener ||= Listener.new(self, @server.event_loop) } 111 | end 112 | 113 | class Listener < PostgreSQL::Listener 114 | def invoke_callback(callback, message) 115 | if message.start_with?(LARGE_PAYLOAD_PREFIX) 116 | encrypted_payload_id = message.delete_prefix(LARGE_PAYLOAD_PREFIX) 117 | payload_id = @adapter.payload_encryptor.decrypt_and_verify(encrypted_payload_id) 118 | 119 | @adapter.with_broadcast_connection do |pg_conn| 120 | result = pg_conn.exec_params(SELECT_LARGE_PAYLOAD_QUERY, [payload_id]) 121 | message = 
result.first.fetch("payload") 122 | end 123 | end 124 | 125 | @event_loop.post { super } 126 | end 127 | end 128 | end 129 | end 130 | end 131 | -------------------------------------------------------------------------------- /test/postgresql_test.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require_relative "test_helper" 4 | require_relative "common" 5 | require_relative "channel_prefix" 6 | 7 | require "active_record" 8 | 9 | require "action_cable/subscription_adapter/enhanced_postgresql" 10 | 11 | class PostgresqlAdapterTest < ActionCable::TestCase 12 | include CommonSubscriptionAdapterTest 13 | include ChannelPrefixTest 14 | 15 | def setup 16 | database_config = { "adapter" => "postgresql", "database" => "actioncable_enhanced_postgresql_test" } 17 | 18 | # Create the database unless it already exists 19 | begin 20 | ActiveRecord::Base.establish_connection database_config.merge("database" => "postgres") 21 | ActiveRecord::Base.connection.create_database database_config["database"], encoding: "utf8" 22 | rescue ActiveRecord::DatabaseAlreadyExists 23 | end 24 | 25 | # Connect to the database 26 | ActiveRecord::Base.establish_connection database_config 27 | 28 | begin 29 | ActiveRecord::Base.connection.connect! 30 | rescue 31 | @rx_adapter = @tx_adapter = nil 32 | skip "Couldn't connect to PostgreSQL: #{database_config.inspect}" 33 | end 34 | 35 | super 36 | end 37 | 38 | def teardown 39 | super 40 | 41 | ActiveRecord::Base.connection_handler.clear_all_connections! 42 | end 43 | 44 | def cable_config 45 | { adapter: "enhanced_postgresql", payload_encryptor_secret: SecureRandom.hex(16) } 46 | end 47 | 48 | def test_clear_active_record_connections_adapter_still_works 49 | server = ActionCable::Server::Base.new 50 | server.config.cable = cable_config.with_indifferent_access 51 | server.config.logger = Logger.new(StringIO.new).tap { |l| l.level = Logger::UNKNOWN } 52 | 53 | adapter_klass = Class.new(server.config.pubsub_adapter) do 54 | def active? 55 | !@listener.nil? 56 | end 57 | end 58 | 59 | adapter = adapter_klass.new(server) 60 | 61 | subscribe_as_queue("channel", adapter) do |queue| 62 | adapter.broadcast("channel", "hello world") 63 | assert_equal "hello world", queue.pop 64 | end 65 | 66 | ActiveRecord::Base.connection_handler.clear_reloadable_connections! 67 | 68 | assert adapter.active? 69 | end 70 | 71 | def test_default_subscription_connection_identifier 72 | subscribe_as_queue("channel") { } 73 | 74 | identifiers = ActiveRecord::Base.connection.exec_query("SELECT application_name FROM pg_stat_activity").rows 75 | assert_includes identifiers, ["ActionCable-PID-#{$$}"] 76 | end 77 | 78 | def test_custom_subscription_connection_identifier 79 | server = ActionCable::Server::Base.new 80 | server.config.cable = cable_config.merge(id: "hello-world-42").with_indifferent_access 81 | server.config.logger = Logger.new(StringIO.new).tap { |l| l.level = Logger::UNKNOWN } 82 | 83 | adapter = server.config.pubsub_adapter.new(server) 84 | 85 | subscribe_as_queue("channel", adapter) { } 86 | 87 | identifiers = ActiveRecord::Base.connection.exec_query("SELECT application_name FROM pg_stat_activity").rows 88 | assert_includes identifiers, ["hello-world-42"] 89 | end 90 | 91 | # Postgres has a NOTIFY payload limit of 8000 bytes which requires special handling to avoid 92 | # "PG::InvalidParameterValue: ERROR: payload string too long" errors. 
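# The enhanced adapter works around this limit by storing payloads above MAX_NOTIFY_SIZE in the
# action_cable_large_payloads table and notifying with an encrypted row id instead of the raw payload.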
93 | def test_large_payload_broadcast 94 | large_payloads_table = ActionCable::SubscriptionAdapter::EnhancedPostgresql::LARGE_PAYLOADS_TABLE 95 | ActiveRecord::Base.connection_pool.with_connection do |connection| 96 | connection.execute("DROP TABLE IF EXISTS #{large_payloads_table}") 97 | end 98 | 99 | server = ActionCable::Server::Base.new 100 | server.config.cable = cable_config.with_indifferent_access 101 | server.config.logger = Logger.new(StringIO.new).tap { |l| l.level = Logger::UNKNOWN } 102 | adapter = server.config.pubsub_adapter.new(server) 103 | 104 | large_payload = "a" * (ActionCable::SubscriptionAdapter::EnhancedPostgresql::MAX_NOTIFY_SIZE + 1) 105 | 106 | subscribe_as_queue("channel", adapter) do |queue| 107 | adapter.broadcast("channel", large_payload) 108 | 109 | # The large payload is stored in the database at this point 110 | assert_equal 1, ActiveRecord::Base.connection.query("SELECT COUNT(*) FROM #{large_payloads_table}").first.first 111 | 112 | assert_equal large_payload, queue.pop 113 | end 114 | end 115 | 116 | def test_large_payload_escapes_correctly 117 | large_payloads_table = ActionCable::SubscriptionAdapter::EnhancedPostgresql::LARGE_PAYLOADS_TABLE 118 | ActiveRecord::Base.connection_pool.with_connection do |connection| 119 | connection.execute("DROP TABLE IF EXISTS #{large_payloads_table}") 120 | end 121 | 122 | server = ActionCable::Server::Base.new 123 | server.config.cable = cable_config.with_indifferent_access 124 | server.config.logger = Logger.new(StringIO.new).tap { |l| l.level = Logger::UNKNOWN } 125 | adapter = server.config.pubsub_adapter.new(server) 126 | 127 | ascii_string = (32..126).map(&:chr).join.encode("UTF-8") 128 | 129 | expected_length = (ActionCable::SubscriptionAdapter::EnhancedPostgresql::MAX_NOTIFY_SIZE + 1) 130 | 131 | large_payload = (ascii_string * (1.0 * expected_length/ascii_string.length).ceil)[...expected_length] 132 | 133 | subscribe_as_queue("channel", adapter) do |queue| 134 | adapter.broadcast("channel", large_payload) 135 | 136 | # The large payload is stored in the database at this point 137 | assert_equal 1, ActiveRecord::Base.connection.query("SELECT COUNT(*) FROM #{large_payloads_table}").first.first 138 | 139 | got = queue.pop 140 | 141 | assert_equal large_payload.length, got.length, "Expected lengths to match" 142 | assert_equal large_payload, got, "Expected values to match" 143 | end 144 | end 145 | 146 | def test_automatic_payload_deletion 147 | inserts_per_delete = ActionCable::SubscriptionAdapter::EnhancedPostgresql::INSERTS_PER_DELETE 148 | large_payloads_table = ActionCable::SubscriptionAdapter::EnhancedPostgresql::LARGE_PAYLOADS_TABLE 149 | large_payload = "a" * (ActionCable::SubscriptionAdapter::EnhancedPostgresql::MAX_NOTIFY_SIZE + 1) 150 | pg_conn = ActiveRecord::Base.connection.raw_connection 151 | 152 | # Prep the database so that we are one insert away from a delete. All but one entry should be old 153 | # enough to be reaped on the next broadcast. 
154 | pg_conn.exec("DROP TABLE IF EXISTS #{large_payloads_table}") 155 | pg_conn.exec(ActionCable::SubscriptionAdapter::EnhancedPostgresql::CREATE_LARGE_TABLE_QUERY) 156 | 157 | # Insert 98 stale payloads 158 | (inserts_per_delete - 2).times do 159 | pg_conn.exec("INSERT INTO #{large_payloads_table} (payload, created_at) VALUES ('a', NOW() - INTERVAL '3 minutes') RETURNING id") 160 | end 161 | # Insert 1 fresh payload 162 | new_payload_id = pg_conn.exec("INSERT INTO #{large_payloads_table} (payload, created_at) VALUES ('a', NOW() - INTERVAL '1 minutes') RETURNING id").first.fetch("id") 163 | 164 | # Sanity check that the auto incrementing ID is what we expect 165 | assert_equal inserts_per_delete - 1, new_payload_id 166 | 167 | server = ActionCable::Server::Base.new 168 | server.config.cable = cable_config.with_indifferent_access 169 | server.config.logger = Logger.new(StringIO.new).tap { |l| l.level = Logger::UNKNOWN } 170 | adapter = server.config.pubsub_adapter.new(server) 171 | 172 | adapter.broadcast("channel", large_payload) 173 | 174 | remaining_payload_ids = pg_conn.query("SELECT id FROM #{large_payloads_table} ORDER BY id").values.flatten 175 | assert_equal [inserts_per_delete - 1, inserts_per_delete], remaining_payload_ids 176 | ensure 177 | pg_conn&.close 178 | end 179 | 180 | # Specifying url should bypass ActiveRecord and connect directly to the provided database 181 | def test_explicit_url_configuration 182 | large_payloads_table = ActionCable::SubscriptionAdapter::EnhancedPostgresql::LARGE_PAYLOADS_TABLE 183 | explicit_database = "actioncable_enhanced_postgresql_test_explicit" 184 | 185 | ActiveRecord::Base.connection_pool.with_connection do |connection| 186 | connection.execute("CREATE DATABASE #{explicit_database}") 187 | rescue ActiveRecord::DatabaseAlreadyExists 188 | end 189 | 190 | pg_conn = PG::Connection.open(dbname: explicit_database) 191 | pg_conn.exec("DROP TABLE IF EXISTS #{large_payloads_table}") 192 | 193 | server = ActionCable::Server::Base.new 194 | server.config.cable = cable_config.merge(url: "postgres://localhost/#{explicit_database}").with_indifferent_access 195 | server.config.logger = Logger.new(StringIO.new).tap { |l| l.level = Logger::UNKNOWN } 196 | adapter = server.config.pubsub_adapter.new(server) 197 | 198 | large_payload = "a" * (ActionCable::SubscriptionAdapter::EnhancedPostgresql::MAX_NOTIFY_SIZE + 1) 199 | 200 | subscribe_as_queue("channel", adapter) do |queue| 201 | adapter.broadcast("channel", large_payload) 202 | 203 | # The large payload is stored in the database at this point 204 | assert_equal "1", pg_conn.query("SELECT COUNT(*) FROM #{large_payloads_table}").first.fetch("count") 205 | 206 | assert_equal large_payload, queue.pop 207 | end 208 | ensure 209 | pg_conn&.close 210 | end 211 | end 212 | --------------------------------------------------------------------------------