├── .github ├── dependabot.yml └── workflows │ ├── linux-ruby-head.yml │ └── linux.yml ├── .gitignore ├── ChangeLog ├── Gemfile ├── Gemfile.v0.12 ├── README.md ├── Rakefile ├── VERSION ├── fluent-plugin-sql.gemspec ├── lib └── fluent │ └── plugin │ ├── in_sql.rb │ └── out_sql.rb └── test ├── fixtures └── schema.rb ├── helper.rb └── plugin ├── test_in_sql.rb ├── test_in_sql_with_custom_time.rb ├── test_in_sql_with_state_file.rb └── test_out_sql.rb /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: 'github-actions' 4 | directory: '/' 5 | schedule: 6 | interval: 'weekly' 7 | -------------------------------------------------------------------------------- /.github/workflows/linux-ruby-head.yml: -------------------------------------------------------------------------------- 1 | name: Test with Ruby head 2 | on: 3 | schedule: 4 | - cron: '11 23 * * 0' 5 | workflow_dispatch: 6 | 7 | jobs: 8 | build: 9 | runs-on: ${{ matrix.os }} 10 | strategy: 11 | fail-fast: false 12 | matrix: 13 | ruby: [ 'head' ] 14 | postgres: [ '16', '15', '14', '13', '12' ] 15 | os: 16 | - ubuntu-latest 17 | services: 18 | postgres: 19 | image: postgres:${{ matrix.postgres }} 20 | ports: 21 | - 5432:5432 22 | env: 23 | POSTGRES_USER: postgres 24 | POSTGRES_PASSWORD: password 25 | POSTGRES_DB: test 26 | options: >- 27 | --health-cmd pg_isready 28 | --health-interval 10s 29 | --health-timeout 5s 30 | --health-retries 5 31 | name: Ruby ${{ matrix.ruby }} with PostgreSQL ${{ matrix.postgres }} unit testing on ${{ matrix.os }} 32 | steps: 33 | - uses: actions/checkout@v4 34 | - uses: ruby/setup-ruby@v1 35 | with: 36 | ruby-version: ${{ matrix.ruby }} 37 | bundler-cache: true 38 | - name: prepare database 39 | env: 40 | PGPASSWORD: password 41 | run: | 42 | psql -h localhost -p 5432 -U postgres -c "CREATE ROLE fluentd WITH LOGIN ENCRYPTED PASSWORD 'fluentd';" 43 | psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE fluentd_test OWNER fluentd;" 44 | - name: unit testing 45 | run: bundle exec rake test 46 | -------------------------------------------------------------------------------- /.github/workflows/linux.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | on: 3 | push: 4 | branches: [master] 5 | pull_request: 6 | branches: [master] 7 | 8 | jobs: 9 | build: 10 | runs-on: ${{ matrix.os }} 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | ruby: [ '3.3', '3.2', '3.1', '3.0', '2.7' ] 15 | postgres: [ '16', '15', '14', '13', '12' ] 16 | os: 17 | - ubuntu-latest 18 | services: 19 | postgres: 20 | image: postgres:${{ matrix.postgres }} 21 | ports: 22 | - 5432:5432 23 | env: 24 | POSTGRES_USER: postgres 25 | POSTGRES_PASSWORD: password 26 | POSTGRES_DB: test 27 | options: >- 28 | --health-cmd pg_isready 29 | --health-interval 10s 30 | --health-timeout 5s 31 | --health-retries 5 32 | name: Ruby ${{ matrix.ruby }} with PostgreSQL ${{ matrix.postgres }} unit testing on ${{ matrix.os }} 33 | steps: 34 | - uses: actions/checkout@v4 35 | - uses: ruby/setup-ruby@v1 36 | with: 37 | ruby-version: ${{ matrix.ruby }} 38 | bundler-cache: true 39 | - name: prepare database 40 | env: 41 | PGPASSWORD: password 42 | run: | 43 | psql -h localhost -p 5432 -U postgres -c "CREATE ROLE fluentd WITH LOGIN ENCRYPTED PASSWORD 'fluentd';" 44 | psql -h localhost -p 5432 -U postgres -c "CREATE DATABASE fluentd_test OWNER fluentd;" 45 | - name: unit testing 46 | run: bundle exec rake test 47 
| -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | Gemfile.lock 2 | pkg/* 3 | .vscode 4 | .idea/ 5 | -------------------------------------------------------------------------------- /ChangeLog: -------------------------------------------------------------------------------- 1 | Release 2.3.1 - 2024/02/29 2 | 3 | * in_sql: Fix incompatibility against Ruby 3.2 4 | 5 | Release 2.3.0 - 2021/10/08 6 | 7 | * in_sql: Make table time_format configurable 8 | 9 | Release 2.2.0 - 2021/04/20 10 | 11 | * Upgrade ActiveRecord to 6.1 12 | 13 | Release 2.1.0 - 2020/09/09 14 | 15 | * in_sql: Use Fluent::EventTime instead of Integer to preserve millisecond precision 16 | 17 | Release 2.0.0 - 2020/04/22 18 | 19 | * out_sql: Remove v0.12 API code and use full v1 API. Change buffer format. 20 | 21 | Release 1.1.1 - 2019/05/10 22 | 23 | * out_sql: Support schema_search_path option of PostgreSQL 24 | 25 | Release 1.1.0 - 2018/10/04 26 | 27 | * Upgrade ActiveRecord to 5.1 28 | 29 | Release 1.0.0 - 2018/04/06 30 | 31 | * Support v1 API 32 | 33 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | source "http://rubygems.org" 2 | gemspec 3 | -------------------------------------------------------------------------------- /Gemfile.v0.12: -------------------------------------------------------------------------------- 1 | source "http://rubygems.org" 2 | 3 | gem 'json', '= 1.8.6' 4 | gem 'fluentd', '~> 0.12.0' 5 | 6 | gemspec 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SQL input plugin for [Fluentd](http://fluentd.org) event collector 2 | 3 | ## Overview 4 | 5 | This SQL plugin has two parts: 6 | 7 | 1. SQL **input** plugin reads records from RDBMSes periodically. An example use case would be getting "diffs" of a table (based on the "updated_at" field). 8 | 2. SQL **output** plugin writes records into RDBMSes. An example use case would be aggregating server/app/sensor logs into RDBMS systems. 9 | 10 | ## Requirements 11 | 12 | | fluent-plugin-sql | fluentd | ruby | 13 | |-------------------|------------|--------| 14 | | >= 1.0.0 | >= v0.14.4 | >= 2.1 | 15 | | < 1.0.0 | < v0.14.0 | >= 1.9 | 16 | 17 | NOTE: fluent-plugin-sql v2's buffer format is different from v1's. If you update the plugin to v2, don't reuse v1's buffer. 18 | 19 | ## Installation 20 | 21 | $ fluent-gem install fluent-plugin-sql --no-document 22 | $ fluent-gem install pg --no-document # for postgresql 23 | 24 | You should also install the gem for the actual RDBMS driver: the `pg` gem for the postgresql adapter or the `mysql2` gem for the mysql2 adapter. Other adapters supported by [ActiveRecord](https://github.com/rails/rails/tree/master/activerecord) should work. 25 | 26 | We recommend mysql2 gem 0.3.12 or later and pg gem 0.16.0 or later. 27 | 28 | If you use Ruby 2.1, use pg gem 0.21.0 (< 1.0.0), because ActiveRecord 5.1.4 or earlier doesn't support Ruby 2.1. 29 | 30 | ## Input: How It Works 31 | 32 | This plugin runs the following SQL periodically: 33 | 34 | SELECT * FROM *table* WHERE *update\_column* > *last\_update\_column\_value* ORDER BY *update_column* ASC LIMIT 500 35 | 36 | What you need to configure is *update\_column*.
The column should be an incremental column (such as an AUTO\_INCREMENT primary key) so that this plugin reads newly INSERTed rows. Alternatively, you can use a column that is updated every time the row is updated (such as a `last_updated_at` column) so that this plugin reads UPDATEd rows as well. 37 | If you omit the *update\_column* parameter, the primary key is used. 38 | 39 | It stores the last selected rows in a file (named *state\_file*) so that it does not forget the last row when Fluentd restarts. 40 | 41 | ## Input: Configuration 42 | 43 | <source> 44 | @type sql 45 | 46 | host rdb_host 47 | port rdb_port 48 | database rdb_database 49 | adapter mysql2_or_postgresql_or_etc 50 | username myusername 51 | password mypassword 52 | 53 | tag_prefix my.rdb # optional, but recommended 54 | 55 | select_interval 60s # optional 56 | select_limit 500 # optional 57 | 58 | state_file /var/run/fluentd/sql_state 59 | 60 | <table> 61 | table table1 62 | tag table1 # optional 63 | update_column update_col1 64 | time_column time_col2 # optional 65 | </table>
66 | 67 | <table> 68 | table table2 69 | tag table2 # optional 70 | update_column updated_at 71 | time_column updated_at # optional 72 | time_format %Y-%m-%d %H:%M:%S.%6N # optional 73 | </table>
74 | 75 | # detects all tables instead of <table> sections 76 | #all_tables 77 | </source> 78 | 79 | * **host** RDBMS host 80 | * **port** RDBMS port 81 | * **database** RDBMS database name 82 | * **adapter** RDBMS driver name. You should install the corresponding gem before starting (mysql2 gem for the mysql2 adapter, pg gem for the postgresql adapter, etc.) 83 | * **username** RDBMS login user name 84 | * **password** RDBMS login password 85 | * **tag_prefix** prefix of event tags. The actual tag will be this\_tag\_prefix.table\_tag (optional) 86 | * **select_interval** interval between SELECT queries (optional) 87 | * **select_limit** LIMIT on the number of rows for each SELECT (optional) 88 | * **state_file** path to a file that stores the last selected rows 89 | * **all_tables** read all tables instead of configuring each table in \<table\> sections 90 | 91 | \<table\> sections: 92 | 93 | * **tag** tag name of events (optional; the default value is the table name) 94 | * **table** RDBMS table name 95 | * **update_column**: see the description above 96 | * **time_column** (optional): if this option is set, this plugin uses this column's value as the event's time. Otherwise it uses the current time. 97 | * **primary_key** (optional): set this parameter if you want to read from a table that has no primary key, such as a PostgreSQL view. 98 | * **time_format** (optional): the format of the date used in the query; useful with alternative adapters that have restrictions on the format 99 | 100 | ## Input: Limitation 101 | 102 | Make sure the target tables have an index (and/or partitions) on the *update\_column*. Otherwise the SELECT causes a full table scan and a serious performance problem. 103 | 104 | You can't replicate DELETEd rows. 105 | 106 | ## Output: How It Works 107 | 108 | This plugin uses ActiveRecord underneath. The `host`, `port`, `database`, `adapter`, `username`, `password`, and `socket` parameters correspond to ActiveRecord's connection parameters of the same names. 109 | 110 | ## Output: Configuration 111 | 112 | <match my.rdb.*> 113 | @type sql 114 | host rdb_host 115 | port 3306 116 | database rdb_database 117 | adapter mysql2_or_postgresql_or_etc 118 | username myusername 119 | password mypassword 120 | socket path_to_socket 121 | remove_tag_prefix my.rdb # optional, dual of tag_prefix in in_sql 122 | 123 | <table>
124 | table table1 125 | column_mapping 'timestamp:created_at,fluentdata1:dbcol1,fluentdata2:dbcol2,fluentdata3:dbcol3' 126 | # This is the default table because it has no "pattern" argument in <table>
127 | # The logic is such that if all non-default <table> blocks 128 | # do not match, the default one is chosen. 129 | # The default table is required. 130 | </table>
131 | 132 | <table hello.*> # You can pass the same pattern you use in match statements. 133 | table table2 134 | # This is the non-default table. It is chosen if the tag matches the pattern 135 | # AFTER remove_tag_prefix is applied to the incoming event. For example, if 136 | # the message comes in with the tag my.rdb.hello.world, "remove_tag_prefix my.rdb" 137 | # makes it "hello.world", which gets matched here because of "pattern hello.*". 138 | </table>
139 | 140 | <table hello.world> 141 | table table3 142 | # This is the second non-default table. You can have as many non-default tables 143 | # as you wish. One caveat: non-default tables are matched top-to-bottom and 144 | # an event goes into the first table it matches. Hence, this particular table 145 | # never gets any data, since the above "hello.*" subsumes "hello.world". 146 | </table>
147 | </match> 148 | 149 | * **host** RDBMS host 150 | * **port** RDBMS port 151 | * **database** RDBMS database name 152 | * **adapter** RDBMS driver name. You should install the corresponding gem before starting (mysql2 gem for the mysql2 adapter, pg gem for the postgresql adapter, etc.) 153 | * **username** RDBMS login user name 154 | * **password** RDBMS login password 155 | * **socket** RDBMS socket path 156 | * **pool** size of ActiveRecord's connection pool. A connection pool synchronizes thread access to a limited number of database connections 157 | * **timeout** RDBMS connection timeout 158 | * **remove_tag_prefix** remove the given prefix from event tags. See "tag_prefix" in "Input: Configuration". (optional) 159 | 160 | \<table\> sections: 161 | 162 | * **table** RDBMS table name 163 | * **column_mapping**: [Required] Record-to-table schema mapping. The format is a comma-separated list of `from:to` or `key` entries. For example, if you set 'item_id:id,item_text:data,updated_at' as **column_mapping**, the `item_id` field of a record is stored into the `id` column, the `item_text` field into the `data` column, and the `updated_at` field into the `updated_at` column. 164 | * **\<table pattern\>**: the pattern against which the incoming event's tag (after it goes through `remove_tag_prefix`, if given) is matched. The patterns should follow the same syntax as [that of \<match\>](https://docs.fluentd.org/configuration/config-file#how-match-patterns-work). **Exactly one \<table\> element must NOT have this parameter so that it becomes the default table to store data**. 165 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | 2 | require 'bundler' 3 | Bundler::GemHelper.install_tasks 4 | 5 | require 'rake/testtask' 6 | 7 | Rake::TestTask.new(:test) do |test| 8 | test.libs << 'lib' << 'test' 9 | test.test_files = FileList['test/*/*.rb'] 10 | test.verbose = true 11 | end 12 | 13 | task :default => [:build] 14 | 15 | -------------------------------------------------------------------------------- /VERSION: -------------------------------------------------------------------------------- 1 | 2.3.1 -------------------------------------------------------------------------------- /fluent-plugin-sql.gemspec: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | $:.push File.expand_path('../lib', __FILE__) 3 | 4 | Gem::Specification.new do |gem| 5 | gem.name = "fluent-plugin-sql" 6 | gem.description = "SQL input/output plugin for Fluentd event collector" 7 | gem.homepage = "https://github.com/fluent/fluent-plugin-sql" 8 | gem.summary = gem.description 9 | gem.version = File.read("VERSION").strip 10 | gem.authors = ["Sadayuki Furuhashi"] 11 | gem.email = "frsyuki@gmail.com" 12 | #gem.platform = Gem::Platform::RUBY 13 | gem.files = `git ls-files`.split("\n") 14 | gem.test_files = `git ls-files -- {test,spec,features}/*`.split("\n") 15 | gem.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) } 16 | gem.require_paths = ['lib'] 17 | gem.license = "Apache-2.0" 18 | 19 | gem.add_dependency "fluentd", [">= 1.7.0", "< 2"] 20 | gem.add_dependency 'activerecord', [">= 6.1", "< 7.2"] 21 | gem.add_dependency 'activerecord-import', "~> 1.0" 22 | gem.add_development_dependency "rake", ">= 0.9.2" 23 | gem.add_development_dependency "test-unit", "> 3.1.0" 24 | gem.add_development_dependency "test-unit-rr" 25 | gem.add_development_dependency "test-unit-notify" 26 | gem.add_development_dependency "pg", '~> 1.0' 27 | end 28 |
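The `column_mapping` option described in the README's output section maps record fields to table columns using a comma-separated list of `from:to` or bare `key` entries. The following is a minimal, self-contained sketch of that documented syntax, mirroring `parse_column_mapping` in `lib/fluent/plugin/out_sql.rb` later in this listing; the field and column names in the usage example are made up for illustration:

```ruby
# Sketch of the documented column_mapping syntax:
# "from:to" maps a record field to a differently named column,
# a bare "key" maps the field to a column of the same name.
def parse_column_mapping(column_mapping_conf)
  mapping = {}
  column_mapping_conf.split(',').each do |column_map|
    key, column = column_map.strip.split(':', 2)
    mapping[key] = column || key
  end
  mapping
end

mapping = parse_column_mapping('item_id:id,item_text:data,updated_at')
# => {"item_id"=>"id", "item_text"=>"data", "updated_at"=>"updated_at"}

# Applying the mapping to an incoming record (hypothetical values):
record = { 'item_id' => 42, 'item_text' => 'hello', 'updated_at' => '2024-02-29 12:00:00' }
row = mapping.each_with_object({}) { |(from, to), h| h[to] = record[from] }
# => {"id"=>42, "data"=>"hello", "updated_at"=>"2024-02-29 12:00:00"}
```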
-------------------------------------------------------------------------------- /lib/fluent/plugin/in_sql.rb: -------------------------------------------------------------------------------- 1 | # 2 | # Fluent 3 | # 4 | # Copyright (C) 2013 FURUHASHI Sadayuki 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | require "fluent/plugin/input" 20 | 21 | module Fluent::Plugin 22 | 23 | require 'active_record' 24 | 25 | class SQLInput < Input 26 | Fluent::Plugin.register_input('sql', self) 27 | 28 | desc 'RDBMS host' 29 | config_param :host, :string 30 | desc 'RDBMS port' 31 | config_param :port, :integer, default: nil 32 | desc 'RDBMS driver name.' 33 | config_param :adapter, :string 34 | desc 'RDBMS database name' 35 | config_param :database, :string 36 | desc 'RDBMS login user name' 37 | config_param :username, :string, default: nil 38 | desc 'RDBMS login password' 39 | config_param :password, :string, default: nil, secret: true 40 | desc 'RDBMS socket path' 41 | config_param :socket, :string, default: nil 42 | desc 'PostgreSQL schema search path' 43 | config_param :schema_search_path, :string, default: nil 44 | 45 | desc 'path to a file to store last rows' 46 | config_param :state_file, :string, default: nil 47 | desc 'prefix of tags of events. actual tag will be this_tag_prefix.tables_tag (optional)' 48 | config_param :tag_prefix, :string, default: nil 49 | desc 'interval to run SQLs (optional)' 50 | config_param :select_interval, :time, default: 60 51 | desc 'limit of number of rows for each SQL(optional)' 52 | config_param :select_limit, :time, default: 500 53 | 54 | class TableElement 55 | include Fluent::Configurable 56 | 57 | config_param :table, :string 58 | config_param :tag, :string, default: nil 59 | config_param :update_column, :string, default: nil 60 | config_param :time_column, :string, default: nil 61 | config_param :primary_key, :string, default: nil 62 | config_param :time_format, :string, default: '%Y-%m-%d %H:%M:%S.%6N%z' 63 | attr_reader :log 64 | 65 | def configure(conf) 66 | super 67 | end 68 | 69 | def init(tag_prefix, base_model, router, log) 70 | @router = router 71 | @tag = "#{tag_prefix}.#{@tag}" if tag_prefix 72 | @log = log 73 | 74 | # creates a model for this table 75 | table_name = @table 76 | primary_key = @primary_key 77 | time_format = @time_format 78 | 79 | @model = Class.new(base_model) do 80 | self.table_name = table_name 81 | self.inheritance_column = '_never_use_' 82 | self.primary_key = primary_key if primary_key 83 | self.const_set(:TIME_FORMAT, time_format) 84 | 85 | #self.include_root_in_json = false 86 | 87 | def read_attribute_for_serialization(n) 88 | v = send(n) 89 | if v.respond_to?(:to_msgpack) 90 | v 91 | elsif v.is_a? Time 92 | v.strftime(self.class::TIME_FORMAT) 93 | else 94 | v.to_s 95 | end 96 | end 97 | end 98 | 99 | # ActiveRecord requires model class to have a name. 
100 | class_name = table_name.gsub(/\./, "_").singularize.camelize 101 | base_model.const_set(class_name, @model) 102 | 103 | # Sets model_name otherwise ActiveRecord causes errors 104 | model_name = ActiveModel::Name.new(@model, nil, class_name) 105 | @model.define_singleton_method(:model_name) { model_name } 106 | 107 | # if update_column is not set, fall back to the primary key 108 | unless @update_column 109 | pk = @model.columns_hash[@model.primary_key] 110 | unless pk 111 | raise "Composite primary key is not supported. Set update_column parameter to <table>
section." 112 | end 113 | @update_column = pk.name 114 | end 115 | end 116 | 117 | # Make sure we always have a Fluent::EventTime object regardless of what comes in 118 | def normalized_time(tv, now) 119 | return Fluent::EventTime.from_time(tv) if tv.is_a?(Time) 120 | begin 121 | Fluent::EventTime.parse(tv.to_s) 122 | rescue 123 | log.warn "Message contains invalid timestamp, using current time instead (#{now.inspect})" 124 | now 125 | end 126 | end 127 | 128 | # emits next records and returns the last record of emitted records 129 | def emit_next_records(last_record, limit) 130 | relation = @model 131 | if last_record && last_update_value = last_record[@update_column] 132 | relation = relation.where("#{@update_column} > ?", last_update_value) 133 | end 134 | relation = relation.order("#{@update_column} ASC") 135 | relation = relation.limit(limit) if limit > 0 136 | 137 | now = Fluent::Engine.now 138 | 139 | me = Fluent::MultiEventStream.new 140 | relation.each do |obj| 141 | record = obj.serializable_hash rescue nil 142 | if record 143 | time = 144 | if @time_column && (tv = obj.read_attribute(@time_column)) 145 | normalized_time(tv, now) 146 | else 147 | now 148 | end 149 | 150 | me.add(time, record) 151 | last_record = record 152 | end 153 | end 154 | 155 | last_record = last_record.dup if last_record # some plugin rewrites record :( 156 | @router.emit_stream(@tag, me) 157 | 158 | return last_record 159 | end 160 | end 161 | 162 | def configure(conf) 163 | super 164 | 165 | unless @state_file 166 | $log.warn "'state_file PATH' parameter is not set to a 'sql' source." 167 | $log.warn "this parameter is highly recommended to save the last rows to resume tailing." 168 | end 169 | 170 | @tables = conf.elements.select {|e| 171 | e.name == 'table' 172 | }.map {|e| 173 | te = TableElement.new 174 | te.configure(e) 175 | te 176 | } 177 | 178 | if config['all_tables'] 179 | @all_tables = true 180 | end 181 | end 182 | 183 | SKIP_TABLE_REGEXP = /\Aschema_migrations\Z/i 184 | 185 | def start 186 | @state_store = @state_file.nil? ? MemoryStateStore.new : StateStore.new(@state_file) 187 | 188 | config = { 189 | adapter: @adapter, 190 | host: @host, 191 | port: @port, 192 | database: @database, 193 | username: @username, 194 | password: @password, 195 | socket: @socket, 196 | schema_search_path: @schema_search_path, 197 | } 198 | 199 | # creates subclass of ActiveRecord::Base so that it can have different 200 | # database configuration from ActiveRecord::Base. 201 | @base_model = Class.new(ActiveRecord::Base) do 202 | # base model doesn't have corresponding phisical table 203 | self.abstract_class = true 204 | end 205 | 206 | # ActiveRecord requires the base_model to have a name. Here sets name 207 | # of an anonymous class by assigning it to a constant. 
In Ruby, class has 208 | # a name of a constant assigned first 209 | SQLInput.const_set("BaseModel_#{rand(1 << 31)}", @base_model) 210 | 211 | # Now base_model can have independent configuration from ActiveRecord::Base 212 | @base_model.establish_connection(config) 213 | 214 | if @all_tables 215 | # get list of tables from the database 216 | @tables = @base_model.connection.tables.map do |table_name| 217 | if table_name.match(SKIP_TABLE_REGEXP) 218 | # some tables such as "schema_migrations" should be ignored 219 | nil 220 | else 221 | te = TableElement.new 222 | te.configure({ 223 | 'table' => table_name, 224 | 'tag' => table_name, 225 | 'update_column' => nil, 226 | }) 227 | te 228 | end 229 | end.compact 230 | end 231 | 232 | # ignore tables if TableElement#init failed 233 | @tables.reject! do |te| 234 | begin 235 | te.init(@tag_prefix, @base_model, router, log) 236 | log.info "Selecting '#{te.table}' table" 237 | false 238 | rescue => e 239 | log.warn "Can't handle '#{te.table}' table. Ignoring.", error: e 240 | log.warn_backtrace e.backtrace 241 | true 242 | end 243 | end 244 | 245 | @stop_flag = false 246 | @thread = Thread.new(&method(:thread_main)) 247 | end 248 | 249 | def shutdown 250 | @stop_flag = true 251 | $log.debug "Waiting for thread to finish" 252 | @thread.join 253 | end 254 | 255 | def thread_main 256 | until @stop_flag 257 | sleep @select_interval 258 | 259 | begin 260 | conn = @base_model.connection 261 | conn.active? || conn.reconnect! 262 | rescue => e 263 | log.warn "can't connect to database. Reconnect at next try" 264 | next 265 | end 266 | 267 | @tables.each do |t| 268 | begin 269 | last_record = @state_store.last_records[t.table] 270 | @state_store.last_records[t.table] = t.emit_next_records(last_record, @select_limit) 271 | @state_store.update! 272 | rescue => e 273 | log.error "unexpected error", error: e 274 | log.error_backtrace e.backtrace 275 | end 276 | end 277 | end 278 | end 279 | 280 | class StateStore 281 | def initialize(path) 282 | require 'yaml' 283 | 284 | @path = path 285 | if File.exist?(@path) 286 | @data = YAML.load_file(@path) 287 | if @data == false || @data == [] 288 | # this happens if an users created an empty file accidentally 289 | @data = {} 290 | elsif !@data.is_a?(Hash) 291 | raise "state_file on #{@path.inspect} is invalid" 292 | end 293 | else 294 | @data = {} 295 | end 296 | end 297 | 298 | def last_records 299 | @data['last_records'] ||= {} 300 | end 301 | 302 | def update! 303 | File.open(@path, 'w') {|f| 304 | f.write YAML.dump(@data) 305 | } 306 | end 307 | end 308 | 309 | class MemoryStateStore 310 | def initialize 311 | @data = {} 312 | end 313 | 314 | def last_records 315 | @data['last_records'] ||= {} 316 | end 317 | 318 | def update! 319 | end 320 | end 321 | end 322 | 323 | end 324 | -------------------------------------------------------------------------------- /lib/fluent/plugin/out_sql.rb: -------------------------------------------------------------------------------- 1 | require "fluent/plugin/output" 2 | 3 | require 'active_record' 4 | require 'activerecord-import' 5 | 6 | module Fluent::Plugin 7 | class SQLOutput < Output 8 | Fluent::Plugin.register_output('sql', self) 9 | 10 | helpers :inject, :compat_parameters, :event_emitter 11 | 12 | desc 'RDBMS host' 13 | config_param :host, :string 14 | desc 'RDBMS port' 15 | config_param :port, :integer, default: nil 16 | desc 'RDBMS driver name.' 
17 | config_param :adapter, :string 18 | desc 'RDBMS login user name' 19 | config_param :username, :string, default: nil 20 | desc 'RDBMS login password' 21 | config_param :password, :string, default: nil, secret: true 22 | desc 'RDBMS database name' 23 | config_param :database, :string 24 | desc 'RDBMS socket path' 25 | config_param :socket, :string, default: nil 26 | desc 'PostgreSQL schema search path' 27 | config_param :schema_search_path, :string, default: nil 28 | desc 'remove the given prefix from the events' 29 | config_param :remove_tag_prefix, :string, default: nil 30 | desc 'enable fallback' 31 | config_param :enable_fallback, :bool, default: true 32 | desc "size of ActiveRecord's connection pool" 33 | config_param :pool, :integer, default: 5 34 | desc "specifies the timeout to establish a new connection to the database before failing" 35 | config_param :timeout, :integer, default: 5000 36 | 37 | config_section :buffer do 38 | config_set_default :chunk_keys, ["tag"] 39 | end 40 | 41 | attr_accessor :tables 42 | 43 | # TODO: Merge SQLInput's TableElement 44 | class TableElement 45 | include Fluent::Configurable 46 | 47 | config_param :table, :string 48 | config_param :column_mapping, :string 49 | config_param :num_retries, :integer, default: 5 50 | 51 | attr_reader :model 52 | attr_reader :pattern 53 | 54 | def initialize(pattern, log, enable_fallback) 55 | super() 56 | @pattern = Fluent::MatchPattern.create(pattern) 57 | @log = log 58 | @enable_fallback = enable_fallback 59 | end 60 | 61 | def configure(conf) 62 | super 63 | 64 | @mapping = parse_column_mapping(@column_mapping) 65 | @format_proc = Proc.new { |record| 66 | new_record = {} 67 | @mapping.each { |k, c| 68 | new_record[c] = record[k] 69 | } 70 | new_record 71 | } 72 | end 73 | 74 | def init(base_model) 75 | # See SQLInput for more details of following code 76 | table_name = @table 77 | @model = Class.new(base_model) do 78 | self.table_name = table_name 79 | self.inheritance_column = '_never_use_output_' 80 | end 81 | 82 | class_name = table_name.singularize.camelize 83 | base_model.const_set(class_name, @model) 84 | model_name = ActiveModel::Name.new(@model, nil, class_name) 85 | @model.define_singleton_method(:model_name) { model_name } 86 | 87 | # TODO: check column_names and table schema 88 | # @model.column_names 89 | end 90 | 91 | def import(chunk, output) 92 | tag = chunk.metadata.tag 93 | records = [] 94 | chunk.msgpack_each { |time, data| 95 | begin 96 | data = output.inject_values_to_record(tag, time, data) 97 | records << @model.new(@format_proc.call(data)) 98 | rescue => e 99 | args = {error: e, table: @table, record: Yajl.dump(data)} 100 | @log.warn "Failed to create the model. Ignore a record:", args 101 | end 102 | } 103 | begin 104 | @model.import(records) 105 | rescue ActiveRecord::StatementInvalid, ActiveRecord::Import::MissingColumnError => e 106 | if @enable_fallback 107 | # ignore other exceptions to use Fluentd retry mechanizm 108 | @log.warn "Got deterministic error. Fallback to one-by-one import", error: e 109 | one_by_one_import(records) 110 | else 111 | @log.warn "Got deterministic error. Fallback is disabled", error: e 112 | raise e 113 | end 114 | end 115 | end 116 | 117 | def one_by_one_import(records) 118 | records.each { |record| 119 | retries = 0 120 | begin 121 | @model.import([record]) 122 | rescue ActiveRecord::StatementInvalid, ActiveRecord::Import::MissingColumnError => e 123 | @log.error "Got deterministic error again. 
Dump a record", error: e, record: record 124 | rescue => e 125 | retries += 1 126 | if retries > @num_retries 127 | @log.error "Can't recover undeterministic error. Dump a record", error: e, record: record 128 | next 129 | end 130 | 131 | @log.warn "Failed to import a record: retry number = #{retries}", error: e 132 | sleep 0.5 133 | retry 134 | end 135 | } 136 | end 137 | 138 | private 139 | 140 | def parse_column_mapping(column_mapping_conf) 141 | mapping = {} 142 | column_mapping_conf.split(',').each { |column_map| 143 | key, column = column_map.strip.split(':', 2) 144 | column = key if column.nil? 145 | mapping[key] = column 146 | } 147 | mapping 148 | end 149 | end 150 | 151 | def initialize 152 | super 153 | end 154 | 155 | def configure(conf) 156 | compat_parameters_convert(conf, :inject, :buffer) 157 | 158 | super 159 | 160 | if remove_tag_prefix = conf['remove_tag_prefix'] 161 | @remove_tag_prefix = Regexp.new('^' + Regexp.escape(remove_tag_prefix)) 162 | end 163 | 164 | @tables = [] 165 | @default_table = nil 166 | conf.elements.select { |e| 167 | e.name == 'table' 168 | }.each { |e| 169 | te = TableElement.new(e.arg, log, @enable_fallback) 170 | te.configure(e) 171 | if e.arg.empty? 172 | $log.warn "Detect duplicate default table definition" if @default_table 173 | @default_table = te 174 | else 175 | @tables << te 176 | end 177 | } 178 | 179 | if @pool < @buffer_config.flush_thread_count 180 | log.warn "connection pool size is smaller than buffer's flush_thread_count. Recommend to increase pool value", :pool => @pool, :flush_thread_count => @buffer_config.flush_thread_count 181 | end 182 | 183 | if @default_table.nil? 184 | raise Fluent::ConfigError, "There is no default table.
is required in sql output" 185 | end 186 | end 187 | 188 | def start 189 | super 190 | 191 | config = { 192 | adapter: @adapter, 193 | host: @host, 194 | port: @port, 195 | database: @database, 196 | username: @username, 197 | password: @password, 198 | socket: @socket, 199 | schema_search_path: @schema_search_path, 200 | pool: @pool, 201 | timeout: @timeout, 202 | } 203 | 204 | @base_model = Class.new(ActiveRecord::Base) do 205 | self.abstract_class = true 206 | end 207 | 208 | SQLOutput.const_set("BaseModel_#{rand(1 << 31)}", @base_model) 209 | ActiveRecord::Base.establish_connection(config) 210 | 211 | # ignore tables if TableElement#init failed 212 | @tables.reject! do |te| 213 | init_table(te, @base_model) 214 | end 215 | init_table(@default_table, @base_model) 216 | end 217 | 218 | def shutdown 219 | super 220 | end 221 | 222 | def formatted_to_msgpack_binary 223 | true 224 | end 225 | 226 | def write(chunk) 227 | ActiveRecord::Base.connection_pool.with_connection do 228 | 229 | @tables.each { |table| 230 | tag = format_tag(chunk.metadata.tag) 231 | if table.pattern.match(tag) 232 | return table.import(chunk, self) 233 | end 234 | } 235 | @default_table.import(chunk, self) 236 | end 237 | end 238 | 239 | private 240 | 241 | def init_table(te, base_model) 242 | begin 243 | te.init(base_model) 244 | log.info "Selecting '#{te.table}' table" 245 | false 246 | rescue => e 247 | log.warn "Can't handle '#{te.table}' table. Ignoring.", error: e 248 | log.warn_backtrace e.backtrace 249 | true 250 | end 251 | end 252 | 253 | def format_tag(tag) 254 | if tag && @remove_tag_prefix 255 | tag.gsub(@remove_tag_prefix, '') 256 | else 257 | tag 258 | end 259 | end 260 | end 261 | end 262 | -------------------------------------------------------------------------------- /test/fixtures/schema.rb: -------------------------------------------------------------------------------- 1 | require "active_record" 2 | ActiveRecord::Base.establish_connection(host: "localhost", 3 | port: 5432, 4 | username: "fluentd", 5 | password: "fluentd", 6 | adapter: "postgresql", 7 | database: "fluentd_test") 8 | ActiveRecord::Schema.define(version: 20160225030107) do 9 | create_table "logs", force: :cascade do |t| 10 | t.string "host" 11 | t.string "ident" 12 | t.string "pid" 13 | t.text "message" 14 | t.datetime "created_at", null: false 15 | t.datetime "updated_at", null: false 16 | end 17 | 18 | create_table "messages", force: :cascade do |t| 19 | t.string "message" 20 | t.datetime "created_at", null: false 21 | t.datetime "updated_at", null: false 22 | end 23 | 24 | create_table "messages_custom_time", force: :cascade do |t| 25 | t.string "message" 26 | t.datetime "created_at", null: false 27 | t.datetime "updated_at", null: false 28 | t.string "custom_time" 29 | end 30 | end 31 | 32 | -------------------------------------------------------------------------------- /test/helper.rb: -------------------------------------------------------------------------------- 1 | require "test/unit" 2 | require "test/unit/rr" 3 | require "test/unit/notify" unless ENV['CI'] 4 | require "fluent/test" 5 | require "fluent/plugin/out_sql" 6 | require "fluent/plugin/in_sql" 7 | 8 | load "fixtures/schema.rb" 9 | -------------------------------------------------------------------------------- /test/plugin/test_in_sql.rb: -------------------------------------------------------------------------------- 1 | require "helper" 2 | require "fluent/test/driver/input" 3 | 4 | class SqlInputTest < Test::Unit::TestCase 5 | def setup 6 | 
Fluent::Test.setup 7 | end 8 | 9 | def teardown 10 | end 11 | 12 | CONFIG = %[ 13 | adapter postgresql 14 | host localhost 15 | port 5432 16 | database fluentd_test 17 | 18 | username fluentd 19 | password fluentd 20 | 21 | schema_search_path public 22 | 23 | tag_prefix db 24 | 25 | <table>
26 | table messages 27 | tag logs 28 | update_column updated_at 29 | time_column updated_at 30 | </table>
31 | ] 32 | 33 | def create_driver(conf = CONFIG) 34 | Fluent::Test::Driver::Input.new(Fluent::Plugin::SQLInput).configure(conf) 35 | end 36 | 37 | def test_configure 38 | d = create_driver 39 | expected = { 40 | host: "localhost", 41 | port: 5432, 42 | adapter: "postgresql", 43 | database: "fluentd_test", 44 | username: "fluentd", 45 | password: "fluentd", 46 | schema_search_path: "public", 47 | tag_prefix: "db" 48 | } 49 | actual = { 50 | host: d.instance.host, 51 | port: d.instance.port, 52 | adapter: d.instance.adapter, 53 | database: d.instance.database, 54 | username: d.instance.username, 55 | password: d.instance.password, 56 | schema_search_path: d.instance.schema_search_path, 57 | tag_prefix: d.instance.tag_prefix 58 | } 59 | assert_equal(expected, actual) 60 | tables = d.instance.instance_variable_get(:@tables) 61 | assert_equal(1, tables.size) 62 | messages = tables.first 63 | assert_equal("messages", messages.table) 64 | assert_equal("logs", messages.tag) 65 | end 66 | 67 | def test_message 68 | d = create_driver(CONFIG + "select_interval 1") 69 | Message.create!(message: "message 1") 70 | Message.create!(message: "message 2") 71 | Message.create!(message: "message 3") 72 | 73 | d.end_if do 74 | d.record_count >= 3 75 | end 76 | d.run 77 | 78 | assert_equal("db.logs", d.events[0][0]) 79 | expected = [ 80 | [d.events[0][1], "message 1"], 81 | [d.events[1][1], "message 2"], 82 | [d.events[2][1], "message 3"], 83 | ] 84 | actual = [ 85 | [Fluent::EventTime.parse(d.events[0][2]["updated_at"]), d.events[0][2]["message"]], 86 | [Fluent::EventTime.parse(d.events[1][2]["updated_at"]), d.events[1][2]["message"]], 87 | [Fluent::EventTime.parse(d.events[2][2]["updated_at"]), d.events[2][2]["message"]], 88 | ] 89 | assert_equal(expected, actual) 90 | end 91 | 92 | class Message < ActiveRecord::Base 93 | end 94 | end 95 | -------------------------------------------------------------------------------- /test/plugin/test_in_sql_with_custom_time.rb: -------------------------------------------------------------------------------- 1 | require "helper" 2 | require "fluent/test/driver/input" 3 | 4 | class SqlInputCustomTimeTest < Test::Unit::TestCase 5 | def setup 6 | Fluent::Test.setup 7 | end 8 | 9 | def teardown 10 | end 11 | 12 | CONFIG = %[ 13 | adapter postgresql 14 | host localhost 15 | port 5432 16 | database fluentd_test 17 | 18 | username fluentd 19 | password fluentd 20 | 21 | schema_search_path public 22 | 23 | tag_prefix db 24 | 25 | 26 | table messages_custom_time 27 | tag logs 28 | update_column updated_at 29 | time_column custom_time 30 |
31 | ] 32 | 33 | def create_driver(conf = CONFIG) 34 | Fluent::Test::Driver::Input.new(Fluent::Plugin::SQLInput).configure(conf) 35 | end 36 | 37 | def test_configure 38 | d = create_driver 39 | expected = { 40 | host: "localhost", 41 | port: 5432, 42 | adapter: "postgresql", 43 | database: "fluentd_test", 44 | username: "fluentd", 45 | password: "fluentd", 46 | schema_search_path: "public", 47 | tag_prefix: "db" 48 | } 49 | actual = { 50 | host: d.instance.host, 51 | port: d.instance.port, 52 | adapter: d.instance.adapter, 53 | database: d.instance.database, 54 | username: d.instance.username, 55 | password: d.instance.password, 56 | schema_search_path: d.instance.schema_search_path, 57 | tag_prefix: d.instance.tag_prefix 58 | } 59 | assert_equal(expected, actual) 60 | tables = d.instance.instance_variable_get(:@tables) 61 | assert_equal(1, tables.size) 62 | messages_custom_time = tables.first 63 | assert_equal("messages_custom_time", messages_custom_time.table) 64 | assert_equal("logs", messages_custom_time.tag) 65 | end 66 | 67 | def test_message 68 | d = create_driver(CONFIG + "select_interval 1") 69 | 70 | start_time = Fluent::EventTime.now 71 | 72 | # Create one message with a valid timestamp containing milliseconds and a time zone 73 | Message.create!(message: "message 1", custom_time: '2020-08-27 15:00:16.100758000 -0400') 74 | 75 | # Create one message without a timestamp so that we can test auto-creation 76 | Message.create!(message: "message 2 (no timestamp)", custom_time: nil) 77 | 78 | # Create one message with an unparseable timestamp so that we can check that a valid 79 | # one is auto-generated. 80 | Message.create!(message: "message 3 (bad timestamp)", custom_time: 'foo') 81 | 82 | d.end_if do 83 | d.record_count >= 3 84 | end 85 | d.run(timeout: 5) 86 | 87 | assert_equal("db.logs", d.events[0][0]) 88 | expected = [ 89 | [d.events[0][1], "message 1"], 90 | [d.events[1][1], "message 2 (no timestamp)"], 91 | [d.events[2][1], "message 3 (bad timestamp)"], 92 | ] 93 | 94 | actual = [ 95 | [Fluent::EventTime.parse(d.events[0][2]["custom_time"]), d.events[0][2]["message"]], 96 | d.events[1][2]["message"], 97 | d.events[2][2]["message"], 98 | ] 99 | 100 | assert_equal(expected[0], actual[0]) 101 | 102 | # Messages 2 and 3 should have the same messages but (usually) a slightly later 103 | # timestamps because they are generated by the input plugin instead of the test 104 | # code 105 | [1,2].each do |i| 106 | assert_equal(expected[i][1], actual[i]) 107 | assert_operator(expected[i][0], :>=, start_time) 108 | end 109 | end 110 | 111 | class Message < ActiveRecord::Base 112 | self.table_name = "messages_custom_time" 113 | end 114 | end 115 | -------------------------------------------------------------------------------- /test/plugin/test_in_sql_with_state_file.rb: -------------------------------------------------------------------------------- 1 | require "helper" 2 | require "fluent/test/driver/input" 3 | 4 | class SqlInputStateFileTest < Test::Unit::TestCase 5 | def setup 6 | Fluent::Test.setup 7 | end 8 | 9 | def teardown 10 | end 11 | 12 | CONFIG = %[ 13 | adapter postgresql 14 | host localhost 15 | port 5432 16 | database fluentd_test 17 | 18 | username fluentd 19 | password fluentd 20 | 21 | state_file /tmp/sql_state 22 | 23 | schema_search_path public 24 | 25 | tag_prefix db 26 | 27 | 28 | table messages 29 | tag logs 30 | update_column updated_at 31 | time_column updated_at 32 |
33 | ] 34 | 35 | def create_driver(conf = CONFIG) 36 | Fluent::Test::Driver::Input.new(Fluent::Plugin::SQLInput).configure(conf) 37 | end 38 | 39 | def test_configure 40 | d = create_driver 41 | expected = { 42 | host: "localhost", 43 | port: 5432, 44 | adapter: "postgresql", 45 | database: "fluentd_test", 46 | username: "fluentd", 47 | password: "fluentd", 48 | schema_search_path: "public", 49 | tag_prefix: "db" 50 | } 51 | actual = { 52 | host: d.instance.host, 53 | port: d.instance.port, 54 | adapter: d.instance.adapter, 55 | database: d.instance.database, 56 | username: d.instance.username, 57 | password: d.instance.password, 58 | schema_search_path: d.instance.schema_search_path, 59 | tag_prefix: d.instance.tag_prefix 60 | } 61 | assert_equal(expected, actual) 62 | tables = d.instance.instance_variable_get(:@tables) 63 | assert_equal(1, tables.size) 64 | messages = tables.first 65 | assert_equal("messages", messages.table) 66 | assert_equal("logs", messages.tag) 67 | end 68 | 69 | def test_message 70 | d = create_driver(CONFIG + "select_interval 1") 71 | Message.create!(message: "message 1") 72 | Message.create!(message: "message 2") 73 | Message.create!(message: "message 3") 74 | 75 | d.end_if do 76 | d.record_count >= 3 77 | end 78 | d.run 79 | 80 | assert_equal("db.logs", d.events[0][0]) 81 | expected = [ 82 | [d.events[0][1], "message 1"], 83 | [d.events[1][1], "message 2"], 84 | [d.events[2][1], "message 3"], 85 | ] 86 | actual = [ 87 | [Fluent::EventTime.parse(d.events[0][2]["updated_at"]), d.events[0][2]["message"]], 88 | [Fluent::EventTime.parse(d.events[1][2]["updated_at"]), d.events[1][2]["message"]], 89 | [Fluent::EventTime.parse(d.events[2][2]["updated_at"]), d.events[2][2]["message"]], 90 | ] 91 | assert_equal(expected, actual) 92 | end 93 | 94 | class Message < ActiveRecord::Base 95 | end 96 | end 97 | -------------------------------------------------------------------------------- /test/plugin/test_out_sql.rb: -------------------------------------------------------------------------------- 1 | require "helper" 2 | require "fluent/test/driver/output" 3 | 4 | class SqlOutputTest < Test::Unit::TestCase 5 | def setup 6 | Fluent::Test.setup 7 | end 8 | 9 | def teardown 10 | end 11 | 12 | CONFIG = %[ 13 | host localhost 14 | port 5432 15 | adapter postgresql 16 | 17 | database fluentd_test 18 | username fluentd 19 | password fluentd 20 | 21 | schema_search_path public 22 | 23 | remove_tag_prefix db 24 | 25 | 26 | table logs 27 | column_mapping timestamp:created_at,host:host,ident:ident,pid:pid,message:message 28 |
29 | ] 30 | 31 | def create_driver(conf = CONFIG) 32 | Fluent::Test::Driver::Output.new(Fluent::Plugin::SQLOutput).configure(conf) 33 | end 34 | 35 | def test_configure 36 | d = create_driver 37 | expected = { 38 | host: "localhost", 39 | port: 5432, 40 | adapter: "postgresql", 41 | database: "fluentd_test", 42 | username: "fluentd", 43 | password: "fluentd", 44 | schema_search_path: 'public', 45 | remove_tag_suffix: /^db/, 46 | enable_fallback: true, 47 | pool: 5 48 | } 49 | actual = { 50 | host: d.instance.host, 51 | port: d.instance.port, 52 | adapter: d.instance.adapter, 53 | database: d.instance.database, 54 | username: d.instance.username, 55 | password: d.instance.password, 56 | schema_search_path: d.instance.schema_search_path, 57 | remove_tag_suffix: d.instance.remove_tag_prefix, 58 | enable_fallback: d.instance.enable_fallback, 59 | pool: d.instance.pool 60 | } 61 | assert_equal(expected, actual) 62 | assert_empty(d.instance.tables) 63 | default_table = d.instance.instance_variable_get(:@default_table) 64 | assert_equal("logs", default_table.table) 65 | end 66 | 67 | def test_emit 68 | d = create_driver 69 | time = Time.parse("2011-01-02 13:14:15 UTC").to_i 70 | 71 | d.run(default_tag: 'test') do 72 | d.feed(time, {"message" => "message1"}) 73 | d.feed(time, {"message" => "message2"}) 74 | end 75 | 76 | default_table = d.instance.instance_variable_get(:@default_table) 77 | model = default_table.instance_variable_get(:@model) 78 | assert_equal(2, model.all.count) 79 | messages = model.pluck(:message).sort 80 | assert_equal(["message1", "message2"], messages) 81 | end 82 | 83 | class Fallback < self 84 | def test_simple 85 | d = create_driver 86 | time = Time.parse("2011-01-02 13:14:15 UTC").to_i 87 | 88 | d.run(default_tag: 'test') do 89 | d.feed(time, {"message" => "message1"}) 90 | d.feed(time, {"message" => "message2"}) 91 | 92 | default_table = d.instance.instance_variable_get(:@default_table) 93 | model = default_table.instance_variable_get(:@model) 94 | mock(model).import(anything).at_least(1) do 95 | raise ActiveRecord::Import::MissingColumnError.new("dummy_table", "dummy_column") 96 | end 97 | mock(default_table).one_by_one_import(anything) 98 | end 99 | end 100 | 101 | def test_limit 102 | d = create_driver 103 | time = Time.parse("2011-01-02 13:14:15 UTC").to_i 104 | 105 | d.run(default_tag: 'test') do 106 | d.feed(time, {"message" => "message1"}) 107 | d.feed(time, {"message" => "message2"}) 108 | 109 | default_table = d.instance.instance_variable_get(:@default_table) 110 | model = default_table.instance_variable_get(:@model) 111 | mock(model).import([anything, anything]).once do 112 | raise ActiveRecord::Import::MissingColumnError.new("dummy_table", "dummy_column") 113 | end 114 | mock(model).import([anything]).times(12) do 115 | raise StandardError 116 | end 117 | assert_equal(5, default_table.instance_variable_get(:@num_retries)) 118 | end 119 | end 120 | end 121 | end 122 | --------------------------------------------------------------------------------
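For reference on the input plugin's resume behavior: `StateStore` in `lib/fluent/plugin/in_sql.rb` persists the last emitted row of each table as plain YAML under a `last_records` key, and `emit_next_records` uses the stored `update_column` value in the next `WHERE ... > ?` query. Below is a hypothetical sketch of such a state file and how it would be read back; the path, table name, and row values are illustrative only, and the actual serialized fields depend on the table schema and configured `time_format`:

```ruby
require 'yaml'

# Hypothetical content of the file configured via `state_file`
# (e.g. /var/run/fluentd/sql_state). The plugin stores the whole
# last-emitted row, keyed by table name, under 'last_records'.
state = {
  'last_records' => {
    'messages' => {
      'id'         => '42',
      'message'    => 'message 42',
      'updated_at' => '2024-02-29 12:34:56.789012+0000',
    },
  },
}
File.write('/tmp/sql_state', YAML.dump(state))

# On restart the plugin reloads this hash and resumes from the stored
# update_column value, conceptually:
#   SELECT * FROM messages WHERE updated_at > '2024-02-29 12:34:56.789012+0000'
#   ORDER BY updated_at ASC LIMIT 500
last = YAML.load_file('/tmp/sql_state')['last_records']['messages']
puts last['updated_at']
```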