├── test ├── adapters │ ├── mysql2.rb │ ├── jdbcmysql.rb │ ├── postgis.rb │ ├── sqlite3.rb │ ├── jdbcsqlite3.rb │ ├── makara_postgis.rb │ ├── postgresql.rb │ ├── spatialite.rb │ ├── jdbcpostgresql.rb │ ├── mysql2_makara.rb │ ├── mysql2spatial.rb │ ├── postgresql_makara.rb │ └── seamless_database_pool.rb ├── models │ ├── alarm.rb │ ├── car.rb │ ├── question.rb │ ├── rule.rb │ ├── dictionary.rb │ ├── group.rb │ ├── account.rb │ ├── promotion.rb │ ├── discount.rb │ ├── user.rb │ ├── bike_maker.rb │ ├── chapter.rb │ ├── end_note.rb │ ├── tag.rb │ ├── user_token.rb │ ├── animal.rb │ ├── vendor.rb │ ├── book.rb │ ├── widget.rb │ └── topic.rb ├── schema │ ├── postgis_schema.rb │ ├── jdbcpostgresql_schema.rb │ ├── version.rb │ ├── sqlite3_schema.rb │ ├── mysql2_schema.rb │ ├── postgresql_schema.rb │ └── generic_schema.rb ├── sqlite3 │ └── import_test.rb ├── jdbcsqlite3 │ └── import_test.rb ├── postgresql │ └── import_test.rb ├── jdbcpostgresql │ └── import_test.rb ├── mysql2 │ └── import_test.rb ├── jdbcmysql │ └── import_test.rb ├── mysql2_makara │ └── import_test.rb ├── mysqlspatial2 │ └── import_test.rb ├── postgis │ └── import_test.rb ├── makara_postgis │ └── import_test.rb ├── database.yml.sample ├── support │ ├── generate.rb │ ├── shared_examples │ │ ├── on_duplicate_key_ignore.rb │ │ ├── recursive_import.rb │ │ └── on_duplicate_key_update.rb │ ├── factories.rb │ ├── active_support │ │ └── test_case_extensions.rb │ ├── assertions.rb │ ├── mysql │ │ └── import_examples.rb │ ├── sqlite3 │ │ └── import_examples.rb │ └── postgresql │ │ └── import_examples.rb ├── value_sets_records_parser_test.rb ├── travis │ └── database.yml ├── synchronize_test.rb ├── test_helper.rb └── value_sets_bytes_parser_test.rb ├── gemfiles ├── 6.0.gemfile ├── 6.1.gemfile ├── 3.2.gemfile ├── 4.0.gemfile ├── 4.1.gemfile ├── 4.2.gemfile ├── 5.0.gemfile ├── 5.1.gemfile └── 5.2.gemfile ├── Brewfile ├── benchmarks ├── models │ ├── test_innodb.rb │ ├── test_memory.rb │ └── test_myisam.rb ├── lib │ ├── float.rb │ ├── output_to_csv.rb │ ├── mysql2_benchmark.rb │ ├── output_to_html.rb │ ├── cli_parser.rb │ └── base.rb ├── schema │ └── mysql2_schema.rb ├── README └── benchmark.rb ├── lib ├── activerecord-import │ ├── version.rb │ ├── adapters │ │ ├── mysql2_adapter.rb │ │ ├── em_mysql2_adapter.rb │ │ ├── abstract_adapter.rb │ │ ├── mysql_adapter.rb │ │ ├── sqlite3_adapter.rb │ │ └── postgresql_adapter.rb │ ├── active_record │ │ └── adapters │ │ │ ├── jdbcmysql_adapter.rb │ │ │ ├── mysql2_adapter.rb │ │ │ ├── sqlite3_adapter.rb │ │ │ ├── jdbcsqlite3_adapter.rb │ │ │ ├── postgresql_adapter.rb │ │ │ ├── jdbcpostgresql_adapter.rb │ │ │ ├── abstract_adapter.rb │ │ │ └── seamless_database_pool_adapter.rb │ ├── mysql2.rb │ ├── sqlite3.rb │ ├── postgresql.rb │ ├── base.rb │ ├── value_sets_parser.rb │ └── synchronize.rb └── activerecord-import.rb ├── .gitignore ├── .rubocop.yml ├── activerecord-import.gemspec ├── LICENSE ├── .rubocop_todo.yml ├── Gemfile ├── Rakefile ├── .travis.yml ├── CHANGELOG.md └── README.markdown /test/adapters/mysql2.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "mysql2" 2 | -------------------------------------------------------------------------------- /gemfiles/6.0.gemfile: -------------------------------------------------------------------------------- 1 | gem 'activerecord', '~> 6.0.0' 2 | -------------------------------------------------------------------------------- /test/adapters/jdbcmysql.rb: 
-------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "jdbcmysql" 2 | -------------------------------------------------------------------------------- /test/adapters/postgis.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "postgis" 2 | -------------------------------------------------------------------------------- /test/adapters/sqlite3.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "sqlite3" 2 | -------------------------------------------------------------------------------- /Brewfile: -------------------------------------------------------------------------------- 1 | brew "mysql" 2 | brew "postgresql" 3 | brew "sqlite" -------------------------------------------------------------------------------- /test/adapters/jdbcsqlite3.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "jdbcsqlite3" 2 | -------------------------------------------------------------------------------- /test/adapters/makara_postgis.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "postgis" 2 | -------------------------------------------------------------------------------- /test/adapters/postgresql.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "postgresql" 2 | -------------------------------------------------------------------------------- /test/adapters/spatialite.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "spatialite" 2 | -------------------------------------------------------------------------------- /test/adapters/jdbcpostgresql.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "jdbcpostgresql" 2 | -------------------------------------------------------------------------------- /test/adapters/mysql2_makara.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "mysql2_makara" 2 | -------------------------------------------------------------------------------- /test/adapters/mysql2spatial.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "mysql2spatial" 2 | -------------------------------------------------------------------------------- /test/adapters/postgresql_makara.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "postgresql" 2 | -------------------------------------------------------------------------------- /test/models/alarm.rb: -------------------------------------------------------------------------------- 1 | class Alarm < ActiveRecord::Base 2 | end 3 | -------------------------------------------------------------------------------- /gemfiles/6.1.gemfile: -------------------------------------------------------------------------------- 1 | gem 'activerecord', '~> 6.1.0.alpha', github: "rails/rails" 2 | -------------------------------------------------------------------------------- /test/adapters/seamless_database_pool.rb: -------------------------------------------------------------------------------- 1 | ENV["ARE_DB"] = "seamless_database_pool" 2 | -------------------------------------------------------------------------------- /test/models/car.rb: 
-------------------------------------------------------------------------------- 1 | class Car < ActiveRecord::Base 2 | self.primary_key = :Name 3 | end 4 | -------------------------------------------------------------------------------- /test/models/question.rb: -------------------------------------------------------------------------------- 1 | class Question < ActiveRecord::Base 2 | has_one :rule 3 | end 4 | -------------------------------------------------------------------------------- /test/models/rule.rb: -------------------------------------------------------------------------------- 1 | class Rule < ActiveRecord::Base 2 | belongs_to :question 3 | end 4 | -------------------------------------------------------------------------------- /gemfiles/3.2.gemfile: -------------------------------------------------------------------------------- 1 | gem 'activerecord', '~> 3.2.0' 2 | gem 'composite_primary_keys', '~> 5.0' 3 | -------------------------------------------------------------------------------- /gemfiles/4.0.gemfile: -------------------------------------------------------------------------------- 1 | gem 'activerecord', '~> 4.0.0' 2 | gem 'composite_primary_keys', '~> 6.0' 3 | -------------------------------------------------------------------------------- /gemfiles/4.1.gemfile: -------------------------------------------------------------------------------- 1 | gem 'activerecord', '~> 4.1.0' 2 | gem 'composite_primary_keys', '~> 7.0' 3 | -------------------------------------------------------------------------------- /gemfiles/4.2.gemfile: -------------------------------------------------------------------------------- 1 | gem 'activerecord', '~> 4.2.0' 2 | gem 'composite_primary_keys', '~> 8.0' 3 | -------------------------------------------------------------------------------- /gemfiles/5.0.gemfile: -------------------------------------------------------------------------------- 1 | gem 'activerecord', '~> 5.0.0' 2 | gem 'composite_primary_keys', '~> 9.0' 3 | -------------------------------------------------------------------------------- /test/models/dictionary.rb: -------------------------------------------------------------------------------- 1 | require_relative 'book' 2 | 3 | class Dictionary < Book 4 | end 5 | -------------------------------------------------------------------------------- /test/models/group.rb: -------------------------------------------------------------------------------- 1 | class Group < ActiveRecord::Base 2 | self.table_name = 'group' 3 | end 4 | -------------------------------------------------------------------------------- /gemfiles/5.1.gemfile: -------------------------------------------------------------------------------- 1 | gem 'activerecord', '~> 5.1.0' 2 | gem 'composite_primary_keys', '~> 10.0' 3 | -------------------------------------------------------------------------------- /gemfiles/5.2.gemfile: -------------------------------------------------------------------------------- 1 | gem 'activerecord', '~> 5.2.0' 2 | gem 'composite_primary_keys', '~> 11.0' 3 | -------------------------------------------------------------------------------- /test/models/account.rb: -------------------------------------------------------------------------------- 1 | class Account < ActiveRecord::Base 2 | self.locking_column = :lock 3 | end 4 | -------------------------------------------------------------------------------- /test/schema/postgis_schema.rb: -------------------------------------------------------------------------------- 1 | require 
File.expand_path(File.dirname(__FILE__) + '/postgresql_schema') 2 | -------------------------------------------------------------------------------- /test/models/promotion.rb: -------------------------------------------------------------------------------- 1 | class Promotion < ActiveRecord::Base 2 | self.primary_key = :promotion_id 3 | end 4 | -------------------------------------------------------------------------------- /test/schema/jdbcpostgresql_schema.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/postgresql_schema') 2 | -------------------------------------------------------------------------------- /benchmarks/models/test_innodb.rb: -------------------------------------------------------------------------------- 1 | class TestInnoDb < ActiveRecord::Base 2 | self.table_name = 'test_innodb' 3 | end 4 | -------------------------------------------------------------------------------- /benchmarks/models/test_memory.rb: -------------------------------------------------------------------------------- 1 | class TestMemory < ActiveRecord::Base 2 | self.table_name = 'test_memory' 3 | end 4 | -------------------------------------------------------------------------------- /benchmarks/models/test_myisam.rb: -------------------------------------------------------------------------------- 1 | class TestMyISAM < ActiveRecord::Base 2 | self.table_name = 'test_myisam' 3 | end 4 | -------------------------------------------------------------------------------- /test/models/discount.rb: -------------------------------------------------------------------------------- 1 | class Discount < ActiveRecord::Base 2 | belongs_to :discountable, polymorphic: true 3 | end 4 | -------------------------------------------------------------------------------- /lib/activerecord-import/version.rb: -------------------------------------------------------------------------------- 1 | module ActiveRecord 2 | module Import 3 | VERSION = "1.0.6".freeze 4 | end 5 | end 6 | -------------------------------------------------------------------------------- /test/models/user.rb: -------------------------------------------------------------------------------- 1 | class User < ActiveRecord::Base 2 | has_many :user_tokens, primary_key: :name, foreign_key: :user_name 3 | end 4 | -------------------------------------------------------------------------------- /test/models/bike_maker.rb: -------------------------------------------------------------------------------- 1 | module Bike 2 | def self.table_name_prefix 3 | 'bike_' 4 | end 5 | class Maker < ActiveRecord::Base 6 | end 7 | end 8 | -------------------------------------------------------------------------------- /test/models/chapter.rb: -------------------------------------------------------------------------------- 1 | class Chapter < ActiveRecord::Base 2 | belongs_to :book, inverse_of: :chapters 3 | validates :title, presence: true 4 | end 5 | -------------------------------------------------------------------------------- /test/models/end_note.rb: -------------------------------------------------------------------------------- 1 | class EndNote < ActiveRecord::Base 2 | belongs_to :book, inverse_of: :end_notes 3 | validates :note, presence: true 4 | end 5 | -------------------------------------------------------------------------------- /test/models/tag.rb: -------------------------------------------------------------------------------- 1 | class Tag < ActiveRecord::Base 2 | 
self.primary_keys = :tag_id, :publisher_id unless ENV["SKIP_COMPOSITE_PK"] 3 | has_many :books, inverse_of: :tag 4 | end 5 | -------------------------------------------------------------------------------- /test/models/user_token.rb: -------------------------------------------------------------------------------- 1 | class UserToken < ActiveRecord::Base 2 | belongs_to :user, primary_key: :name, foreign_key: :user_name 3 | validates :user, presence: true 4 | end 5 | -------------------------------------------------------------------------------- /lib/activerecord-import.rb: -------------------------------------------------------------------------------- 1 | # rubocop:disable Style/FileName 2 | require "active_support/lazy_load_hooks" 3 | 4 | ActiveSupport.on_load(:active_record) do 5 | require "activerecord-import/base" 6 | end 7 | -------------------------------------------------------------------------------- /lib/activerecord-import/adapters/mysql2_adapter.rb: -------------------------------------------------------------------------------- 1 | require "activerecord-import/adapters/mysql_adapter" 2 | 3 | module ActiveRecord::Import::Mysql2Adapter 4 | include ActiveRecord::Import::MysqlAdapter 5 | end 6 | -------------------------------------------------------------------------------- /test/models/animal.rb: -------------------------------------------------------------------------------- 1 | class Animal < ActiveRecord::Base 2 | after_initialize :validate_name_presence, if: :new_record? 3 | def validate_name_presence 4 | raise ArgumentError if name.nil? 5 | end 6 | end 7 | -------------------------------------------------------------------------------- /lib/activerecord-import/adapters/em_mysql2_adapter.rb: -------------------------------------------------------------------------------- 1 | require "activerecord-import/adapters/mysql_adapter" 2 | 3 | module ActiveRecord::Import::EMMysql2Adapter 4 | include ActiveRecord::Import::MysqlAdapter 5 | end 6 | -------------------------------------------------------------------------------- /test/sqlite3/import_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper') 2 | require File.expand_path(File.dirname(__FILE__) + '/../support/sqlite3/import_examples') 3 | 4 | should_support_sqlite3_import_functionality 5 | -------------------------------------------------------------------------------- /test/jdbcsqlite3/import_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper') 2 | require File.expand_path(File.dirname(__FILE__) + '/../support/sqlite3/import_examples') 3 | 4 | should_support_sqlite3_import_functionality 5 | -------------------------------------------------------------------------------- /test/models/vendor.rb: -------------------------------------------------------------------------------- 1 | class Vendor < ActiveRecord::Base 2 | store :preferences, accessors: [:color], coder: JSON 3 | 4 | store_accessor :data, :size 5 | store_accessor :config, :contact 6 | store_accessor :settings, :charge_code 7 | end 8 | -------------------------------------------------------------------------------- /test/postgresql/import_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper') 2 | require File.expand_path(File.dirname(__FILE__) + 
'/../support/postgresql/import_examples') 3 | 4 | should_support_postgresql_import_functionality 5 | -------------------------------------------------------------------------------- /test/jdbcpostgresql/import_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper') 2 | require File.expand_path(File.dirname(__FILE__) + '/../support/postgresql/import_examples') 3 | 4 | should_support_postgresql_import_functionality 5 | -------------------------------------------------------------------------------- /lib/activerecord-import/active_record/adapters/jdbcmysql_adapter.rb: -------------------------------------------------------------------------------- 1 | require "active_record/connection_adapters/mysql_adapter" 2 | require "activerecord-import/adapters/mysql_adapter" 3 | 4 | class ActiveRecord::ConnectionAdapters::MysqlAdapter 5 | include ActiveRecord::Import::MysqlAdapter 6 | end 7 | -------------------------------------------------------------------------------- /lib/activerecord-import/active_record/adapters/mysql2_adapter.rb: -------------------------------------------------------------------------------- 1 | require "active_record/connection_adapters/mysql2_adapter" 2 | require "activerecord-import/adapters/mysql2_adapter" 3 | 4 | class ActiveRecord::ConnectionAdapters::Mysql2Adapter 5 | include ActiveRecord::Import::Mysql2Adapter 6 | end 7 | -------------------------------------------------------------------------------- /lib/activerecord-import/active_record/adapters/sqlite3_adapter.rb: -------------------------------------------------------------------------------- 1 | require "active_record/connection_adapters/sqlite3_adapter" 2 | require "activerecord-import/adapters/sqlite3_adapter" 3 | 4 | class ActiveRecord::ConnectionAdapters::SQLite3Adapter 5 | include ActiveRecord::Import::SQLite3Adapter 6 | end 7 | -------------------------------------------------------------------------------- /lib/activerecord-import/active_record/adapters/jdbcsqlite3_adapter.rb: -------------------------------------------------------------------------------- 1 | require "active_record/connection_adapters/sqlite3_adapter" 2 | require "activerecord-import/adapters/sqlite3_adapter" 3 | 4 | class ActiveRecord::ConnectionAdapters::SQLite3Adapter 5 | include ActiveRecord::Import::SQLite3Adapter 6 | end 7 | -------------------------------------------------------------------------------- /test/mysql2/import_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper') 2 | require File.expand_path(File.dirname(__FILE__) + '/../support/assertions') 3 | require File.expand_path(File.dirname(__FILE__) + '/../support/mysql/import_examples') 4 | 5 | should_support_mysql_import_functionality 6 | -------------------------------------------------------------------------------- /lib/activerecord-import/active_record/adapters/postgresql_adapter.rb: -------------------------------------------------------------------------------- 1 | require "active_record/connection_adapters/postgresql_adapter" 2 | require "activerecord-import/adapters/postgresql_adapter" 3 | 4 | class ActiveRecord::ConnectionAdapters::PostgreSQLAdapter 5 | include ActiveRecord::Import::PostgreSQLAdapter 6 | end 7 | -------------------------------------------------------------------------------- /test/jdbcmysql/import_test.rb: 
-------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper') 2 | require File.expand_path(File.dirname(__FILE__) + '/../support/assertions') 3 | require File.expand_path(File.dirname(__FILE__) + '/../support/mysql/import_examples') 4 | 5 | should_support_mysql_import_functionality 6 | -------------------------------------------------------------------------------- /lib/activerecord-import/active_record/adapters/jdbcpostgresql_adapter.rb: -------------------------------------------------------------------------------- 1 | require "active_record/connection_adapters/postgresql_adapter" 2 | require "activerecord-import/adapters/postgresql_adapter" 3 | 4 | class ActiveRecord::ConnectionAdapters::PostgreSQLAdapter 5 | include ActiveRecord::Import::PostgreSQLAdapter 6 | end 7 | -------------------------------------------------------------------------------- /test/mysql2_makara/import_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper') 2 | 3 | require File.expand_path(File.dirname(__FILE__) + '/../support/assertions') 4 | require File.expand_path(File.dirname(__FILE__) + '/../support/mysql/import_examples') 5 | 6 | should_support_mysql_import_functionality 7 | -------------------------------------------------------------------------------- /test/mysqlspatial2/import_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper') 2 | 3 | require File.expand_path(File.dirname(__FILE__) + '/../support/assertions') 4 | require File.expand_path(File.dirname(__FILE__) + '/../support/mysql/import_examples') 5 | 6 | should_support_mysql_import_functionality 7 | -------------------------------------------------------------------------------- /test/schema/version.rb: -------------------------------------------------------------------------------- 1 | class SchemaInfo < ActiveRecord::Base 2 | if respond_to?(:table_name=) 3 | self.table_name = 'schema_info' 4 | else 5 | # this is becoming deprecated in ActiveRecord but not all adapters supported it 6 | # at this time 7 | set_table_name 'schema_info' 8 | end 9 | VERSION = 12 10 | end 11 | -------------------------------------------------------------------------------- /lib/activerecord-import/active_record/adapters/abstract_adapter.rb: -------------------------------------------------------------------------------- 1 | require "activerecord-import/adapters/abstract_adapter" 2 | 3 | module ActiveRecord # :nodoc: 4 | module ConnectionAdapters # :nodoc: 5 | class AbstractAdapter # :nodoc: 6 | include ActiveRecord::Import::AbstractAdapter::InstanceMethods 7 | end 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /lib/activerecord-import/mysql2.rb: -------------------------------------------------------------------------------- 1 | warn <<-MSG 2 | [DEPRECATION] loading activerecord-import via 'require "activerecord-import/"' 3 | is deprecated. Update to autorequire using 'require "activerecord-import"'. 
See 4 | http://github.com/zdennis/activerecord-import/wiki/Requiring for more information 5 | MSG 6 | 7 | require "activerecord-import" 8 | -------------------------------------------------------------------------------- /lib/activerecord-import/sqlite3.rb: -------------------------------------------------------------------------------- 1 | warn <<-MSG 2 | [DEPRECATION] loading activerecord-import via 'require "activerecord-import/"' 3 | is deprecated. Update to autorequire using 'require "activerecord-import"'. See 4 | http://github.com/zdennis/activerecord-import/wiki/Requiring for more information 5 | MSG 6 | 7 | require "activerecord-import" 8 | -------------------------------------------------------------------------------- /lib/activerecord-import/postgresql.rb: -------------------------------------------------------------------------------- 1 | warn <<-MSG 2 | [DEPRECATION] loading activerecord-import via 'require "activerecord-import/"' 3 | is deprecated. Update to autorequire using 'require "activerecord-import"'. See 4 | http://github.com/zdennis/activerecord-import/wiki/Requiring for more information 5 | MSG 6 | 7 | require "activerecord-import" 8 | -------------------------------------------------------------------------------- /benchmarks/lib/float.rb: -------------------------------------------------------------------------------- 1 | # Taken from http://www.programmingishard.com/posts/show/128 2 | # Posted by rbates 3 | class Float 4 | def round_to(x) 5 | (self * 10**x).round.to_f / 10**x 6 | end 7 | 8 | def ceil_to(x) 9 | (self * 10**x).ceil.to_f / 10**x 10 | end 11 | 12 | def floor_to(x) 13 | (self * 10**x).floor.to_f / 10**x 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /lib/activerecord-import/active_record/adapters/seamless_database_pool_adapter.rb: -------------------------------------------------------------------------------- 1 | require "seamless_database_pool" 2 | require "active_record/connection_adapters/seamless_database_pool_adapter" 3 | require "activerecord-import/adapters/mysql_adapter" 4 | 5 | class ActiveRecord::ConnectionAdapters::SeamlessDatabasePoolAdapter 6 | include ActiveRecord::Import::MysqlAdapter 7 | end 8 | -------------------------------------------------------------------------------- /test/models/book.rb: -------------------------------------------------------------------------------- 1 | class Book < ActiveRecord::Base 2 | belongs_to :topic, inverse_of: :books 3 | belongs_to :tag, foreign_key: [:tag_id, :parent_id] 4 | 5 | has_many :chapters, inverse_of: :book 6 | has_many :discounts, as: :discountable 7 | has_many :end_notes, inverse_of: :book 8 | enum status: [:draft, :published] if ENV['AR_VERSION'].to_f >= 4.1 9 | end 10 | -------------------------------------------------------------------------------- /test/postgis/import_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper') 2 | require File.expand_path(File.dirname(__FILE__) + '/../support/postgresql/import_examples') 3 | 4 | should_support_postgresql_import_functionality 5 | 6 | if ActiveRecord::Base.connection.supports_on_duplicate_key_update? 
7 | should_support_postgresql_upsert_functionality 8 | end 9 | -------------------------------------------------------------------------------- /test/makara_postgis/import_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper') 2 | require File.expand_path(File.dirname(__FILE__) + '/../support/postgresql/import_examples') 3 | 4 | should_support_postgresql_import_functionality 5 | 6 | if ActiveRecord::Base.connection.supports_on_duplicate_key_update? 7 | should_support_postgresql_upsert_functionality 8 | end 9 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## MAC OS 2 | .DS_Store 3 | 4 | ## TEXTMATE 5 | *.tmproj 6 | tmtags 7 | 8 | ## EMACS 9 | *~ 10 | \#* 11 | .\#* 12 | 13 | ## VIM 14 | *.swp 15 | 16 | ## PROJECT::GENERAL 17 | coverage 18 | rdoc 19 | pkg 20 | *.gem 21 | *.lock 22 | 23 | ## PROJECT::SPECIFIC 24 | log/*.log 25 | test.db 26 | test/database.yml 27 | benchmarks/log/ 28 | 29 | .ruby-* 30 | .bundle/ 31 | .redcar/ 32 | .rvmrc 33 | docsite/ 34 | -------------------------------------------------------------------------------- /test/schema/sqlite3_schema.rb: -------------------------------------------------------------------------------- 1 | ActiveRecord::Schema.define do 2 | create_table :alarms, force: true do |t| 3 | t.column :device_id, :integer, null: false 4 | t.column :alarm_type, :integer, null: false 5 | t.column :status, :integer, null: false 6 | t.column :metadata, :text 7 | t.column :secret_key, :binary 8 | t.datetime :created_at 9 | t.datetime :updated_at 10 | end 11 | 12 | add_index :alarms, [:device_id, :alarm_type], unique: true, where: 'status <> 0' 13 | end 14 | -------------------------------------------------------------------------------- /test/models/widget.rb: -------------------------------------------------------------------------------- 1 | class CustomCoder 2 | def load(value) 3 | if value.nil? 
4 | {} 5 | else 6 | YAML.load(value) 7 | end 8 | end 9 | 10 | def dump(value) 11 | YAML.dump(value) 12 | end 13 | end 14 | 15 | class Widget < ActiveRecord::Base 16 | self.primary_key = :w_id 17 | 18 | default_scope -> { where(active: true) } 19 | 20 | serialize :data, Hash 21 | serialize :json_data, JSON 22 | serialize :unspecified_data 23 | serialize :custom_data, CustomCoder.new 24 | end 25 | -------------------------------------------------------------------------------- /benchmarks/lib/output_to_csv.rb: -------------------------------------------------------------------------------- 1 | require 'csv' 2 | 3 | module OutputToCSV 4 | def self.output_results( filename, results ) 5 | CSV.open( filename, 'w' ) do |csv| 6 | # Iterate over each result set, which contains many results 7 | results.each do |result_set| 8 | columns = [] 9 | times = [] 10 | result_set.each do |result| 11 | columns << result.description 12 | times << result.tms.real 13 | end 14 | csv << columns 15 | csv << times 16 | end 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /benchmarks/schema/mysql2_schema.rb: -------------------------------------------------------------------------------- 1 | ActiveRecord::Schema.define do 2 | create_table :test_myisam, options: 'ENGINE=MyISAM', force: true do |t| 3 | t.column :my_name, :string, null: false 4 | t.column :description, :string 5 | end 6 | 7 | create_table :test_innodb, options: 'ENGINE=InnoDb', force: true do |t| 8 | t.column :my_name, :string, null: false 9 | t.column :description, :string 10 | end 11 | 12 | create_table :test_memory, options: 'ENGINE=Memory', force: true do |t| 13 | t.column :my_name, :string, null: false 14 | t.column :description, :string 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /benchmarks/lib/mysql2_benchmark.rb: -------------------------------------------------------------------------------- 1 | class Mysql2Benchmark < BenchmarkBase 2 | def benchmark_all( array_of_cols_and_vals ) 3 | methods = self.methods.find_all { |m| m =~ /benchmark_/ } 4 | methods.delete_if { |m| m =~ /benchmark_(all|model)/ } 5 | methods.each { |method| send( method, array_of_cols_and_vals ) } 6 | end 7 | 8 | def benchmark_myisam( array_of_cols_and_vals ) 9 | bm_model( TestMyISAM, array_of_cols_and_vals ) 10 | end 11 | 12 | def benchmark_innodb( array_of_cols_and_vals ) 13 | bm_model( TestInnoDb, array_of_cols_and_vals ) 14 | end 15 | 16 | def benchmark_memory( array_of_cols_and_vals ) 17 | bm_model( TestMemory, array_of_cols_and_vals ) 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /test/schema/mysql2_schema.rb: -------------------------------------------------------------------------------- 1 | ActiveRecord::Schema.define do 2 | create_table :books, force: :cascade do |t| 3 | t.string :title, null: false 4 | t.virtual :upper_title, type: :string, as: "upper(`title`)" if t.respond_to?(:virtual) 5 | t.string :publisher, null: false, default: 'Default Publisher' 6 | t.string :author_name, null: false 7 | t.datetime :created_at 8 | t.datetime :created_on 9 | t.datetime :updated_at 10 | t.datetime :updated_on 11 | t.date :publish_date 12 | t.integer :topic_id 13 | t.integer :tag_id 14 | t.integer :publisher_id 15 | t.boolean :for_sale, default: true 16 | t.integer :status, default: 0 17 | t.string :type 18 | end 19 | end 20 | 
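The books table created in test/schema/mysql2_schema.rb above is the main fixture the test suite imports into. As a minimal sketch of the gem's bulk-insert API (the titles and publisher below are made-up values; the columns-plus-values call form mirrors the shared examples later in this dump):

    columns = [:title, :author_name, :publisher]
    values  = [
      ["Carrie",      "Stephen King", "Doubleday"],
      ["The Shining", "Stephen King", "Doubleday"]
    ]
    # Issues one multi-row INSERT statement instead of one INSERT per record.
    result = Book.import(columns, values, validate: false)
    result.ids # primary keys the database assigned to the new rows

The same .import call also accepts an array of model instances, which is the form the Topic-based tests in this repo use.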
-------------------------------------------------------------------------------- /test/models/topic.rb: -------------------------------------------------------------------------------- 1 | class Topic < ActiveRecord::Base 2 | validates_presence_of :author_name 3 | validates :title, numericality: { only_integer: true }, on: :context_test 4 | validates :title, uniqueness: true 5 | validates :content, uniqueness: true 6 | validates :word_count, numericality: { greater_than: 0 }, if: :content? 7 | 8 | validate -> { errors.add(:title, :validate_failed) if title == 'validate_failed' } 9 | before_validation -> { errors.add(:title, :invalid) if title == 'invalid' } 10 | 11 | has_many :books, inverse_of: :topic 12 | belongs_to :parent, class_name: "Topic" 13 | 14 | composed_of :description, mapping: [%w(title title), %w(author_name author_name)], allow_nil: true, class_name: "TopicDescription" 15 | 16 | default_scope { where(approved: true) } 17 | 18 | private 19 | 20 | def word_count 21 | @word_count ||= content.to_s.scan(/\w+/).count 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /.rubocop.yml: -------------------------------------------------------------------------------- 1 | inherit_from: .rubocop_todo.yml 2 | 3 | Lint/EndAlignment: 4 | AlignWith: variable 5 | 6 | Metrics/AbcSize: 7 | Enabled: false 8 | 9 | Metrics/ClassLength: 10 | Enabled: false 11 | 12 | Metrics/CyclomaticComplexity: 13 | Enabled: false 14 | 15 | Metrics/LineLength: 16 | Enabled: false 17 | 18 | Metrics/MethodLength: 19 | Enabled: false 20 | 21 | Metrics/ModuleLength: 22 | Enabled: false 23 | 24 | Metrics/PerceivedComplexity: 25 | Enabled: false 26 | 27 | Style/AlignParameters: 28 | EnforcedStyle: with_fixed_indentation 29 | 30 | Style/ClassAndModuleChildren: 31 | Enabled: false 32 | 33 | Style/Documentation: 34 | Enabled: false 35 | 36 | Style/ElseAlignment: 37 | Enabled: false 38 | 39 | Style/SpaceInsideParens: 40 | Enabled: false 41 | 42 | Style/SpecialGlobalVars: 43 | Enabled: false 44 | 45 | Style/StringLiterals: 46 | Enabled: false 47 | 48 | Style/TrailingCommaInLiteral: 49 | Enabled: false 50 | -------------------------------------------------------------------------------- /test/database.yml.sample: -------------------------------------------------------------------------------- 1 | common: &common 2 | username: root 3 | password: 4 | encoding: utf8 5 | host: localhost 6 | database: activerecord_import_test 7 | 8 | mysql2: &mysql2 9 | <<: *common 10 | adapter: mysql2 11 | 12 | mysql2spatial: 13 | <<: *mysql2 14 | 15 | mysql2_makara: 16 | <<: *mysql2 17 | 18 | postgresql: &postgresql 19 | <<: *common 20 | username: postgres 21 | adapter: postgresql 22 | min_messages: warning 23 | 24 | postresql_makara: 25 | <<: *postgresql 26 | 27 | postgis: 28 | <<: *postgresql 29 | 30 | oracle: 31 | <<: *common 32 | adapter: oracle 33 | min_messages: debug 34 | 35 | seamless_database_pool: 36 | <<: *common 37 | adapter: seamless_database_pool 38 | prepared_statements: false 39 | pool_adapter: mysql2 40 | master: 41 | host: localhost 42 | 43 | sqlite: 44 | adapter: sqlite 45 | dbfile: test.db 46 | 47 | sqlite3: &sqlite3 48 | adapter: sqlite3 49 | database: test.db 50 | 51 | spatialite: 52 | <<: *sqlite3 53 | -------------------------------------------------------------------------------- /test/support/generate.rb: -------------------------------------------------------------------------------- 1 | class ActiveSupport::TestCase 2 | def Build(*args) # rubocop:disable 
Style/MethodName 3 | n = args.shift if args.first.is_a?(Numeric) 4 | factory = args.shift 5 | factory_bot_args = args.shift || {} 6 | 7 | if n 8 | [].tap do |collection| 9 | n.times.each { collection << FactoryBot.build(factory.to_s.singularize.to_sym, factory_bot_args) } 10 | end 11 | else 12 | FactoryBot.build(factory.to_s.singularize.to_sym, factory_bot_args) 13 | end 14 | end 15 | 16 | def Generate(*args) # rubocop:disable Style/MethodName 17 | n = args.shift if args.first.is_a?(Numeric) 18 | factory = args.shift 19 | factory_bot_args = args.shift || {} 20 | 21 | if n 22 | [].tap do |collection| 23 | n.times.each { collection << FactoryBot.create(factory.to_s.singularize.to_sym, factory_bot_args) } 24 | end 25 | else 26 | FactoryBot.create(factory.to_s.singularize.to_sym, factory_bot_args) 27 | end 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /activerecord-import.gemspec: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | require File.expand_path('../lib/activerecord-import/version', __FILE__) 3 | 4 | Gem::Specification.new do |gem| 5 | gem.authors = ["Zach Dennis"] 6 | gem.email = ["zach.dennis@gmail.com"] 7 | gem.summary = "Bulk insert extension for ActiveRecord" 8 | gem.description = "A library for bulk inserting data using ActiveRecord." 9 | gem.homepage = "http://github.com/zdennis/activerecord-import" 10 | gem.license = "MIT" 11 | 12 | gem.files = `git ls-files`.split($\) 13 | gem.executables = gem.files.grep(%r{^bin/}).map { |f| File.basename(f) } 14 | gem.test_files = gem.files.grep(%r{^(test|spec|features)/}) 15 | gem.name = "activerecord-import" 16 | gem.require_paths = ["lib"] 17 | gem.version = ActiveRecord::Import::VERSION 18 | 19 | gem.required_ruby_version = ">= 2.0.0" 20 | 21 | gem.add_runtime_dependency "activerecord", ">= 3.2" 22 | gem.add_development_dependency "rake" 23 | end 24 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2020 Zach Dennis 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /test/value_sets_records_parser_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/test_helper') 2 | 3 | require 'activerecord-import/value_sets_parser' 4 | 5 | describe "ActiveRecord::Import::ValueSetsRecordsParser" do 6 | context "#parse - computing insert value sets" do 7 | let(:parser) { ActiveRecord::Import::ValueSetsRecordsParser } 8 | let(:base_sql) { "INSERT INTO atable (a,b,c)" } 9 | let(:values) { ["(1,2,3)", "(2,3,4)", "(3,4,5)"] } 10 | 11 | context "when the max number of records is 1" do 12 | it "should return 3 value sets when given 3 values sets" do 13 | value_sets = parser.parse values, max_records: 1 14 | assert_equal 3, value_sets.size 15 | end 16 | end 17 | 18 | context "when the max number of records is 2" do 19 | it "should return 2 value sets when given 3 values sets" do 20 | value_sets = parser.parse values, max_records: 2 21 | assert_equal 2, value_sets.size 22 | end 23 | end 24 | 25 | context "when the max number of records is 3" do 26 | it "should return 1 value sets when given 3 values sets" do 27 | value_sets = parser.parse values, max_records: 3 28 | assert_equal 1, value_sets.size 29 | end 30 | end 31 | end 32 | end 33 | -------------------------------------------------------------------------------- /benchmarks/README: -------------------------------------------------------------------------------- 1 | To run the benchmarks, from within the benchmarks run: 2 | ruby benchmark.rb [options] 3 | 4 | The following options are supported: 5 | --adapter [String] The database adapter to use. IE: mysql, postgresql, oracle 6 | 7 | --do-not-delete By default all records in the benchmark tables will be deleted at the end of the benchmark. This flag indicates not to delete the benchmark data. 8 | --num [Integer] The number of objects to benchmark. (Required!) 9 | --table-type [String] The table type to test. This can be used multiple times. By default it is all table types. 10 | --to-csv [String] Print results in a CSV file format 11 | --to-html [String] Print results in HTML format (String filename must be supplied) 12 | 13 | See "ruby benchmark.rb -h" for the complete listing of options. 14 | 15 | EXAMPLES 16 | -------- 17 | To output to html format: 18 | ruby benchmark.rb --adapter=mysql2 --to-html=results.html 19 | 20 | To output to csv format: 21 | ruby benchmark.rb --adapter=mysql2 --to-csv=results.csv 22 | 23 | LIMITATIONS 24 | ----------- 25 | Currently MySQL is the only supported adapter to benchmark. 
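Note that the example invocations above omit --num even though it is marked as required; a complete run (the record count and output file name here are arbitrary) looks like:

  ruby benchmark.rb --adapter=mysql2 --num=10000 --to-csv=results.csv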
26 | 27 | AUTHOR 28 | ------ 29 | Zach Dennis 30 | zach.dennis@gmail.com 31 | http://www.continuousthinking.com 32 | 33 | -------------------------------------------------------------------------------- /test/travis/database.yml: -------------------------------------------------------------------------------- 1 | common: &common 2 | username: root 3 | password: 4 | encoding: utf8 5 | host: localhost 6 | database: activerecord_import_test 7 | 8 | jdbcpostgresql: &postgresql 9 | <<: *common 10 | username: postgres 11 | adapter: jdbcpostgresql 12 | min_messages: warning 13 | 14 | jdbcmysql: &mysql2 15 | <<: *common 16 | adapter: jdbcmysql 17 | 18 | jdbcsqlite3: &sqlite3 19 | <<: *common 20 | adapter: jdbcsqlite3 21 | 22 | mysql2: &mysql2 23 | <<: *common 24 | adapter: mysql2 25 | 26 | mysql2spatial: 27 | <<: *mysql2 28 | 29 | mysql2_makara: 30 | <<: *mysql2 31 | 32 | oracle: 33 | <<: *common 34 | adapter: oracle 35 | min_messages: debug 36 | 37 | postgresql: &postgresql 38 | <<: *common 39 | username: postgres 40 | adapter: postgresql 41 | min_messages: warning 42 | 43 | postresql_makara: 44 | <<: *postgresql 45 | 46 | postgis: 47 | <<: *postgresql 48 | 49 | seamless_database_pool: 50 | <<: *common 51 | adapter: seamless_database_pool 52 | pool_adapter: mysql2 53 | prepared_statements: false 54 | master: 55 | host: localhost 56 | 57 | sqlite: 58 | adapter: sqlite 59 | dbfile: test.db 60 | 61 | sqlite3: &sqlite3 62 | adapter: sqlite3 63 | database: ":memory:" 64 | 65 | spatialite: 66 | <<: *sqlite3 67 | -------------------------------------------------------------------------------- /.rubocop_todo.yml: -------------------------------------------------------------------------------- 1 | # This configuration was generated by 2 | # `rubocop --auto-gen-config` 3 | # on 2016-03-17 18:14:55 -0700 using RuboCop version 0.38.0. 4 | # The point is for the user to remove these configuration records 5 | # one by one as the offenses are removed from the code base. 6 | # Note that changes in the inspected code, or installation of new 7 | # versions of RuboCop, may require this file to be generated again. 8 | 9 | # Offense count: 2 10 | Lint/HandleExceptions: 11 | Exclude: 12 | - 'lib/activerecord-import/base.rb' 13 | - 'test/import_test.rb' 14 | 15 | # Offense count: 2 16 | Lint/RescueException: 17 | Exclude: 18 | - 'benchmarks/lib/cli_parser.rb' 19 | - 'test/import_test.rb' 20 | 21 | # Offense count: 4 22 | # Cop supports --auto-correct. 23 | # Configuration parameters: AllowUnusedKeywordArguments, IgnoreEmptyMethods. 24 | Lint/UnusedMethodArgument: 25 | Exclude: 26 | - 'lib/activerecord-import/adapters/postgresql_adapter.rb' 27 | - 'lib/activerecord-import/import.rb' 28 | 29 | # Offense count: 2 30 | # Cop supports --auto-correct. 31 | # Configuration parameters: Keywords. 
32 | # Keywords: TODO, FIXME, OPTIMIZE, HACK, REVIEW 33 | Style/CommentAnnotation: 34 | Exclude: 35 | - 'benchmarks/lib/cli_parser.rb' 36 | - 'lib/activerecord-import/import.rb' 37 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | gemspec 4 | 5 | version = ENV['AR_VERSION'].to_f 6 | 7 | mysql2_version = '0.3.0' 8 | mysql2_version = '0.4.0' if version >= 4.2 9 | sqlite3_version = '1.3.0' 10 | sqlite3_version = '1.4.0' if version >= 6.0 11 | 12 | group :development, :test do 13 | gem 'rubocop', '~> 0.40.0' 14 | gem 'rake' 15 | end 16 | 17 | # Database Adapters 18 | platforms :ruby do 19 | gem "mysql2", "~> #{mysql2_version}" 20 | gem "pg", "~> 0.9" 21 | gem "sqlite3", "~> #{sqlite3_version}" 22 | gem "seamless_database_pool", "~> 1.0.20" 23 | end 24 | 25 | platforms :jruby do 26 | gem "jdbc-mysql" 27 | gem "jdbc-postgres" 28 | gem "activerecord-jdbcsqlite3-adapter", "~> 1.3" 29 | gem "activerecord-jdbcmysql-adapter", "~> 1.3" 30 | gem "activerecord-jdbcpostgresql-adapter", "~> 1.3" 31 | end 32 | 33 | # Support libs 34 | gem "factory_bot" 35 | gem "timecop" 36 | gem "chronic" 37 | gem "mocha", "~> 1.3.0" 38 | 39 | # Debugging 40 | platforms :jruby do 41 | gem "ruby-debug", "= 0.10.4" 42 | end 43 | 44 | platforms :mri_19 do 45 | gem "debugger" 46 | end 47 | 48 | platforms :ruby do 49 | gem "pry-byebug" 50 | gem "pry", "~> 0.12.0" 51 | gem "rb-readline" 52 | end 53 | 54 | if version >= 4.0 55 | gem "minitest" 56 | else 57 | gem "test-unit" 58 | end 59 | 60 | eval_gemfile File.expand_path("../gemfiles/#{version}.gemfile", __FILE__) 61 | -------------------------------------------------------------------------------- /lib/activerecord-import/base.rb: -------------------------------------------------------------------------------- 1 | require "pathname" 2 | require "active_record" 3 | require "active_record/version" 4 | 5 | module ActiveRecord::Import 6 | ADAPTER_PATH = "activerecord-import/active_record/adapters".freeze 7 | 8 | def self.base_adapter(adapter) 9 | case adapter 10 | when 'mysql2_makara' then 'mysql2' 11 | when 'mysql2spatial' then 'mysql2' 12 | when 'spatialite' then 'sqlite3' 13 | when 'postgresql_makara' then 'postgresql' 14 | when 'makara_postgis' then 'postgresql' 15 | when 'postgis' then 'postgresql' 16 | when 'cockroachdb' then 'postgresql' 17 | else adapter 18 | end 19 | end 20 | 21 | # Loads the import functionality for a specific database adapter 22 | def self.require_adapter(adapter) 23 | require File.join(ADAPTER_PATH, "/#{base_adapter(adapter)}_adapter") 24 | rescue LoadError 25 | # fallback 26 | end 27 | 28 | # Loads the import functionality for the passed in ActiveRecord connection 29 | def self.load_from_connection_pool(connection_pool) 30 | adapter = 31 | if connection_pool.respond_to?(:db_config) # ActiveRecord >= 6.1 32 | connection_pool.db_config.adapter 33 | else 34 | connection_pool.spec.config[:adapter] 35 | end 36 | require_adapter adapter 37 | end 38 | end 39 | 40 | require 'activerecord-import/import' 41 | require 'activerecord-import/active_record/adapters/abstract_adapter' 42 | require 'activerecord-import/synchronize' 43 | require 'activerecord-import/value_sets_parser' 44 | -------------------------------------------------------------------------------- /test/support/shared_examples/on_duplicate_key_ignore.rb: -------------------------------------------------------------------------------- 1 
| def should_support_on_duplicate_key_ignore 2 | describe "#import" do 3 | extend ActiveSupport::TestCase::ImportAssertions 4 | let(:topic) { Topic.create!(title: "Book", author_name: "John Doe") } 5 | let(:topics) { [topic] } 6 | 7 | context "with :on_duplicate_key_ignore" do 8 | it "should skip duplicates and continue import" do 9 | topics << Topic.new(title: "Book 2", author_name: "Jane Doe") 10 | assert_difference "Topic.count", +1 do 11 | result = Topic.import topics, on_duplicate_key_ignore: true, validate: false 12 | assert_not_equal topics.first.id, result.ids.first 13 | assert_nil topics.last.id 14 | end 15 | end 16 | 17 | unless ENV["SKIP_COMPOSITE_PK"] 18 | context "with composite primary keys" do 19 | it "should import array of values successfully" do 20 | columns = [:tag_id, :publisher_id, :tag] 21 | values = [[1, 1, 'Mystery'], [1, 1, 'Science']] 22 | 23 | assert_difference "Tag.count", +1 do 24 | Tag.import columns, values, on_duplicate_key_ignore: true, validate: false 25 | end 26 | assert_equal 'Mystery', Tag.first.tag 27 | end 28 | end 29 | end 30 | end 31 | 32 | context "with :ignore" do 33 | it "should skip duplicates and continue import" do 34 | topics << Topic.new(title: "Book 2", author_name: "Jane Doe") 35 | assert_difference "Topic.count", +1 do 36 | result = Topic.import topics, ignore: true, validate: false 37 | assert_not_equal topics.first.id, result.ids.first 38 | assert_nil topics.last.id 39 | end 40 | end 41 | end 42 | end 43 | end 44 | -------------------------------------------------------------------------------- /test/support/factories.rb: -------------------------------------------------------------------------------- 1 | FactoryBot.define do 2 | sequence(:book_title) { |n| "Book #{n}" } 3 | sequence(:chapter_title) { |n| "Chapter #{n}" } 4 | sequence(:end_note) { |n| "Endnote #{n}" } 5 | 6 | factory :group do 7 | sequence(:order) { |n| "Order #{n}" } 8 | end 9 | 10 | factory :invalid_topic, class: "Topic" do 11 | sequence(:title) { |n| "Title #{n}" } 12 | author_name { nil } 13 | end 14 | 15 | factory :topic do 16 | sequence(:title) { |n| "Title #{n}" } 17 | sequence(:author_name) { |n| "Author #{n}" } 18 | sequence(:content) { |n| "Content #{n}" } 19 | end 20 | 21 | factory :widget do 22 | sequence(:w_id) { |n| n } 23 | end 24 | 25 | factory :question do 26 | sequence(:body) { |n| "Text #{n}" } 27 | 28 | trait :with_rule do 29 | after(:build) do |question| 30 | question.build_rule(FactoryBot.attributes_for(:rule)) 31 | end 32 | end 33 | end 34 | 35 | factory :rule do 36 | sequence(:id) { |n| n } 37 | sequence(:condition_text) { |n| "q_#{n}_#{n}" } 38 | end 39 | 40 | factory :topic_with_book, parent: :topic do 41 | after(:build) do |topic| 42 | 2.times do 43 | book = topic.books.build(title: FactoryBot.generate(:book_title), author_name: 'Stephen King') 44 | 3.times do 45 | book.chapters.build(title: FactoryBot.generate(:chapter_title)) 46 | end 47 | 48 | 4.times do 49 | book.end_notes.build(note: FactoryBot.generate(:end_note)) 50 | end 51 | end 52 | end 53 | end 54 | 55 | factory :book do 56 | title { 'Tortilla Flat' } 57 | author_name { 'John Steinbeck' } 58 | end 59 | 60 | factory :car do 61 | sequence(:Name) { |n| n } 62 | sequence(:Features) { |n| "Feature #{n}" } 63 | end 64 | end 65 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | require "bundler" 2 | Bundler.setup 3 | 4 | require 'rake' 5 | require 'rake/testtask' 
6 | 7 | namespace :display do 8 | task :notice do 9 | puts 10 | puts "To run tests you must supply the adapter, see rake -T for more information." 11 | puts 12 | end 13 | end 14 | task default: ["display:notice"] 15 | 16 | ADAPTERS = %w( 17 | mysql2 18 | mysql2_makara 19 | mysql2spatial 20 | jdbcmysql 21 | jdbcsqlite3 22 | jdbcpostgresql 23 | postgresql 24 | postgresql_makara 25 | postgis 26 | makara_postgis 27 | sqlite3 28 | spatialite 29 | seamless_database_pool 30 | ).freeze 31 | ADAPTERS.each do |adapter| 32 | namespace :test do 33 | desc "Runs #{adapter} database tests." 34 | Rake::TestTask.new(adapter) do |t| 35 | # FactoryBot has an issue with warnings, so turn off, so noisy 36 | # t.warning = true 37 | t.test_files = FileList["test/adapters/#{adapter}.rb", "test/*_test.rb", "test/active_record/*_test.rb", "test/#{adapter}/**/*_test.rb"] 38 | end 39 | task adapter 40 | end 41 | end 42 | 43 | begin 44 | require 'rcov/rcovtask' 45 | adapter = ENV['ARE_DB'] 46 | Rcov::RcovTask.new do |test| 47 | test.libs << 'test' 48 | test.pattern = ["test/adapters/#{adapter}.rb", "test/*_test.rb", "test/#{adapter}/**/*_test.rb"] 49 | test.verbose = true 50 | end 51 | rescue LoadError 52 | task :rcov do 53 | abort "RCov is not available. In order to run rcov, you must: sudo gem install rcov" 54 | end 55 | end 56 | 57 | require 'rdoc/task' 58 | Rake::RDocTask.new do |rdoc| 59 | version = File.exist?('VERSION') ? File.read('VERSION') : "" 60 | 61 | rdoc.rdoc_dir = 'rdoc' 62 | rdoc.title = "activerecord-import #{version}" 63 | rdoc.rdoc_files.include('README*') 64 | rdoc.rdoc_files.include('lib/**/*.rb') 65 | end 66 | 67 | require 'rubocop/rake_task' 68 | RuboCop::RakeTask.new 69 | -------------------------------------------------------------------------------- /benchmarks/lib/output_to_html.rb: -------------------------------------------------------------------------------- 1 | require 'erb' 2 | 3 | module OutputToHTML 4 | TEMPLATE_HEADER = <<"EOT".freeze 5 |
6 | All times are rounded to the nearest thousandth for display purposes. Speedups next to each time are computed 7 | before any rounding occurs. Also, all speedup calculations are computed by comparing a given time against 8 | the very first column (which is always the default ActiveRecord::Base.create method). 9 | 
10 | EOT 11 | 12 | TEMPLATE = <<"EOT".freeze 13 | 27 | 28 | 29 | <% columns.each do |col| %> 30 | <%= col %> 31 | <% end %> 32 | 33 | 34 | <% times.each do |time| %> 35 | <%= time %> 36 | <% end %> 37 | 38 | 39 |
40 | EOT 41 | 42 | def self.output_results( filename, results ) 43 | html = '' 44 | results.each do |result_set| 45 | columns = [] 46 | times = [] 47 | result_set.each do |result| 48 | columns << result.description 49 | if result.failed 50 | times << "failed" 51 | else 52 | time = result.tms.real.round_to( 3 ) 53 | speedup = ( result_set.first.tms.real / result.tms.real ).round 54 | times << (result == result_set.first ? time.to_s : "#{time} (#{speedup}x speedup)") 55 | end 56 | end 57 | 58 | template = ERB.new( TEMPLATE, 0, "%<>") 59 | html << template.result( binding ) 60 | end 61 | 62 | File.open( filename, 'w' ) { |file| file.write( TEMPLATE_HEADER + html ) } 63 | end 64 | end 65 | -------------------------------------------------------------------------------- /test/schema/postgresql_schema.rb: -------------------------------------------------------------------------------- 1 | ActiveRecord::Schema.define do 2 | execute('CREATE extension IF NOT EXISTS "hstore";') 3 | execute('CREATE extension IF NOT EXISTS "pgcrypto";') 4 | execute('CREATE extension IF NOT EXISTS "uuid-ossp";') 5 | 6 | # create ENUM if it does not exist yet 7 | begin 8 | execute('CREATE TYPE vendor_type AS ENUM (\'wholesaler\', \'retailer\');') 9 | rescue ActiveRecord::StatementInvalid => e 10 | # since PostgreSQL does not support IF NOT EXISTS when creating a TYPE, 11 | # rescue the error and check the error class 12 | raise unless e.cause.is_a? PG::DuplicateObject 13 | execute('ALTER TYPE vendor_type ADD VALUE IF NOT EXISTS \'wholesaler\';') 14 | execute('ALTER TYPE vendor_type ADD VALUE IF NOT EXISTS \'retailer\';') 15 | end 16 | 17 | create_table :vendors, id: :uuid, force: :cascade do |t| 18 | t.string :name, null: true 19 | t.text :hours 20 | t.text :preferences 21 | 22 | if t.respond_to?(:json) 23 | t.json :pure_json_data 24 | t.json :data 25 | else 26 | t.text :data 27 | end 28 | 29 | if t.respond_to?(:hstore) 30 | t.hstore :config 31 | else 32 | t.text :config 33 | end 34 | 35 | if t.respond_to?(:jsonb) 36 | t.jsonb :pure_jsonb_data 37 | t.jsonb :settings 38 | t.jsonb :json_data, null: false, default: {} 39 | else 40 | t.text :settings 41 | t.text :json_data 42 | end 43 | 44 | t.column :vendor_type, :vendor_type 45 | 46 | t.datetime :created_at 47 | t.datetime :updated_at 48 | end 49 | 50 | create_table :alarms, force: true do |t| 51 | t.column :device_id, :integer, null: false 52 | t.column :alarm_type, :integer, null: false 53 | t.column :status, :integer, null: false 54 | t.column :metadata, :text 55 | t.column :secret_key, :binary 56 | t.datetime :created_at 57 | t.datetime :updated_at 58 | end 59 | 60 | add_index :alarms, [:device_id, :alarm_type], unique: true, where: 'status <> 0' 61 | end 62 | -------------------------------------------------------------------------------- /test/synchronize_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path('../test_helper', __FILE__) 2 | 3 | describe ".synchronize" do 4 | let(:topics) { Generate(3, :topics) } 5 | let(:titles) { %w(one two three) } 6 | 7 | setup do 8 | # update records outside of ActiveRecord knowing about it 9 | Topic.connection.execute( "UPDATE #{Topic.table_name} SET title='#{titles[0]}_haha' WHERE id=#{topics[0].id}", "Updating record 1 without ActiveRecord" ) 10 | Topic.connection.execute( "UPDATE #{Topic.table_name} SET title='#{titles[1]}_haha' WHERE id=#{topics[1].id}", "Updating record 2 without ActiveRecord" ) 11 | Topic.connection.execute( "UPDATE #{Topic.table_name} SET 
title='#{titles[2]}_haha' WHERE id=#{topics[2].id}", "Updating record 3 without ActiveRecord" ) 12 | end 13 | 14 | it "reloads data for the specified records" do 15 | Topic.synchronize topics 16 | 17 | actual_titles = topics.map(&:title) 18 | assert_equal "#{titles[0]}_haha", actual_titles[0], "the first record was not correctly updated" 19 | assert_equal "#{titles[1]}_haha", actual_titles[1], "the second record was not correctly updated" 20 | assert_equal "#{titles[2]}_haha", actual_titles[2], "the third record was not correctly updated" 21 | end 22 | 23 | it "the synchronized records aren't dirty" do 24 | # Update the in memory records so they're dirty 25 | topics.each { |topic| topic.title = 'dirty title' } 26 | 27 | Topic.synchronize topics 28 | 29 | assert_equal false, topics[0].changed?, "the first record was dirty" 30 | assert_equal false, topics[1].changed?, "the second record was dirty" 31 | assert_equal false, topics[2].changed?, "the third record was dirty" 32 | end 33 | 34 | it "ignores default scope" do 35 | # update records outside of ActiveRecord knowing about it 36 | Topic.connection.execute( "UPDATE #{Topic.table_name} SET approved='0' WHERE id=#{topics[0].id}", "Updating record 1 without ActiveRecord" ) 37 | 38 | Topic.synchronize topics 39 | assert_equal false, topics[0].approved 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: ruby 2 | cache: bundler 3 | rvm: 4 | - 2.5.5 5 | 6 | env: 7 | global: 8 | # https://github.com/discourse/discourse/blob/master/.travis.yml 9 | - RUBY_GC_MALLOC_LIMIT=50000000 10 | matrix: 11 | - AR_VERSION=5.1 12 | - AR_VERSION=5.2 13 | - AR_VERSION=6.0 14 | 15 | matrix: 16 | include: 17 | - rvm: 2.3.8 18 | env: AR_VERSION=3.2 19 | - rvm: 2.3.8 20 | env: AR_VERSION=4.0 21 | - rvm: 2.3.8 22 | env: AR_VERSION=4.1 23 | - rvm: 2.3.8 24 | env: AR_VERSION=4.2 25 | - rvm: 2.3.8 26 | env: AR_VERSION=5.0 27 | 28 | fast_finish: true 29 | 30 | addons: 31 | postgresql: "9.5" 32 | apt: 33 | sources: 34 | - travis-ci/sqlite3 35 | - mysql-5.7-trusty 36 | packages: 37 | - sqlite3 38 | - mysql-server 39 | - mysql-client 40 | - postgresql-9.5-postgis-2.4 41 | 42 | before_install: 43 | - gem update --system 44 | - sudo mysql -e "use mysql; update user set authentication_string=PASSWORD('') where User='root'; update user set plugin='mysql_native_password';FLUSH PRIVILEGES;" 45 | - sudo mysql_upgrade 46 | - sudo service mysql restart 47 | 48 | before_script: 49 | - mysql -e 'create database activerecord_import_test;' 50 | - psql -c 'create database activerecord_import_test;' -U postgres 51 | - psql activerecord_import_test -c 'create extension if not exists hstore;' -U postgres 52 | - psql -c 'create extension if not exists postgis;' -U postgres 53 | - psql -c 'create extension if not exists "uuid-ossp";' -U postgres 54 | - cp test/travis/database.yml test/database.yml 55 | 56 | script: 57 | - bundle exec rake test:mysql2 58 | - bundle exec rake test:mysql2_makara 59 | - bundle exec rake test:mysql2spatial 60 | - bundle exec rake test:postgis 61 | - bundle exec rake test:postgresql 62 | - bundle exec rake test:postgresql_makara 63 | - bundle exec rake test:seamless_database_pool 64 | - bundle exec rake test:spatialite 65 | - bundle exec rake test:sqlite3 66 | - bundle exec rubocop 67 | 68 | dist: xenial 69 | 70 | services: 71 | - mysql 72 | - postgresql 73 | 74 | sudo: required 75 | 
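For readers skimming the dump, here is a minimal sketch of the round trip exercised by test/synchronize_test.rb above. It is illustrative only, not code from the repository: the query and the 'renamed' title are invented, and it assumes the Topic model defined in test/schema/generic_schema.rb.

# Illustrative sketch (not part of the repository); assumes some Topic rows already exist.
topics = Topic.where(author_name: "Jane").to_a

# Change a row behind ActiveRecord's back, exactly as the test's setup block does.
Topic.connection.execute(
  "UPDATE #{Topic.table_name} SET title='renamed' WHERE id=#{topics.first.id}"
)

topics.first.title        # => still the stale in-memory title
Topic.synchronize(topics) # one query, ignoring default scope, reloads every instance
topics.first.title        # => "renamed"
topics.first.changed?     # => false; synchronized instances are left clean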
-------------------------------------------------------------------------------- /test/support/active_support/test_case_extensions.rb: -------------------------------------------------------------------------------- 1 | class ActiveSupport::TestCase 2 | include ActiveRecord::TestFixtures 3 | 4 | if ENV['AR_VERSION'].to_f >= 5.0 5 | self.use_transactional_tests = true 6 | else 7 | self.use_transactional_fixtures = true 8 | end 9 | 10 | class << self 11 | def requires_active_record_version(version_string, &blk) 12 | return unless Gem::Dependency.new('', version_string).match?('', ActiveRecord::VERSION::STRING) 13 | instance_eval(&blk) 14 | end 15 | 16 | def assertion(name, &block) 17 | mc = class << self; self; end 18 | mc.class_eval do 19 | define_method(name) do 20 | it(name, &block) 21 | end 22 | end 23 | end 24 | 25 | def asssertion_group(name, &block) 26 | mc = class << self; self; end 27 | mc.class_eval do 28 | define_method(name, &block) 29 | end 30 | end 31 | 32 | def macro(name, &block) 33 | class_eval do 34 | define_method(name, &block) 35 | end 36 | end 37 | 38 | def describe(description, toplevel = nil, &blk) 39 | text = toplevel ? description : "#{name} #{description}" 40 | klass = Class.new(self) 41 | 42 | klass.class_eval <<-RUBY_EVAL 43 | def self.name 44 | "#{text}" 45 | end 46 | RUBY_EVAL 47 | 48 | # do not inherit test methods from the superclass 49 | klass.class_eval do 50 | instance_methods.grep(/^test.+/) do |method| 51 | undef_method method 52 | end 53 | end 54 | 55 | klass.instance_eval(&blk) 56 | end 57 | alias context describe 58 | 59 | def let(name, &blk) 60 | define_method(name) do 61 | instance_variable_name = "@__let_#{name}" 62 | return instance_variable_get(instance_variable_name) if instance_variable_defined?(instance_variable_name) 63 | instance_variable_set(instance_variable_name, instance_eval(&blk)) 64 | end 65 | end 66 | 67 | def it(description, &blk) 68 | define_method("test_#{name}_#{description}", &blk) 69 | end 70 | end 71 | end 72 | 73 | def describe(description, &blk) 74 | ActiveSupport::TestCase.describe(description, true, &blk) 75 | end 76 | -------------------------------------------------------------------------------- /lib/activerecord-import/value_sets_parser.rb: -------------------------------------------------------------------------------- 1 | module ActiveRecord::Import 2 | class ValueSetTooLargeError < StandardError 3 | attr_reader :size 4 | def initialize(msg = "Value set exceeds max size", size = 0) 5 | @size = size 6 | super(msg) 7 | end 8 | end 9 | 10 | class ValueSetsBytesParser 11 | attr_reader :reserved_bytes, :max_bytes, :values 12 | 13 | def self.parse(values, options) 14 | new(values, options).parse 15 | end 16 | 17 | def initialize(values, options) 18 | @values = values 19 | @reserved_bytes = options[:reserved_bytes] || 0 20 | @max_bytes = options.fetch(:max_bytes) { default_max_bytes } 21 | end 22 | 23 | def parse 24 | value_sets = [] 25 | arr = [] 26 | current_size = 0 27 | values.each_with_index do |val, i| 28 | comma_bytes = arr.size 29 | insert_size = reserved_bytes + val.bytesize 30 | 31 | if insert_size > max_bytes 32 | raise ValueSetTooLargeError.new("#{insert_size} bytes exceeds the max allowed for an insert [#{@max_bytes}]", insert_size) 33 | end 34 | 35 | bytes_thus_far = reserved_bytes + current_size + val.bytesize + comma_bytes 36 | if bytes_thus_far <= max_bytes 37 | current_size += val.bytesize 38 | arr << val 39 | else 40 | value_sets << arr 41 | arr = [val] 42 | current_size = val.bytesize 43 | end 44 | 45 | # 
if we're on the last iteration push whatever we have in arr to value_sets 46 | value_sets << arr if i == (values.size - 1) 47 | end 48 | 49 | value_sets 50 | end 51 | 52 | private 53 | 54 | def default_max_bytes 55 | values_in_bytes = values.sum(&:bytesize) 56 | comma_separated_bytes = values.size - 1 57 | reserved_bytes + values_in_bytes + comma_separated_bytes 58 | end 59 | end 60 | 61 | class ValueSetsRecordsParser 62 | attr_reader :max_records, :values 63 | 64 | def self.parse(values, options) 65 | new(values, options).parse 66 | end 67 | 68 | def initialize(values, options) 69 | @values = values 70 | @max_records = options[:max_records] 71 | end 72 | 73 | def parse 74 | @values.in_groups_of(max_records, false) 75 | end 76 | end 77 | end 78 | -------------------------------------------------------------------------------- /benchmarks/benchmark.rb: -------------------------------------------------------------------------------- 1 | require 'pathname' 2 | require "fileutils" 3 | require "active_record" 4 | require "active_record/base" 5 | 6 | benchmark_dir = File.dirname(__FILE__) 7 | 8 | $LOAD_PATH.unshift('.') 9 | 10 | # Get the gem into the load path 11 | $LOAD_PATH.unshift(File.join(benchmark_dir, '..', 'lib')) 12 | 13 | # Load the benchmark files 14 | Dir[File.join( benchmark_dir, 'lib', '*.rb' )].sort.each { |f| require f } 15 | 16 | # Parse the options passed in via the command line 17 | options = BenchmarkOptionParser.parse( ARGV ) 18 | 19 | FileUtils.mkdir_p 'log' 20 | ActiveRecord::Base.configurations["test"] = YAML.load_file(File.join(benchmark_dir, "../test/database.yml"))[options.adapter] 21 | ActiveRecord::Base.logger = Logger.new("log/test.log") 22 | ActiveRecord::Base.logger.level = Logger::DEBUG 23 | ActiveRecord::Base.default_timezone = :utc 24 | 25 | require "activerecord-import" 26 | ActiveRecord::Base.establish_connection(:test) 27 | 28 | ActiveSupport::Notifications.subscribe(/active_record.sql/) do |_, _, _, _, hsh| 29 | ActiveRecord::Base.logger.info hsh[:sql] 30 | end 31 | 32 | # Load base/generic schema 33 | require File.join(benchmark_dir, "../test/schema/version") 34 | require File.join(benchmark_dir, "../test/schema/generic_schema") 35 | adapter_schema = File.join(benchmark_dir, "schema/#{options.adapter}_schema.rb") 36 | require adapter_schema if File.exist?(adapter_schema) 37 | 38 | Dir[File.dirname(__FILE__) + "/models/*.rb"].each { |file| require file } 39 | 40 | require File.join( benchmark_dir, 'lib', "#{options.adapter}_benchmark" ) 41 | 42 | table_types = nil 43 | table_types = if options.benchmark_all_types 44 | ["all"] 45 | else 46 | options.table_types.keys 47 | end 48 | 49 | letter = options.adapter[0].chr 50 | clazz_str = letter.upcase + options.adapter[1..-1].downcase 51 | clazz = Object.const_get( clazz_str + "Benchmark" ) 52 | 53 | benchmarks = [] 54 | options.number_of_objects.each do |num| 55 | benchmarks << (benchmark = clazz.new) 56 | benchmark.send( "benchmark", table_types, num ) 57 | end 58 | 59 | options.outputs.each do |output| 60 | format = output.format.downcase 61 | output_module = Object.const_get( "OutputTo#{format.upcase}" ) 62 | benchmarks.each do |benchmark| 63 | output_module.output_results( output.filename, benchmark.results ) 64 | end 65 | end 66 | 67 | puts 68 | puts "Done with benchmark!" 
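The two value-set parsers above are normally driven by the MySQL and SQLite adapters further down rather than called directly, so a short usage sketch may help. The literal value strings and limits below are invented for illustration; the option names (reserved_bytes, max_bytes, max_records) come straight from the initializers shown above, and the byte arithmetic matches test/value_sets_bytes_parser_test.rb later in this dump.

# Illustrative sketch (not part of the repository).
values   = ["(1,2,3)", "(2,3,4)", "(3,4,5)"]      # three 7-byte value literals
reserved = "INSERT INTO atable (a,b,c)".bytesize  # 26 bytes of fixed SQL

# Splits by total statement size: 26 + 7 + 7 + 1 comma = 41 <= 41, so the first
# two literals share a statement and the third spills into a second one.
ActiveRecord::Import::ValueSetsBytesParser.parse(
  values, reserved_bytes: reserved, max_bytes: 41
) # => [["(1,2,3)", "(2,3,4)"], ["(3,4,5)"]]

# Splits by row count instead, e.g. SQLite's 500-row compound SELECT limit.
ActiveRecord::Import::ValueSetsRecordsParser.parse(values, max_records: 2)
# => [["(1,2,3)", "(2,3,4)"], ["(3,4,5)"]]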
69 | -------------------------------------------------------------------------------- /test/test_helper.rb: -------------------------------------------------------------------------------- 1 | require 'pathname' 2 | test_dir = Pathname.new File.dirname(__FILE__) 3 | $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib')) 4 | $LOAD_PATH.unshift(File.dirname(__FILE__)) 5 | 6 | require "fileutils" 7 | 8 | ENV["RAILS_ENV"] = "test" 9 | 10 | require "bundler" 11 | Bundler.setup 12 | 13 | require 'pry' unless RbConfig::CONFIG["RUBY_INSTALL_NAME"] =~ /jruby/ 14 | 15 | require "active_record" 16 | require "active_record/fixtures" 17 | require "active_support/test_case" 18 | 19 | if ActiveSupport::VERSION::STRING < "4.0" 20 | require 'test/unit' 21 | require 'mocha/test_unit' 22 | else 23 | require 'active_support/testing/autorun' 24 | require "mocha/mini_test" 25 | end 26 | 27 | require 'timecop' 28 | require 'chronic' 29 | 30 | begin 31 | require 'composite_primary_keys' 32 | rescue LoadError 33 | ENV["SKIP_COMPOSITE_PK"] = "true" 34 | end 35 | 36 | # Support MySQL 5.7 37 | if ActiveSupport::VERSION::STRING < "4.1" 38 | require "active_record/connection_adapters/mysql2_adapter" 39 | class ActiveRecord::ConnectionAdapters::Mysql2Adapter 40 | NATIVE_DATABASE_TYPES[:primary_key] = "int(11) auto_increment PRIMARY KEY" 41 | end 42 | end 43 | 44 | require "ruby-debug" if RUBY_VERSION.to_f < 1.9 45 | 46 | adapter = ENV["ARE_DB"] || "sqlite3" 47 | 48 | FileUtils.mkdir_p 'log' 49 | ActiveRecord::Base.logger = Logger.new("log/test.log") 50 | ActiveRecord::Base.logger.level = Logger::DEBUG 51 | ActiveRecord::Base.configurations["test"] = YAML.load_file(test_dir.join("database.yml"))[adapter] 52 | ActiveRecord::Base.default_timezone = :utc 53 | 54 | require "activerecord-import" 55 | ActiveRecord::Base.establish_connection :test 56 | 57 | ActiveSupport::Notifications.subscribe(/active_record.sql/) do |_, _, _, _, hsh| 58 | ActiveRecord::Base.logger.info hsh[:sql] 59 | end 60 | 61 | require "factory_bot" 62 | Dir[File.dirname(__FILE__) + "/support/**/*.rb"].each { |file| require file } 63 | 64 | # Load base/generic schema 65 | require test_dir.join("schema/version") 66 | require test_dir.join("schema/generic_schema") 67 | adapter_schema = test_dir.join("schema/#{adapter}_schema.rb") 68 | require adapter_schema if File.exist?(adapter_schema) 69 | 70 | Dir[File.dirname(__FILE__) + "/models/*.rb"].each { |file| require file } 71 | 72 | # Prevent this deprecation warning from breaking the tests. 
73 | Rake::FileList.send(:remove_method, :import) 74 | 75 | ActiveSupport::TestCase.test_order = :random if ENV['AR_VERSION'].to_f >= 4.2 76 | -------------------------------------------------------------------------------- /lib/activerecord-import/adapters/abstract_adapter.rb: -------------------------------------------------------------------------------- 1 | module ActiveRecord::Import::AbstractAdapter 2 | module InstanceMethods 3 | def next_value_for_sequence(sequence_name) 4 | %(#{sequence_name}.nextval) 5 | end 6 | 7 | def insert_many( sql, values, _options = {}, *args ) # :nodoc: 8 | number_of_inserts = 1 9 | 10 | base_sql, post_sql = if sql.is_a?( String ) 11 | [sql, ''] 12 | elsif sql.is_a?( Array ) 13 | [sql.shift, sql.join( ' ' )] 14 | end 15 | 16 | sql2insert = base_sql + values.join( ',' ) + post_sql 17 | insert( sql2insert, *args ) 18 | 19 | ActiveRecord::Import::Result.new([], number_of_inserts, [], []) 20 | end 21 | 22 | def pre_sql_statements(options) 23 | sql = [] 24 | sql << options[:pre_sql] if options[:pre_sql] 25 | sql << options[:command] if options[:command] 26 | 27 | # add keywords like IGNORE or DELAYED 28 | if options[:keywords].is_a?(Array) 29 | sql.concat(options[:keywords]) 30 | elsif options[:keywords] 31 | sql << options[:keywords].to_s 32 | end 33 | 34 | sql 35 | end 36 | 37 | # Synchronizes the passed in ActiveRecord instances with the records in 38 | # the database by calling +reload+ on each instance. 39 | def after_import_synchronize( instances ) 40 | instances.each(&:reload) 41 | end 42 | 43 | # Returns an array of post SQL statements given the passed in options. 44 | def post_sql_statements( table_name, options ) # :nodoc: 45 | post_sql_statements = [] 46 | 47 | if supports_on_duplicate_key_update? && options[:on_duplicate_key_update] 48 | post_sql_statements << sql_for_on_duplicate_key_update( table_name, options[:on_duplicate_key_update], options[:primary_key], options[:locking_column] ) 49 | elsif logger && options[:on_duplicate_key_update] 50 | logger.warn "Ignoring on_duplicate_key_update because it is not supported by the database." 51 | end 52 | 53 | # custom user post_sql 54 | post_sql_statements << options[:post_sql] if options[:post_sql] 55 | 56 | # with rollup 57 | post_sql_statements << rollup_sql if options[:rollup] 58 | 59 | post_sql_statements 60 | end 61 | 62 | def increment_locking_column!(table_name, results, locking_column) 63 | if locking_column.present? 64 | results << "\"#{locking_column}\"=#{table_name}.\"#{locking_column}\"+1" 65 | end 66 | end 67 | 68 | def supports_on_duplicate_key_update? 69 | false 70 | end 71 | end 72 | end 73 | -------------------------------------------------------------------------------- /lib/activerecord-import/synchronize.rb: -------------------------------------------------------------------------------- 1 | module ActiveRecord # :nodoc: 2 | class Base # :nodoc: 3 | # Synchronizes the passed in ActiveRecord instances with data 4 | # from the database. This is like calling reload on an individual 5 | # ActiveRecord instance but it is intended for use on multiple instances. 6 | # 7 | # This uses one query for all instance updates and then updates existing 8 | # instances rather sending one query for each instance 9 | # 10 | # == Examples 11 | # # Synchronizing existing models by matching on the primary key field 12 | # posts = Post.where(author: "Zach").first 13 | # <.. 
out of system changes occur to change author name from Zach to Zachary..> 14 | # Post.synchronize posts 15 | # posts.first.author # => "Zachary" instead of Zach 16 | # 17 | # # Synchronizing using custom key fields 18 | # posts = Post.where(author: "Zach").first 19 | # <.. out of system changes occur to change the address of author 'Zach' to 1245 Foo Ln ..> 20 | # Post.synchronize posts, [:name] # queries on the :name column and not the :id column 21 | # posts.first.address # => "1245 Foo Ln" instead of whatever it was 22 | # 23 | def self.synchronize(instances, keys = [primary_key]) 24 | return if instances.empty? 25 | 26 | conditions = {} 27 | 28 | key_values = keys.map { |key| instances.map(&key.to_sym) } 29 | keys.zip(key_values).each { |key, values| conditions[key] = values } 30 | order = keys.map { |key| "#{key} ASC" }.join(",") 31 | 32 | klass = instances.first.class 33 | 34 | fresh_instances = klass.unscoped.where(conditions).order(order) 35 | instances.each do |instance| 36 | matched_instance = fresh_instances.detect do |fresh_instance| 37 | keys.all? { |key| fresh_instance.send(key) == instance.send(key) } 38 | end 39 | 40 | next unless matched_instance 41 | 42 | instance.send :clear_association_cache 43 | instance.send :clear_aggregation_cache if instance.respond_to?(:clear_aggregation_cache, true) 44 | instance.instance_variable_set :@attributes, matched_instance.instance_variable_get(:@attributes) 45 | 46 | if instance.respond_to?(:clear_changes_information) 47 | instance.clear_changes_information # Rails 4.2 and higher 48 | else 49 | instance.instance_variable_set :@attributes_cache, {} # Rails 4.0, 4.1 50 | instance.changed_attributes.clear # Rails 3.2 51 | instance.previous_changes.clear 52 | end 53 | 54 | # Since the instance now accurately reflects the record in 55 | # the database, ensure that instance.persisted? is true. 
56 | instance.instance_variable_set '@new_record', false 57 | instance.instance_variable_set '@destroyed', false 58 | end 59 | end 60 | 61 | # See ActiveRecord::ConnectionAdapters::AbstractAdapter.synchronize 62 | def synchronize(instances, key = [ActiveRecord::Base.primary_key]) 63 | self.class.synchronize(instances, key) 64 | end 65 | end 66 | end 67 | -------------------------------------------------------------------------------- /test/support/assertions.rb: -------------------------------------------------------------------------------- 1 | class ActiveSupport::TestCase 2 | module ImportAssertions 3 | def self.extended(klass) 4 | klass.instance_eval do 5 | assertion(:should_not_update_created_at_on_timestamp_columns) do 6 | Timecop.freeze Chronic.parse("5 minutes from now") do 7 | perform_import 8 | assert_in_delta @topic.created_at.to_i, updated_topic.created_at.to_i, 1 9 | assert_in_delta @topic.created_on.to_i, updated_topic.created_on.to_i, 1 10 | end 11 | end 12 | 13 | assertion(:should_update_updated_at_on_timestamp_columns) do 14 | time = Chronic.parse("5 minutes from now") 15 | Timecop.freeze time do 16 | perform_import 17 | assert_in_delta time.to_i, updated_topic.updated_at.to_i, 1 18 | assert_in_delta time.to_i, updated_topic.updated_on.to_i, 1 19 | end 20 | end 21 | 22 | assertion(:should_not_update_updated_at_on_timestamp_columns) do 23 | time = Chronic.parse("5 minutes from now") 24 | Timecop.freeze time do 25 | perform_import 26 | assert_in_delta @topic.updated_at.to_i, updated_topic.updated_at.to_i, 1 27 | assert_in_delta @topic.updated_on.to_i, updated_topic.updated_on.to_i, 1 28 | end 29 | end 30 | 31 | assertion(:should_not_update_timestamps) do 32 | Timecop.freeze Chronic.parse("5 minutes from now") do 33 | perform_import timestamps: false 34 | assert_in_delta @topic.created_at.to_i, updated_topic.created_at.to_i, 1 35 | assert_in_delta @topic.created_on.to_i, updated_topic.created_on.to_i, 1 36 | assert_in_delta @topic.updated_at.to_i, updated_topic.updated_at.to_i, 1 37 | assert_in_delta @topic.updated_on.to_i, updated_topic.updated_on.to_i, 1 38 | end 39 | end 40 | 41 | assertion(:should_not_update_fields_not_mentioned) do 42 | assert_equal "John Doe", updated_topic.author_name 43 | end 44 | 45 | assertion(:should_update_fields_mentioned) do 46 | perform_import 47 | assert_equal "Book - 2nd Edition", updated_topic.title 48 | assert_equal "johndoe@example.com", updated_topic.author_email_address 49 | end 50 | 51 | assertion(:should_raise_update_fields_mentioned) do 52 | assert_raise ActiveRecord::RecordNotUnique do 53 | perform_import 54 | end 55 | 56 | assert_equal "Book", updated_topic.title 57 | assert_equal "john@doe.com", updated_topic.author_email_address 58 | end 59 | 60 | assertion(:should_update_fields_mentioned_with_hash_mappings) do 61 | perform_import 62 | assert_equal "johndoe@example.com", updated_topic.title 63 | assert_equal "Book - 2nd Edition", updated_topic.author_email_address 64 | end 65 | 66 | assertion(:should_update_foreign_keys) do 67 | perform_import 68 | assert_equal 57, updated_topic.parent_id 69 | end 70 | end 71 | end 72 | end 73 | end 74 | -------------------------------------------------------------------------------- /benchmarks/lib/cli_parser.rb: -------------------------------------------------------------------------------- 1 | require 'optparse' 2 | require 'ostruct' 3 | 4 | # 5 | # == PARAMETERS 6 | # * a - database adapter. ie: mysql, postgresql, oracle, etc. 7 | # * n - number of objects to test with. ie: 1, 100, 1000, etc. 
8 | # * t - the table types to test. ie: myisam, innodb, memory, temporary, etc. 9 | # 10 | module BenchmarkOptionParser 11 | BANNER = "Usage: ruby #{$0} [options]\nSee ruby #{$0} -h for more options.".freeze 12 | 13 | def self.print_banner 14 | puts BANNER 15 | end 16 | 17 | def self.print_banner! 18 | print_banner 19 | exit 20 | end 21 | 22 | def self.print_options( options ) 23 | puts "Benchmarking the following options:" 24 | puts " Database adapter: #{options.adapter}" 25 | puts " Number of objects: #{options.number_of_objects}" 26 | puts " Table types:" 27 | print_valid_table_types( options, prefix: " " ) 28 | end 29 | 30 | # TODO IMPLEMENT THIS 31 | def self.print_valid_table_types( options, hsh = { prefix: '' } ) 32 | if !options.table_types.keys.empty? 33 | options.table_types.keys.sort.each { |type| puts hsh[:prefix].to_s + type.to_s } 34 | else 35 | puts 'No table types defined.' 36 | end 37 | end 38 | 39 | def self.parse( args ) 40 | options = OpenStruct.new( 41 | adapter: 'mysql2', 42 | table_types: {}, 43 | delete_on_finish: true, 44 | number_of_objects: [], 45 | outputs: [] 46 | ) 47 | 48 | opt_parser = OptionParser.new do |opts| 49 | opts.banner = BANNER 50 | 51 | # parse the database adapter 52 | opts.on( "a", "--adapter [String]", 53 | "The database adapter to use. IE: mysql, postgresql, oracle" ) do |arg| 54 | options.adapter = arg 55 | end 56 | 57 | # parse do_not_delete flag 58 | opts.on( "d", "--do-not-delete", 59 | "By default all records in the benchmark tables will be deleted at the end of the benchmark. " \ 60 | "This flag indicates not to delete the benchmark data." ) do |_| 61 | options.delete_on_finish = false 62 | end 63 | 64 | # parse the number of row objects to test 65 | opts.on( "n", "--num [Integer]", 66 | "The number of objects to benchmark." ) do |arg| 67 | options.number_of_objects << arg.to_i 68 | end 69 | 70 | # parse the table types to test 71 | opts.on( "t", "--table-type [String]", 72 | "The table type to test. This can be used multiple times." ) do |arg| 73 | if arg =~ /^all$/ 74 | options.table_types['all'] = options.benchmark_all_types = true 75 | else 76 | options.table_types[arg] = true 77 | end 78 | end 79 | 80 | # print results in CSV format 81 | opts.on( "--to-csv [String]", "Print results in a CSV file format" ) do |filename| 82 | options.outputs << OpenStruct.new( format: 'csv', filename: filename) 83 | end 84 | 85 | # print results in HTML format 86 | opts.on( "--to-html [String]", "Print results in HTML format" ) do |filename| 87 | options.outputs << OpenStruct.new( format: 'html', filename: filename ) 88 | end 89 | end # end opt.parse! 90 | 91 | begin 92 | opt_parser.parse!( args ) 93 | if options.table_types.empty? 94 | options.table_types['all'] = options.benchmark_all_types = true 95 | end 96 | rescue Exception 97 | print_banner! 98 | end 99 | 100 | options.number_of_objects = [1000] if options.number_of_objects.empty? 101 | options.outputs = [OpenStruct.new( format: 'html', filename: 'benchmark.html')] if options.outputs.empty? 102 | 103 | print_options( options ) 104 | 105 | options 106 | end 107 | end 108 | -------------------------------------------------------------------------------- /test/support/mysql/import_examples.rb: -------------------------------------------------------------------------------- 1 | # encoding: UTF-8 2 | def should_support_mysql_import_functionality 3 | # Forcefully disable strict mode for this session. 
4 | ActiveRecord::Base.connection.execute "set sql_mode='STRICT_ALL_TABLES'" 5 | 6 | should_support_basic_on_duplicate_key_update 7 | should_support_on_duplicate_key_ignore 8 | 9 | describe "#import" do 10 | context "with :on_duplicate_key_update and validation checks turned off" do 11 | extend ActiveSupport::TestCase::ImportAssertions 12 | 13 | asssertion_group(:should_support_on_duplicate_key_update) do 14 | should_not_update_fields_not_mentioned 15 | should_update_foreign_keys 16 | should_not_update_created_at_on_timestamp_columns 17 | should_update_updated_at_on_timestamp_columns 18 | end 19 | 20 | macro(:perform_import) { raise "supply your own #perform_import in a context below" } 21 | macro(:updated_topic) { Topic.find(@topic.id) } 22 | 23 | let(:columns) { %w( id title author_name author_email_address parent_id ) } 24 | let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } 25 | let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } 26 | 27 | macro(:perform_import) do |*opts| 28 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: update_columns, validate: false) 29 | end 30 | 31 | setup do 32 | Topic.import columns, values, validate: false 33 | @topic = Topic.find 99 34 | end 35 | 36 | context "using string hash map" do 37 | let(:update_columns) { { "title" => "title", "author_email_address" => "author_email_address", "parent_id" => "parent_id" } } 38 | should_support_on_duplicate_key_update 39 | should_update_fields_mentioned 40 | end 41 | 42 | context "using string hash map, but specifying column mismatches" do 43 | let(:update_columns) { { "title" => "author_email_address", "author_email_address" => "title", "parent_id" => "parent_id" } } 44 | should_support_on_duplicate_key_update 45 | should_update_fields_mentioned_with_hash_mappings 46 | end 47 | 48 | context "using symbol hash map" do 49 | let(:update_columns) { { title: :title, author_email_address: :author_email_address, parent_id: :parent_id } } 50 | should_support_on_duplicate_key_update 51 | should_update_fields_mentioned 52 | end 53 | 54 | context "using symbol hash map, but specifying column mismatches" do 55 | let(:update_columns) { { title: :author_email_address, author_email_address: :title, parent_id: :parent_id } } 56 | should_support_on_duplicate_key_update 57 | should_update_fields_mentioned_with_hash_mappings 58 | end 59 | end 60 | 61 | context "with :synchronization option" do 62 | let(:topics) { [] } 63 | let(:values) { [[topics.first.id, "Jerry Carter", "title1"], [topics.last.id, "Chad Fowler", "title2"]] } 64 | let(:columns) { %w(id author_name title) } 65 | 66 | setup do 67 | topics << Topic.create!(title: "LDAP", author_name: "Big Bird", content: "Putting Directories to Work.") 68 | topics << Topic.create!(title: "Rails Recipes", author_name: "Elmo", content: "A trusted collection of solutions.") 69 | end 70 | 71 | it "synchronizes passed in ActiveRecord model instances with the data just imported" do 72 | columns2update = ['author_name'] 73 | 74 | expected_count = Topic.count 75 | Topic.import( columns, values, 76 | validate: false, 77 | on_duplicate_key_update: columns2update, 78 | synchronize: topics ) 79 | 80 | assert_equal expected_count, Topic.count, "no new records should have been created!" 81 | assert_equal "Jerry Carter", topics.first.author_name, "wrong author!" 82 | assert_equal "Chad Fowler", topics.last.author_name, "wrong author!" 
83 | end 84 | end 85 | 86 | if ENV['AR_VERSION'].to_f >= 5.1 87 | context "with virtual columns" do 88 | let(:books) { [Book.new(author_name: "foo", title: "bar")] } 89 | 90 | it "ignores virtual columns and creates record" do 91 | assert_difference "Book.count", +1 do 92 | Book.import books 93 | end 94 | end 95 | end 96 | end 97 | end 98 | end 99 | -------------------------------------------------------------------------------- /test/value_sets_bytes_parser_test.rb: -------------------------------------------------------------------------------- 1 | require File.expand_path(File.dirname(__FILE__) + '/test_helper') 2 | 3 | require 'activerecord-import/value_sets_parser' 4 | 5 | describe ActiveRecord::Import::ValueSetsBytesParser do 6 | context "#parse - computing insert value sets" do 7 | let(:parser) { ActiveRecord::Import::ValueSetsBytesParser } 8 | let(:base_sql) { "INSERT INTO atable (a,b,c)" } 9 | let(:values) { ["(1,2,3)", "(2,3,4)", "(3,4,5)"] } 10 | 11 | context "when the max allowed bytes is 30 and the base SQL is 26 bytes" do 12 | it "should raise ActiveRecord::Import::ValueSetTooLargeError" do 13 | error = assert_raises ActiveRecord::Import::ValueSetTooLargeError do 14 | parser.parse values, reserved_bytes: base_sql.size, max_bytes: 30 15 | end 16 | assert_match(/33 bytes exceeds the max allowed for an insert \[30\]/, error.message) 17 | end 18 | end 19 | 20 | context "when the max allowed bytes is 33 and the base SQL is 26 bytes" do 21 | it "should return 3 value sets when given 3 value sets of 7 bytes a piece" do 22 | value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 33 23 | assert_equal 3, value_sets.size 24 | end 25 | end 26 | 27 | context "when the max allowed bytes is 40 and the base SQL is 26 bytes" do 28 | it "should return 3 value sets when given 3 value sets of 7 bytes a piece" do 29 | value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 40 30 | assert_equal 3, value_sets.size 31 | end 32 | end 33 | 34 | context "when the max allowed bytes is 41 and the base SQL is 26 bytes" do 35 | it "should return 2 value sets when given 2 value sets of 7 bytes a piece" do 36 | value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 41 37 | assert_equal 2, value_sets.size 38 | end 39 | end 40 | 41 | context "when the max allowed bytes is 48 and the base SQL is 26 bytes" do 42 | it "should return 2 value sets when given 2 value sets of 7 bytes a piece" do 43 | value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 48 44 | assert_equal 2, value_sets.size 45 | end 46 | end 47 | 48 | context "when the max allowed bytes is 49 and the base SQL is 26 bytes" do 49 | it "should return 1 value sets when given 1 value sets of 7 bytes a piece" do 50 | value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 49 51 | assert_equal 1, value_sets.size 52 | end 53 | end 54 | 55 | context "when the max allowed bytes is 999999 and the base SQL is 26 bytes" do 56 | it "should return 1 value sets when given 1 value sets of 7 bytes a piece" do 57 | value_sets = parser.parse values, reserved_bytes: base_sql.size, max_bytes: 999_999 58 | assert_equal 1, value_sets.size 59 | end 60 | end 61 | 62 | it "should properly build insert value set based on max packet allowed" do 63 | values = [ 64 | "('1','2','3')", 65 | "('4','5','6')", 66 | "('7','8','9')" 67 | ] 68 | 69 | base_sql_size_in_bytes = 15 70 | max_bytes = 30 71 | 72 | value_sets = parser.parse values, reserved_bytes: base_sql_size_in_bytes, 
max_bytes: max_bytes 73 | assert_equal 3, value_sets.size, 'Three value sets were expected!' 74 | 75 | # Each element in the value_sets array must be an array 76 | value_sets.each_with_index do |e, i| 77 | assert_kind_of Array, e, "Element #{i} was expected to be an Array!" 78 | end 79 | 80 | # Each element in the values array should have a 1:1 correlation to the elements 81 | # in the returned value_sets arrays 82 | assert_equal values[0], value_sets[0].first 83 | assert_equal values[1], value_sets[1].first 84 | assert_equal values[2], value_sets[2].first 85 | end 86 | 87 | context "data contains multi-byte chars" do 88 | it "should properly build insert value set based on max packet allowed" do 89 | # each accented e should be 2 bytes, so each entry is 6 bytes instead of 5 90 | values = [ 91 | "('é')", 92 | "('é')" 93 | ] 94 | 95 | base_sql_size_in_bytes = 15 96 | max_bytes = 26 97 | 98 | value_sets = parser.parse values, reserved_bytes: base_sql_size_in_bytes, max_bytes: max_bytes 99 | 100 | assert_equal 2, value_sets.size, 'Two value sets were expected!' 101 | end 102 | end 103 | end 104 | end 105 | -------------------------------------------------------------------------------- /lib/activerecord-import/adapters/mysql_adapter.rb: -------------------------------------------------------------------------------- 1 | module ActiveRecord::Import::MysqlAdapter 2 | include ActiveRecord::Import::ImportSupport 3 | include ActiveRecord::Import::OnDuplicateKeyUpdateSupport 4 | 5 | NO_MAX_PACKET = 0 6 | QUERY_OVERHEAD = 8 # This was shown to be true for MySQL, but it's not clear where the overhead is from. 7 | 8 | # +sql+ can be a single string or an array. If it is an array all 9 | # elements that are in position >= 1 will be appended to the final SQL. 10 | def insert_many( sql, values, options = {}, *args ) # :nodoc: 11 | # the number of inserts default 12 | number_of_inserts = 0 13 | 14 | base_sql, post_sql = if sql.is_a?( String ) 15 | [sql, ''] 16 | elsif sql.is_a?( Array ) 17 | [sql.shift, sql.join( ' ' )] 18 | end 19 | 20 | sql_size = QUERY_OVERHEAD + base_sql.size + post_sql.size 21 | 22 | # the number of bytes the requested insert statement values will take up 23 | values_in_bytes = values.sum(&:bytesize) 24 | 25 | # the number of bytes (commas) it will take to comma separate our values 26 | comma_separated_bytes = values.size - 1 27 | 28 | # the total number of bytes required if this statement is one statement 29 | total_bytes = sql_size + values_in_bytes + comma_separated_bytes 30 | 31 | max = max_allowed_packet 32 | 33 | # if we can insert it all as one statement 34 | if NO_MAX_PACKET == max || total_bytes <= max || options[:force_single_insert] 35 | number_of_inserts += 1 36 | sql2insert = base_sql + values.join( ',' ) + post_sql 37 | insert( sql2insert, *args ) 38 | else 39 | value_sets = ::ActiveRecord::Import::ValueSetsBytesParser.parse(values, 40 | reserved_bytes: sql_size, 41 | max_bytes: max) 42 | 43 | transaction(requires_new: true) do 44 | value_sets.each do |value_set| 45 | number_of_inserts += 1 46 | sql2insert = base_sql + value_set.join( ',' ) + post_sql 47 | insert( sql2insert, *args ) 48 | end 49 | end 50 | end 51 | 52 | ActiveRecord::Import::Result.new([], number_of_inserts, [], []) 53 | end 54 | 55 | # Returns the maximum number of bytes that the server will allow 56 | # in a single packet 57 | def max_allowed_packet # :nodoc: 58 | @max_allowed_packet ||= begin 59 | result = execute( "SHOW VARIABLES like 'max_allowed_packet'" ) 60 | # original Mysql gem responds to 
#fetch_row while Mysql2 responds to #first 61 | val = result.respond_to?(:fetch_row) ? result.fetch_row[1] : result.first[1] 62 | val.to_i 63 | end 64 | end 65 | 66 | def pre_sql_statements( options) 67 | sql = [] 68 | sql << "IGNORE" if options[:ignore] || options[:on_duplicate_key_ignore] 69 | sql + super 70 | end 71 | 72 | # Add a column to be updated on duplicate key update 73 | def add_column_for_on_duplicate_key_update( column, options = {} ) # :nodoc: 74 | if (columns = options[:on_duplicate_key_update]) 75 | case columns 76 | when Array then columns << column.to_sym unless columns.include?(column.to_sym) 77 | when Hash then columns[column.to_sym] = column.to_sym 78 | end 79 | end 80 | end 81 | 82 | # Returns a generated ON DUPLICATE KEY UPDATE statement given the passed 83 | # in +args+. 84 | def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc: 85 | sql = ' ON DUPLICATE KEY UPDATE ' 86 | arg = args.first 87 | locking_column = args.last 88 | if arg.is_a?( Array ) 89 | sql << sql_for_on_duplicate_key_update_as_array( table_name, locking_column, arg ) 90 | elsif arg.is_a?( Hash ) 91 | sql << sql_for_on_duplicate_key_update_as_hash( table_name, locking_column, arg ) 92 | elsif arg.is_a?( String ) 93 | sql << arg 94 | else 95 | raise ArgumentError, "Expected Array or Hash" 96 | end 97 | sql 98 | end 99 | 100 | def sql_for_on_duplicate_key_update_as_array( table_name, locking_column, arr ) # :nodoc: 101 | results = arr.map do |column| 102 | qc = quote_column_name( column ) 103 | "#{table_name}.#{qc}=VALUES(#{qc})" 104 | end 105 | increment_locking_column!(table_name, results, locking_column) 106 | results.join( ',' ) 107 | end 108 | 109 | def sql_for_on_duplicate_key_update_as_hash( table_name, locking_column, hsh ) # :nodoc: 110 | results = hsh.map do |column1, column2| 111 | qc1 = quote_column_name( column1 ) 112 | qc2 = quote_column_name( column2 ) 113 | "#{table_name}.#{qc1}=VALUES( #{qc2} )" 114 | end 115 | increment_locking_column!(table_name, results, locking_column) 116 | results.join( ',') 117 | end 118 | 119 | # Return true if the statement is a duplicate key record error 120 | def duplicate_key_update_error?(exception) # :nodoc: 121 | exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('Duplicate entry') 122 | end 123 | 124 | def increment_locking_column!(table_name, results, locking_column) 125 | if locking_column.present? 126 | results << "`#{locking_column}`=#{table_name}.`#{locking_column}`+1" 127 | end 128 | end 129 | end 130 | -------------------------------------------------------------------------------- /benchmarks/lib/base.rb: -------------------------------------------------------------------------------- 1 | class BenchmarkBase 2 | attr_reader :results 3 | 4 | # The main benchmark method dispatcher. This dispatches the benchmarks 5 | # to actual benchmark_xxxx methods. 6 | # 7 | # == PARAMETERS 8 | # * table_types - an array of table types to benchmark 9 | # * num - the number of record insertions to test 10 | def benchmark( table_types, num ) 11 | array_of_cols_and_vals = build_array_of_cols_and_vals( num ) 12 | table_types.each do |table_type| 13 | send( "benchmark_#{table_type}", array_of_cols_and_vals ) 14 | end 15 | end 16 | 17 | # Returns an OpenStruct which contains two attritues, +description+ and +tms+ after performing an 18 | # actual benchmark. 
19 | # 20 | # == PARAMETERS 21 | # * description - the description of the block that is getting benchmarked 22 | # * blk - the block of code to benchmark 23 | # 24 | # == RETURNS 25 | # An OpenStruct object with the following attributes: 26 | # * description - the description of the benchmark ran 27 | # * tms - a Benchmark::Tms containing the results of the benchmark 28 | def bm( description ) 29 | tms = nil 30 | puts "Benchmarking #{description}" 31 | 32 | Benchmark.bm { |x| tms = x.report { yield } } 33 | delete_all 34 | failed = false 35 | 36 | OpenStruct.new description: description, tms: tms, failed: failed 37 | end 38 | 39 | # Given a model class (ie: Topic), and an array of columns and value sets 40 | # this will perform all of the benchmarks necessary for this library. 41 | # 42 | # == PARAMETERS 43 | # * model_clazz - the model class to benchmark (ie: Topic) 44 | # * array_of_cols_and_vals - an array of column identifiers and value sets 45 | # 46 | # == RETURNS 47 | # returns true 48 | def bm_model( model_clazz, array_of_cols_and_vals ) 49 | puts 50 | puts "------ Benchmarking #{model_clazz.name} -------" 51 | 52 | cols, vals = array_of_cols_and_vals 53 | num_inserts = vals.size 54 | 55 | # add a new result group for this particular benchmark 56 | group = [] 57 | @results << group 58 | 59 | description = "#{model_clazz.name}.create (#{num_inserts} records)" 60 | group << bm( description ) do 61 | vals.each do |values| 62 | model_clazz.create create_hash_for_cols_and_vals( cols, values ) 63 | end 64 | end 65 | 66 | description = "#{model_clazz.name}.import(column, values) for #{num_inserts} records with validations" 67 | group << bm( description ) { model_clazz.import cols, vals, validate: true } 68 | 69 | description = "#{model_clazz.name}.import(columns, values) for #{num_inserts} records without validations" 70 | group << bm( description ) { model_clazz.import cols, vals, validate: false } 71 | 72 | models = [] 73 | array_of_attrs = [] 74 | 75 | vals.each do |arr| 76 | array_of_attrs << (attrs = {}) 77 | arr.each_with_index { |value, i| attrs[cols[i]] = value } 78 | end 79 | array_of_attrs.each { |attrs| models << model_clazz.new(attrs) } 80 | 81 | description = "#{model_clazz.name}.import(models) for #{num_inserts} records with validations" 82 | group << bm( description ) { model_clazz.import models, validate: true } 83 | 84 | description = "#{model_clazz.name}.import(models) for #{num_inserts} records without validations" 85 | group << bm( description ) { model_clazz.import models, validate: false } 86 | 87 | true 88 | end 89 | 90 | # Returns a two element array composing of an array of columns and an array of 91 | # value sets given the passed +num+. 92 | # 93 | # === What is a value set? 94 | # A value set is an array of arrays. Each child array represents an array of value sets 95 | # for a given row of data. 
96 | # 97 | # For example, say we wanted to represent an insertion of two records: 98 | # column_names = [ 'id', 'name', 'description' ] 99 | # record1 = [ 1, 'John Doe', 'A plumber' ] 100 | # record2 = [ 2, 'John Smith', 'A painter' ] 101 | # value_set [ record1, record2 ] 102 | # 103 | # == PARAMETER 104 | # * num - the number of records to create 105 | def build_array_of_cols_and_vals( num ) 106 | cols = [:my_name, :description] 107 | value_sets = [] 108 | num.times { |i| value_sets << ["My Name #{i}", "My Description #{i}"] } 109 | [cols, value_sets] 110 | end 111 | 112 | # Returns a hash of column identifier to value mappings giving the passed in 113 | # value array. 114 | # 115 | # Example: 116 | # cols = [ 'id', 'name', 'description' ] 117 | # values = [ 1, 'John Doe', 'A plumber' ] 118 | # hsh = create_hash_for_cols_and_vals( cols, values ) 119 | # # hsh => { 'id'=>1, 'name'=>'John Doe', 'description'=>'A plumber' } 120 | def create_hash_for_cols_and_vals( cols, vals ) 121 | h = {} 122 | cols.zip( vals ) { |col, val| h[col] = val } 123 | h 124 | end 125 | 126 | # Deletes all records from all ActiveRecord subclasses 127 | def delete_all 128 | ActiveRecord::Base.send( :subclasses ).each do |subclass| 129 | if subclass.table_exists? && subclass.respond_to?(:delete_all) 130 | subclass.delete_all 131 | end 132 | end 133 | end 134 | 135 | def initialize # :nodoc: 136 | @results = [] 137 | end 138 | end 139 | -------------------------------------------------------------------------------- /test/schema/generic_schema.rb: -------------------------------------------------------------------------------- 1 | ActiveRecord::Schema.define do 2 | create_table :schema_info, force: :cascade do |t| 3 | t.integer :version, unique: true 4 | end 5 | SchemaInfo.create version: SchemaInfo::VERSION 6 | 7 | create_table :group, force: :cascade do |t| 8 | t.string :order 9 | t.timestamps null: true 10 | end 11 | 12 | create_table :topics, force: :cascade do |t| 13 | t.string :title, null: false 14 | t.string :author_name 15 | t.string :author_email_address 16 | t.datetime :written_on 17 | t.time :bonus_time 18 | t.datetime :last_read 19 | t.text :content 20 | t.boolean :approved, default: '1' 21 | t.integer :replies_count 22 | t.integer :parent_id 23 | t.string :type 24 | t.datetime :created_at 25 | t.datetime :created_on 26 | t.datetime :updated_at 27 | t.datetime :updated_on 28 | end 29 | 30 | create_table :projects, force: :cascade do |t| 31 | t.string :name 32 | t.string :type 33 | end 34 | 35 | create_table :developers, force: :cascade do |t| 36 | t.string :name 37 | t.integer :salary, default: '70000' 38 | t.datetime :created_at 39 | t.integer :team_id 40 | t.datetime :updated_at 41 | end 42 | 43 | create_table :addresses, force: :cascade do |t| 44 | t.string :address 45 | t.string :city 46 | t.string :state 47 | t.string :zip 48 | t.integer :developer_id 49 | end 50 | 51 | create_table :teams, force: :cascade do |t| 52 | t.string :name 53 | end 54 | 55 | create_table :books, force: :cascade do |t| 56 | t.string :title, null: false 57 | t.string :publisher, null: false, default: 'Default Publisher' 58 | t.string :author_name, null: false 59 | t.datetime :created_at 60 | t.datetime :created_on 61 | t.datetime :updated_at 62 | t.datetime :updated_on 63 | t.date :publish_date 64 | t.integer :topic_id 65 | t.integer :tag_id 66 | t.integer :publisher_id 67 | t.boolean :for_sale, default: true 68 | t.integer :status, default: 0 69 | t.string :type 70 | end 71 | 72 | create_table :chapters, force: :cascade do 
|t| 73 | t.string :title 74 | t.integer :book_id, null: false 75 | t.datetime :created_at 76 | t.datetime :updated_at 77 | end 78 | 79 | create_table :end_notes, primary_key: :end_note_id, force: :cascade do |t| 80 | t.string :note 81 | t.integer :book_id, null: false 82 | t.datetime :created_at 83 | t.datetime :updated_at 84 | end 85 | 86 | create_table :languages, force: :cascade do |t| 87 | t.string :name 88 | t.integer :developer_id 89 | end 90 | 91 | create_table :shopping_carts, force: :cascade do |t| 92 | t.string :name, null: true 93 | t.datetime :created_at 94 | t.datetime :updated_at 95 | end 96 | 97 | create_table :cart_items, force: :cascade do |t| 98 | t.string :shopping_cart_id, null: false 99 | t.string :book_id, null: false 100 | t.integer :copies, default: 1 101 | t.datetime :created_at 102 | t.datetime :updated_at 103 | end 104 | 105 | add_index :cart_items, [:shopping_cart_id, :book_id], unique: true, name: 'uk_shopping_cart_books' 106 | 107 | create_table :animals, force: :cascade do |t| 108 | t.string :name, null: false 109 | t.string :size, default: nil 110 | t.datetime :created_at 111 | t.datetime :updated_at 112 | end 113 | 114 | add_index :animals, [:name], unique: true, name: 'uk_animals' 115 | 116 | create_table :widgets, id: false, force: :cascade do |t| 117 | t.integer :w_id, primary_key: true 118 | t.boolean :active, default: false 119 | t.text :data 120 | t.text :json_data 121 | t.text :unspecified_data 122 | t.text :custom_data 123 | end 124 | 125 | create_table :promotions, primary_key: :promotion_id, force: :cascade do |t| 126 | t.string :code 127 | t.string :description 128 | t.decimal :discount 129 | end 130 | 131 | add_index :promotions, [:code], unique: true, name: 'uk_code' 132 | 133 | create_table :discounts, force: :cascade do |t| 134 | t.decimal :amount 135 | t.integer :discountable_id 136 | t.string :discountable_type 137 | end 138 | 139 | create_table :rules, id: false, force: :cascade do |t| 140 | t.integer :id 141 | t.string :condition_text 142 | t.integer :question_id 143 | end 144 | 145 | create_table :questions, force: :cascade do |t| 146 | t.string :body 147 | end 148 | 149 | create_table :vendors, force: :cascade do |t| 150 | t.string :name, null: true 151 | t.text :preferences 152 | t.text :data 153 | t.text :config 154 | t.text :settings 155 | end 156 | 157 | create_table :cars, id: false, force: :cascade do |t| 158 | t.string :Name, null: true 159 | t.string :Features 160 | end 161 | 162 | create_table :users, force: :cascade do |t| 163 | t.string :name, null: false 164 | t.integer :lock_version, null: false, default: 0 165 | end 166 | 167 | create_table :user_tokens, force: :cascade do |t| 168 | t.string :user_name, null: false 169 | t.string :token, null: false 170 | end 171 | 172 | create_table :accounts, force: :cascade do |t| 173 | t.string :name, null: false 174 | t.integer :lock, null: false, default: 0 175 | end 176 | 177 | create_table :bike_makers, force: :cascade do |t| 178 | t.string :name, null: false 179 | t.integer :lock_version, null: false, default: 0 180 | end 181 | 182 | add_index :cars, :Name, unique: true 183 | 184 | unless ENV["SKIP_COMPOSITE_PK"] 185 | execute %( 186 | CREATE TABLE IF NOT EXISTS tags ( 187 | tag_id INT NOT NULL, 188 | publisher_id INT NOT NULL, 189 | tag VARCHAR(50), 190 | PRIMARY KEY (tag_id, publisher_id) 191 | ); 192 | ).split.join(' ').strip 193 | end 194 | end 195 | -------------------------------------------------------------------------------- 
/lib/activerecord-import/adapters/sqlite3_adapter.rb: -------------------------------------------------------------------------------- 1 | module ActiveRecord::Import::SQLite3Adapter 2 | include ActiveRecord::Import::ImportSupport 3 | include ActiveRecord::Import::OnDuplicateKeyUpdateSupport 4 | 5 | MIN_VERSION_FOR_IMPORT = "3.7.11".freeze 6 | MIN_VERSION_FOR_UPSERT = "3.24.0".freeze 7 | SQLITE_LIMIT_COMPOUND_SELECT = 500 8 | 9 | # Override our conformance to ActiveRecord::Import::ImportSupport interface 10 | # to ensure that we only support import in supported version of SQLite. 11 | # Which INSERT statements with multiple value sets was introduced in 3.7.11. 12 | def supports_import? 13 | database_version >= MIN_VERSION_FOR_IMPORT 14 | end 15 | 16 | def supports_on_duplicate_key_update? 17 | database_version >= MIN_VERSION_FOR_UPSERT 18 | end 19 | 20 | # +sql+ can be a single string or an array. If it is an array all 21 | # elements that are in position >= 1 will be appended to the final SQL. 22 | def insert_many( sql, values, _options = {}, *args ) # :nodoc: 23 | number_of_inserts = 0 24 | 25 | base_sql, post_sql = if sql.is_a?( String ) 26 | [sql, ''] 27 | elsif sql.is_a?( Array ) 28 | [sql.shift, sql.join( ' ' )] 29 | end 30 | 31 | value_sets = ::ActiveRecord::Import::ValueSetsRecordsParser.parse(values, 32 | max_records: SQLITE_LIMIT_COMPOUND_SELECT) 33 | 34 | transaction(requires_new: true) do 35 | value_sets.each do |value_set| 36 | number_of_inserts += 1 37 | sql2insert = base_sql + value_set.join( ',' ) + post_sql 38 | insert( sql2insert, *args ) 39 | end 40 | end 41 | 42 | ActiveRecord::Import::Result.new([], number_of_inserts, [], []) 43 | end 44 | 45 | def pre_sql_statements( options ) 46 | sql = [] 47 | # Options :recursive and :on_duplicate_key_ignore are mutually exclusive 48 | if !supports_on_duplicate_key_update? && (options[:ignore] || options[:on_duplicate_key_ignore]) 49 | sql << "OR IGNORE" 50 | end 51 | sql + super 52 | end 53 | 54 | def post_sql_statements( table_name, options ) # :nodoc: 55 | sql = [] 56 | 57 | if supports_on_duplicate_key_update? 58 | # Options :recursive and :on_duplicate_key_ignore are mutually exclusive 59 | if (options[:ignore] || options[:on_duplicate_key_ignore]) && !options[:on_duplicate_key_update] 60 | sql << sql_for_on_duplicate_key_ignore( options[:on_duplicate_key_ignore] ) 61 | end 62 | end 63 | 64 | sql + super 65 | end 66 | 67 | def next_value_for_sequence(sequence_name) 68 | %{nextval('#{sequence_name}')} 69 | end 70 | 71 | # Add a column to be updated on duplicate key update 72 | def add_column_for_on_duplicate_key_update( column, options = {} ) # :nodoc: 73 | arg = options[:on_duplicate_key_update] 74 | if arg.is_a?( Hash ) 75 | columns = arg.fetch( :columns ) { arg[:columns] = [] } 76 | case columns 77 | when Array then columns << column.to_sym unless columns.include?( column.to_sym ) 78 | when Hash then columns[column.to_sym] = column.to_sym 79 | end 80 | elsif arg.is_a?( Array ) 81 | arg << column.to_sym unless arg.include?( column.to_sym ) 82 | end 83 | end 84 | 85 | # Returns a generated ON CONFLICT DO NOTHING statement given the passed 86 | # in +args+. 87 | def sql_for_on_duplicate_key_ignore( *args ) # :nodoc: 88 | arg = args.first 89 | conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash ) 90 | " ON CONFLICT #{conflict_target}DO NOTHING" 91 | end 92 | 93 | # Returns a generated ON CONFLICT DO UPDATE statement given the passed 94 | # in +args+. 
95 | def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc: 96 | arg, primary_key, locking_column = args 97 | arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String ) 98 | return unless arg.is_a?( Hash ) 99 | 100 | sql = ' ON CONFLICT ' 101 | conflict_target = sql_for_conflict_target( arg ) 102 | 103 | columns = arg.fetch( :columns, [] ) 104 | condition = arg[:condition] 105 | if columns.respond_to?( :empty? ) && columns.empty? 106 | return sql << "#{conflict_target}DO NOTHING" 107 | end 108 | 109 | conflict_target ||= sql_for_default_conflict_target( primary_key ) 110 | unless conflict_target 111 | raise ArgumentError, 'Expected :conflict_target to be specified' 112 | end 113 | 114 | sql << "#{conflict_target}DO UPDATE SET " 115 | if columns.is_a?( Array ) 116 | sql << sql_for_on_duplicate_key_update_as_array( table_name, locking_column, columns ) 117 | elsif columns.is_a?( Hash ) 118 | sql << sql_for_on_duplicate_key_update_as_hash( table_name, locking_column, columns ) 119 | elsif columns.is_a?( String ) 120 | sql << columns 121 | else 122 | raise ArgumentError, 'Expected :columns to be an Array or Hash' 123 | end 124 | 125 | sql << " WHERE #{condition}" if condition.present? 126 | 127 | sql 128 | end 129 | 130 | def sql_for_on_duplicate_key_update_as_array( table_name, locking_column, arr ) # :nodoc: 131 | results = arr.map do |column| 132 | qc = quote_column_name( column ) 133 | "#{qc}=EXCLUDED.#{qc}" 134 | end 135 | increment_locking_column!(table_name, results, locking_column) 136 | results.join( ',' ) 137 | end 138 | 139 | def sql_for_on_duplicate_key_update_as_hash( table_name, locking_column, hsh ) # :nodoc: 140 | results = hsh.map do |column1, column2| 141 | qc1 = quote_column_name( column1 ) 142 | qc2 = quote_column_name( column2 ) 143 | "#{qc1}=EXCLUDED.#{qc2}" 144 | end 145 | increment_locking_column!(table_name, results, locking_column) 146 | results.join( ',' ) 147 | end 148 | 149 | def sql_for_conflict_target( args = {} ) 150 | conflict_target = args[:conflict_target] 151 | index_predicate = args[:index_predicate] 152 | if conflict_target.present? 153 | '(' << Array( conflict_target ).reject( &:blank? ).join( ', ' ) << ') '.tap do |sql| 154 | sql << "WHERE #{index_predicate} " if index_predicate 155 | end 156 | end 157 | end 158 | 159 | def sql_for_default_conflict_target( primary_key ) 160 | conflict_target = Array(primary_key).join(', ') 161 | "(#{conflict_target}) " if conflict_target.present? 162 | end 163 | 164 | # Return true if the statement is a duplicate key record error 165 | def duplicate_key_update_error?(exception) # :nodoc: 166 | exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('duplicate key') 167 | end 168 | 169 | private 170 | 171 | def database_version 172 | defined?(sqlite_version) ? 
sqlite_version : super 173 | end 174 | end 175 | -------------------------------------------------------------------------------- /lib/activerecord-import/adapters/postgresql_adapter.rb: -------------------------------------------------------------------------------- 1 | module ActiveRecord::Import::PostgreSQLAdapter 2 | include ActiveRecord::Import::ImportSupport 3 | include ActiveRecord::Import::OnDuplicateKeyUpdateSupport 4 | 5 | MIN_VERSION_FOR_UPSERT = 90_500 6 | 7 | def insert_many( sql, values, options = {}, *args ) # :nodoc: 8 | number_of_inserts = 1 9 | returned_values = [] 10 | ids = [] 11 | results = [] 12 | 13 | base_sql, post_sql = if sql.is_a?( String ) 14 | [sql, ''] 15 | elsif sql.is_a?( Array ) 16 | [sql.shift, sql.join( ' ' )] 17 | end 18 | 19 | sql2insert = base_sql + values.join( ',' ) + post_sql 20 | 21 | columns = returning_columns(options) 22 | if columns.blank? || (options[:no_returning] && !options[:recursive]) 23 | insert( sql2insert, *args ) 24 | else 25 | returned_values = if columns.size > 1 26 | # Select composite columns 27 | select_rows( sql2insert, *args ) 28 | else 29 | select_values( sql2insert, *args ) 30 | end 31 | query_cache.clear if query_cache_enabled 32 | end 33 | 34 | if options[:returning].blank? 35 | ids = returned_values 36 | elsif options[:primary_key].blank? 37 | results = returned_values 38 | else 39 | # split primary key and returning columns 40 | ids, results = split_ids_and_results(returned_values, columns, options) 41 | end 42 | 43 | ActiveRecord::Import::Result.new([], number_of_inserts, ids, results) 44 | end 45 | 46 | def split_ids_and_results(values, columns, options) 47 | ids = [] 48 | results = [] 49 | id_indexes = Array(options[:primary_key]).map { |key| columns.index(key) } 50 | returning_indexes = Array(options[:returning]).map { |key| columns.index(key) } 51 | 52 | values.each do |value| 53 | value_array = Array(value) 54 | ids << id_indexes.map { |i| value_array[i] } 55 | results << returning_indexes.map { |i| value_array[i] } 56 | end 57 | 58 | ids.map!(&:first) if id_indexes.size == 1 59 | results.map!(&:first) if returning_indexes.size == 1 60 | 61 | [ids, results] 62 | end 63 | 64 | def next_value_for_sequence(sequence_name) 65 | %{nextval('#{sequence_name}')} 66 | end 67 | 68 | def post_sql_statements( table_name, options ) # :nodoc: 69 | sql = [] 70 | 71 | if supports_on_duplicate_key_update? 72 | # Options :recursive and :on_duplicate_key_ignore are mutually exclusive 73 | if (options[:ignore] || options[:on_duplicate_key_ignore]) && !options[:on_duplicate_key_update] && !options[:recursive] 74 | sql << sql_for_on_duplicate_key_ignore( table_name, options[:on_duplicate_key_ignore] ) 75 | end 76 | elsif logger && options[:on_duplicate_key_ignore] && !options[:on_duplicate_key_update] 77 | logger.warn "Ignoring on_duplicate_key_ignore because it is not supported by the database." 78 | end 79 | 80 | sql += super(table_name, options) 81 | 82 | columns = returning_columns(options) 83 | unless columns.blank? || (options[:no_returning] && !options[:recursive]) 84 | sql << " RETURNING \"#{columns.join('", "')}\"" 85 | end 86 | 87 | sql 88 | end 89 | 90 | def returning_columns(options) 91 | columns = [] 92 | columns += Array(options[:primary_key]) if options[:primary_key].present? 93 | columns |= Array(options[:returning]) if options[:returning].present? 
94 | columns 95 | end 96 | 97 | # Add a column to be updated on duplicate key update 98 | def add_column_for_on_duplicate_key_update( column, options = {} ) # :nodoc: 99 | arg = options[:on_duplicate_key_update] 100 | if arg.is_a?( Hash ) 101 | columns = arg.fetch( :columns ) { arg[:columns] = [] } 102 | case columns 103 | when Array then columns << column.to_sym unless columns.include?( column.to_sym ) 104 | when Hash then columns[column.to_sym] = column.to_sym 105 | end 106 | elsif arg.is_a?( Array ) 107 | arg << column.to_sym unless arg.include?( column.to_sym ) 108 | end 109 | end 110 | 111 | # Returns a generated ON CONFLICT DO NOTHING statement given the passed 112 | # in +args+. 113 | def sql_for_on_duplicate_key_ignore( table_name, *args ) # :nodoc: 114 | arg = args.first 115 | conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash ) 116 | " ON CONFLICT #{conflict_target}DO NOTHING" 117 | end 118 | 119 | # Returns a generated ON CONFLICT DO UPDATE statement given the passed 120 | # in +args+. 121 | def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc: 122 | arg, primary_key, locking_column = args 123 | arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String ) 124 | return unless arg.is_a?( Hash ) 125 | 126 | sql = ' ON CONFLICT ' 127 | conflict_target = sql_for_conflict_target( arg ) 128 | 129 | columns = arg.fetch( :columns, [] ) 130 | condition = arg[:condition] 131 | if columns.respond_to?( :empty? ) && columns.empty? 132 | return sql << "#{conflict_target}DO NOTHING" 133 | end 134 | 135 | conflict_target ||= sql_for_default_conflict_target( table_name, primary_key ) 136 | unless conflict_target 137 | raise ArgumentError, 'Expected :conflict_target or :constraint_name to be specified' 138 | end 139 | 140 | sql << "#{conflict_target}DO UPDATE SET " 141 | if columns.is_a?( Array ) 142 | sql << sql_for_on_duplicate_key_update_as_array( table_name, locking_column, columns ) 143 | elsif columns.is_a?( Hash ) 144 | sql << sql_for_on_duplicate_key_update_as_hash( table_name, locking_column, columns ) 145 | elsif columns.is_a?( String ) 146 | sql << columns 147 | else 148 | raise ArgumentError, 'Expected :columns to be an Array or Hash' 149 | end 150 | 151 | sql << " WHERE #{condition}" if condition.present? 152 | 153 | sql 154 | end 155 | 156 | def sql_for_on_duplicate_key_update_as_array( table_name, locking_column, arr ) # :nodoc: 157 | results = arr.map do |column| 158 | qc = quote_column_name( column ) 159 | "#{qc}=EXCLUDED.#{qc}" 160 | end 161 | increment_locking_column!(table_name, results, locking_column) 162 | results.join( ',' ) 163 | end 164 | 165 | def sql_for_on_duplicate_key_update_as_hash( table_name, locking_column, hsh ) # :nodoc: 166 | results = hsh.map do |column1, column2| 167 | qc1 = quote_column_name( column1 ) 168 | qc2 = quote_column_name( column2 ) 169 | "#{qc1}=EXCLUDED.#{qc2}" 170 | end 171 | increment_locking_column!(table_name, results, locking_column) 172 | results.join( ',' ) 173 | end 174 | 175 | def sql_for_conflict_target( args = {} ) 176 | constraint_name = args[:constraint_name] 177 | conflict_target = args[:conflict_target] 178 | index_predicate = args[:index_predicate] 179 | if constraint_name.present? 180 | "ON CONSTRAINT #{constraint_name} " 181 | elsif conflict_target.present? 182 | '(' << Array( conflict_target ).reject( &:blank? 
).join( ', ' ) << ') '.tap do |sql| 183 | sql << "WHERE #{index_predicate} " if index_predicate 184 | end 185 | end 186 | end 187 | 188 | def sql_for_default_conflict_target( table_name, primary_key ) 189 | conflict_target = Array(primary_key).join(', ') 190 | "(#{conflict_target}) " if conflict_target.present? 191 | end 192 | 193 | # Return true if the statement is a duplicate key record error 194 | def duplicate_key_update_error?(exception) # :nodoc: 195 | exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('duplicate key') 196 | end 197 | 198 | def supports_on_duplicate_key_update? 199 | database_version >= MIN_VERSION_FOR_UPSERT 200 | end 201 | 202 | def supports_setting_primary_key_of_imported_objects? 203 | true 204 | end 205 | 206 | private 207 | 208 | def database_version 209 | defined?(postgresql_version) ? postgresql_version : super 210 | end 211 | end 212 | -------------------------------------------------------------------------------- /test/support/shared_examples/recursive_import.rb: -------------------------------------------------------------------------------- 1 | def should_support_recursive_import 2 | describe "importing objects with associations" do 3 | let(:new_topics) { Build(num_topics, :topic_with_book) } 4 | let(:new_topics_with_invalid_chapter) do 5 | chapter = new_topics.first.books.first.chapters.first 6 | chapter.title = nil 7 | new_topics 8 | end 9 | let(:num_topics) { 3 } 10 | let(:num_books) { 6 } 11 | let(:num_chapters) { 18 } 12 | let(:num_endnotes) { 24 } 13 | 14 | let(:new_question_with_rule) { FactoryBot.build :question, :with_rule } 15 | 16 | it 'imports top level' do 17 | assert_difference "Topic.count", +num_topics do 18 | Topic.import new_topics, recursive: true 19 | new_topics.each do |topic| 20 | assert_not_nil topic.id 21 | end 22 | end 23 | end 24 | 25 | it 'imports first level associations' do 26 | assert_difference "Book.count", +num_books do 27 | Topic.import new_topics, recursive: true 28 | new_topics.each do |topic| 29 | topic.books.each do |book| 30 | assert_equal topic.id, book.topic_id 31 | end 32 | end 33 | end 34 | end 35 | 36 | it 'imports polymorphic associations' do 37 | discounts = Array.new(1) { |i| Discount.new(amount: i) } 38 | books = Array.new(1) { |i| Book.new(author_name: "Author ##{i}", title: "Book ##{i}") } 39 | books.each do |book| 40 | book.discounts << discounts 41 | end 42 | Book.import books, recursive: true 43 | books.each do |book| 44 | book.discounts.each do |discount| 45 | assert_not_nil discount.discountable_id 46 | assert_equal 'Book', discount.discountable_type 47 | end 48 | end 49 | end 50 | 51 | it 'imports polymorphic associations from subclass' do 52 | discounts = Array.new(1) { |i| Discount.new(amount: i) } 53 | dictionaries = Array.new(1) { |i| Dictionary.new(author_name: "Author ##{i}", title: "Book ##{i}") } 54 | dictionaries.each do |dictionary| 55 | dictionary.discounts << discounts 56 | end 57 | Dictionary.import dictionaries, recursive: true 58 | assert_equal 1, Dictionary.last.discounts.count 59 | dictionaries.each do |dictionary| 60 | dictionary.discounts.each do |discount| 61 | assert_not_nil discount.discountable_id 62 | assert_equal 'Book', discount.discountable_type 63 | end 64 | end 65 | end 66 | 67 | [{ recursive: false }, {}].each do |import_options| 68 | it "skips recursion for #{import_options}" do 69 | assert_difference "Book.count", 0 do 70 | Topic.import new_topics, import_options 71 | end 72 | end 73 | end 74 | 75 | it 'imports deeper nested associations' do 76 
| assert_difference "Chapter.count", +num_chapters do 77 | assert_difference "EndNote.count", +num_endnotes do 78 | Topic.import new_topics, recursive: true 79 | new_topics.each do |topic| 80 | topic.books.each do |book| 81 | book.chapters.each do |chapter| 82 | assert_equal book.id, chapter.book_id 83 | end 84 | book.end_notes.each do |endnote| 85 | assert_equal book.id, endnote.book_id 86 | end 87 | end 88 | end 89 | end 90 | end 91 | end 92 | 93 | # Models are only valid if all associations are valid 94 | it "only imports models with valid associations" do 95 | assert_difference "Topic.count", 2 do 96 | assert_difference "Book.count", 4 do 97 | assert_difference "Chapter.count", 12 do 98 | assert_difference "EndNote.count", 16 do 99 | Topic.import new_topics_with_invalid_chapter, recursive: true 100 | end 101 | end 102 | end 103 | end 104 | end 105 | 106 | it "skips validation of the associations if requested" do 107 | assert_difference "Chapter.count", +num_chapters do 108 | Topic.import new_topics_with_invalid_chapter, validate: false, recursive: true 109 | end 110 | end 111 | 112 | it 'imports has_one associations' do 113 | assert_difference 'Rule.count' do 114 | Question.import [new_question_with_rule], recursive: true 115 | end 116 | end 117 | 118 | it "imports an imported belongs_to association id" do 119 | first_new_topic = new_topics[0] 120 | second_new_topic = new_topics[1] 121 | 122 | books = first_new_topic.books.to_a 123 | Topic.import new_topics, validate: false 124 | 125 | assert_difference "Book.count", books.size do 126 | Book.import books, validate: false 127 | end 128 | 129 | books.each do |book| 130 | assert_equal book.topic_id, first_new_topic.id 131 | end 132 | 133 | books.each { |book| book.topic_id = second_new_topic.id } 134 | assert_no_difference "Book.count", books.size do 135 | Book.import books, validate: false, on_duplicate_key_update: [:topic_id] 136 | end 137 | 138 | books.each do |book| 139 | assert_equal book.topic_id, second_new_topic.id 140 | end 141 | 142 | books.each { |book| book.topic_id = nil } 143 | assert_no_difference "Book.count", books.size do 144 | Book.import books, validate: false, on_duplicate_key_update: [:topic_id] 145 | end 146 | 147 | books.each do |book| 148 | assert_equal book.topic_id, nil 149 | end 150 | end 151 | 152 | unless ENV["SKIP_COMPOSITE_PK"] 153 | describe "with composite primary keys" do 154 | it "should import models and set id" do 155 | tags = [] 156 | tags << Tag.new(tag_id: 1, publisher_id: 1, tag: 'Mystery') 157 | tags << Tag.new(tag_id: 2, publisher_id: 1, tag: 'Science') 158 | 159 | assert_difference "Tag.count", +2 do 160 | Tag.import tags 161 | end 162 | 163 | assert_equal 1, tags[0].tag_id 164 | assert_equal 2, tags[1].tag_id 165 | end 166 | end 167 | end 168 | 169 | describe "all_or_none" do 170 | [Book, Chapter, Topic, EndNote].each do |type| 171 | it "creates #{type}" do 172 | assert_difference "#{type}.count", 0 do 173 | Topic.import new_topics_with_invalid_chapter, all_or_none: true, recursive: true 174 | end 175 | end 176 | end 177 | end 178 | 179 | # If adapter supports on_duplicate_key_update, it is only applied to top level models so that SQL with invalid 180 | # columns, keys, etc isn't generated for child associations when doing recursive import 181 | if ActiveRecord::Base.connection.supports_on_duplicate_key_update? 
182 | describe "on_duplicate_key_update" do 183 | let(:new_topics) { Build(1, :topic_with_book) } 184 | 185 | it "imports objects with associations" do 186 | assert_difference "Topic.count", +1 do 187 | Topic.import new_topics, recursive: true, on_duplicate_key_update: [:updated_at], validate: false 188 | new_topics.each do |topic| 189 | assert_not_nil topic.id 190 | end 191 | end 192 | end 193 | end 194 | end 195 | 196 | # If returning option is provided, it is only applied to top level models so that SQL with invalid 197 | # columns, keys, etc isn't generated for child associations when doing recursive import 198 | describe "returning" do 199 | let(:new_topics) { Build(1, :topic_with_book) } 200 | 201 | it "imports objects with associations" do 202 | assert_difference "Topic.count", +1 do 203 | Topic.import new_topics, recursive: true, returning: [:content], validate: false 204 | new_topics.each do |topic| 205 | assert_not_nil topic.id 206 | end 207 | end 208 | end 209 | end 210 | 211 | # If no returning option is provided, it is ignored 212 | describe "no returning" do 213 | let(:new_topics) { Build(1, :topic_with_book) } 214 | 215 | it "is ignored and imports objects with associations" do 216 | assert_difference "Topic.count", +1 do 217 | Topic.import new_topics, recursive: true, no_returning: true, validate: false 218 | new_topics.each do |topic| 219 | assert_not_nil topic.id 220 | end 221 | end 222 | end 223 | end 224 | end 225 | end 226 | -------------------------------------------------------------------------------- /test/support/sqlite3/import_examples.rb: -------------------------------------------------------------------------------- 1 | # encoding: UTF-8 2 | def should_support_sqlite3_import_functionality 3 | if ActiveRecord::Base.connection.supports_on_duplicate_key_update? 4 | should_support_sqlite_upsert_functionality 5 | end 6 | 7 | describe "#supports_imports?" do 8 | it "should support import" do 9 | assert ActiveRecord::Base.supports_import? 10 | end 11 | end 12 | 13 | describe "#import" do 14 | it "imports with a single insert on SQLite 3.7.11 or higher" do 15 | assert_difference "Topic.count", +507 do 16 | result = Topic.import Build(7, :topics) 17 | assert_equal 1, result.num_inserts, "Failed to issue a single INSERT statement. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" 18 | assert_equal 7, Topic.count, "Failed to insert all records. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" 19 | 20 | result = Topic.import Build(500, :topics) 21 | assert_equal 1, result.num_inserts, "Failed to issue a single INSERT statement. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" 22 | assert_equal 507, Topic.count, "Failed to insert all records. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" 23 | end 24 | end 25 | 26 | it "imports with a two inserts on SQLite 3.7.11 or higher" do 27 | assert_difference "Topic.count", +501 do 28 | result = Topic.import Build(501, :topics) 29 | assert_equal 2, result.num_inserts, "Failed to issue a two INSERT statements. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" 30 | assert_equal 501, Topic.count, "Failed to insert all records. 
Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" 31 | end 32 | end 33 | 34 | it "imports with five inserts on SQLite 3.7.11 or higher" do 35 | assert_difference "Topic.count", +2500 do 36 | result = Topic.import Build(2500, :topics) 37 | assert_equal 5, result.num_inserts, "Failed to issue five INSERT statements. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" 38 | assert_equal 2500, Topic.count, "Failed to insert all records. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed" 39 | end 40 | end 41 | end 42 | end 43 | 44 | def should_support_sqlite_upsert_functionality 45 | should_support_basic_on_duplicate_key_update 46 | should_support_on_duplicate_key_ignore 47 | 48 | describe "#import" do 49 | extend ActiveSupport::TestCase::ImportAssertions 50 | 51 | macro(:perform_import) { raise "supply your own #perform_import in a context below" } 52 | macro(:updated_topic) { Topic.find(@topic.id) } 53 | 54 | context "with :on_duplicate_key_ignore and validation checks turned off" do 55 | let(:columns) { %w( id title author_name author_email_address parent_id ) } 56 | let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } 57 | let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } 58 | 59 | setup do 60 | Topic.import columns, values, validate: false 61 | end 62 | 63 | it "should not update any records" do 64 | result = Topic.import columns, updated_values, on_duplicate_key_ignore: true, validate: false 65 | assert_equal [], result.ids 66 | end 67 | end 68 | 69 | context "with :on_duplicate_key_update and validation checks turned off" do 70 | asssertion_group(:should_support_on_duplicate_key_update) do 71 | should_not_update_fields_not_mentioned 72 | should_update_foreign_keys 73 | should_not_update_created_at_on_timestamp_columns 74 | should_update_updated_at_on_timestamp_columns 75 | end 76 | 77 | context "using a hash" do 78 | context "with :columns a hash" do 79 | let(:columns) { %w( id title author_name author_email_address parent_id ) } 80 | let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } 81 | let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } 82 | 83 | macro(:perform_import) do |*opts| 84 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id, columns: update_columns }, validate: false) 85 | end 86 | 87 | setup do 88 | Topic.import columns, values, validate: false 89 | @topic = Topic.find 99 90 | end 91 | 92 | it "should not modify the passed in :on_duplicate_key_update columns array" do 93 | assert_nothing_raised do 94 | columns = %w(title author_name).freeze 95 | Topic.import columns, [%w(foo, bar)], on_duplicate_key_update: { columns: columns } 96 | end 97 | end 98 | 99 | context "using string hash map" do 100 | let(:update_columns) { { "title" => "title", "author_email_address" => "author_email_address", "parent_id" => "parent_id" } } 101 | should_support_on_duplicate_key_update 102 | should_update_fields_mentioned 103 | end 104 | 105 | context "using string hash map, but specifying column mismatches" do 106 | let(:update_columns) { { "title" => "author_email_address", "author_email_address" => "title", "parent_id" => "parent_id" } } 107 | should_support_on_duplicate_key_update 108 | should_update_fields_mentioned_with_hash_mappings 109 | end 110 | 111 | context "using symbol hash map" do 112 |
let(:update_columns) { { title: :title, author_email_address: :author_email_address, parent_id: :parent_id } } 113 | should_support_on_duplicate_key_update 114 | should_update_fields_mentioned 115 | end 116 | 117 | context "using symbol hash map, but specifying column mismatches" do 118 | let(:update_columns) { { title: :author_email_address, author_email_address: :title, parent_id: :parent_id } } 119 | should_support_on_duplicate_key_update 120 | should_update_fields_mentioned_with_hash_mappings 121 | end 122 | end 123 | 124 | context 'with :index_predicate' do 125 | let(:columns) { %w( id device_id alarm_type status metadata ) } 126 | let(:values) { [[99, 17, 1, 1, 'foo']] } 127 | let(:updated_values) { [[99, 17, 1, 2, 'bar']] } 128 | 129 | macro(:perform_import) do |*opts| 130 | Alarm.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: [:device_id, :alarm_type], index_predicate: 'status <> 0', columns: [:status] }, validate: false) 131 | end 132 | 133 | macro(:updated_alarm) { Alarm.find(@alarm.id) } 134 | 135 | setup do 136 | Alarm.import columns, values, validate: false 137 | @alarm = Alarm.find 99 138 | end 139 | 140 | context 'supports on duplicate key update for partial indexes' do 141 | it 'should not update created_at timestamp columns' do 142 | Timecop.freeze Chronic.parse("5 minutes from now") do 143 | perform_import 144 | assert_in_delta @alarm.created_at.to_i, updated_alarm.created_at.to_i, 1 145 | end 146 | end 147 | 148 | it 'should update updated_at timestamp columns' do 149 | time = Chronic.parse("5 minutes from now") 150 | Timecop.freeze time do 151 | perform_import 152 | assert_in_delta time.to_i, updated_alarm.updated_at.to_i, 1 153 | end 154 | end 155 | 156 | it 'should not update fields not mentioned' do 157 | perform_import 158 | assert_equal 'foo', updated_alarm.metadata 159 | end 160 | 161 | it 'should update fields mentioned with hash mappings' do 162 | perform_import 163 | assert_equal 2, updated_alarm.status 164 | end 165 | end 166 | end 167 | 168 | context 'with :condition' do 169 | let(:columns) { %w( id device_id alarm_type status metadata) } 170 | let(:values) { [[99, 17, 1, 1, 'foo']] } 171 | let(:updated_values) { [[99, 17, 1, 1, 'bar']] } 172 | 173 | macro(:perform_import) do |*opts| 174 | Alarm.import( 175 | columns, 176 | updated_values, 177 | opts.extract_options!.merge( 178 | on_duplicate_key_update: { 179 | conflict_target: [:id], 180 | condition: "alarms.metadata NOT LIKE '%foo%'", 181 | columns: [:metadata] 182 | }, 183 | validate: false 184 | ) 185 | ) 186 | end 187 | 188 | macro(:updated_alarm) { Alarm.find(@alarm.id) } 189 | 190 | setup do 191 | Alarm.import columns, values, validate: false 192 | @alarm = Alarm.find 99 193 | end 194 | 195 | it 'should not update fields not matched' do 196 | perform_import 197 | assert_equal 'foo', updated_alarm.metadata 198 | end 199 | end 200 | 201 | context "with no :conflict_target" do 202 | context "with no primary key" do 203 | it "raises ArgumentError" do 204 | error = assert_raises ArgumentError do 205 | Rule.import Build(3, :rules), on_duplicate_key_update: [:condition_text], validate: false 206 | end 207 | assert_match(/Expected :conflict_target to be specified/, error.message) 208 | end 209 | end 210 | end 211 | 212 | context "with no :columns" do 213 | let(:columns) { %w( id title author_name author_email_address ) } 214 | let(:values) { [[100, "Book", "John Doe", "john@doe.com"]] } 215 | let(:updated_values) { [[100, "Title Should Not Change", 
"Author Should Not Change", "john@nogo.com"]] } 216 | 217 | macro(:perform_import) do |*opts| 218 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id }, validate: false) 219 | end 220 | 221 | setup do 222 | Topic.import columns, values, validate: false 223 | @topic = Topic.find 100 224 | end 225 | 226 | should_update_updated_at_on_timestamp_columns 227 | end 228 | end 229 | end 230 | end 231 | end 232 | -------------------------------------------------------------------------------- /test/support/shared_examples/on_duplicate_key_update.rb: -------------------------------------------------------------------------------- 1 | def should_support_basic_on_duplicate_key_update 2 | describe "#import" do 3 | extend ActiveSupport::TestCase::ImportAssertions 4 | 5 | macro(:perform_import) { raise "supply your own #perform_import in a context below" } 6 | macro(:updated_topic) { Topic.find(@topic.id) } 7 | 8 | context "with lock_version upsert" do 9 | describe 'optimistic lock' do 10 | it 'lock_version upsert after on_duplcate_key_update by model' do 11 | users = [ 12 | User.new(name: 'Salomon'), 13 | User.new(name: 'Nathan') 14 | ] 15 | User.import(users) 16 | assert User.count == users.length 17 | User.all.each do |user| 18 | assert_equal 0, user.lock_version 19 | end 20 | updated_users = User.all.map do |user| 21 | user.name += ' Rothschild' 22 | user 23 | end 24 | User.import(updated_users, on_duplicate_key_update: [:name]) 25 | assert User.count == updated_users.length 26 | User.all.each_with_index do |user, i| 27 | assert_equal user.name, users[i].name + ' Rothschild' 28 | assert_equal 1, user.lock_version 29 | end 30 | end 31 | 32 | it 'lock_version upsert after on_duplcate_key_update by array' do 33 | users = [ 34 | User.new(name: 'Salomon'), 35 | User.new(name: 'Nathan') 36 | ] 37 | User.import(users) 38 | assert User.count == users.length 39 | User.all.each do |user| 40 | assert_equal 0, user.lock_version 41 | end 42 | 43 | columns = [:id, :name] 44 | updated_values = User.all.map do |user| 45 | user.name += ' Rothschild' 46 | [user.id, user.name] 47 | end 48 | User.import(columns, updated_values, on_duplicate_key_update: [:name]) 49 | assert User.count == updated_values.length 50 | User.all.each_with_index do |user, i| 51 | assert_equal user.name, users[i].name + ' Rothschild' 52 | assert_equal 1, user.lock_version 53 | end 54 | end 55 | 56 | it 'lock_version upsert after on_duplcate_key_update by hash' do 57 | users = [ 58 | User.new(name: 'Salomon'), 59 | User.new(name: 'Nathan') 60 | ] 61 | User.import(users) 62 | assert User.count == users.length 63 | User.all.each do |user| 64 | assert_equal 0, user.lock_version 65 | end 66 | updated_values = User.all.map do |user| 67 | user.name += ' Rothschild' 68 | { id: user.id, name: user.name } 69 | end 70 | User.import(updated_values, on_duplicate_key_update: [:name]) 71 | assert User.count == updated_values.length 72 | User.all.each_with_index do |user, i| 73 | assert_equal user.name, users[i].name + ' Rothschild' 74 | assert_equal 1, user.lock_version 75 | end 76 | updated_values2 = User.all.map do |user| 77 | user.name += ' jr.' 78 | { id: user.id, name: user.name } 79 | end 80 | User.import(updated_values2, on_duplicate_key_update: [:name]) 81 | assert User.count == updated_values2.length 82 | User.all.each_with_index do |user, i| 83 | assert_equal user.name, users[i].name + ' Rothschild jr.' 
84 | assert_equal 2, user.lock_version 85 | end 86 | end 87 | 88 | it 'upsert optimistic lock columns other than lock_version by model' do 89 | accounts = [ 90 | Account.new(name: 'Salomon'), 91 | Account.new(name: 'Nathan') 92 | ] 93 | Account.import(accounts) 94 | assert Account.count == accounts.length 95 | Account.all.each do |user| 96 | assert_equal 0, user.lock 97 | end 98 | updated_accounts = Account.all.map do |user| 99 | user.name += ' Rothschild' 100 | user 101 | end 102 | Account.import(updated_accounts, on_duplicate_key_update: [:id, :name]) 103 | assert Account.count == updated_accounts.length 104 | Account.all.each_with_index do |user, i| 105 | assert_equal user.name, accounts[i].name + ' Rothschild' 106 | assert_equal 1, user.lock 107 | end 108 | end 109 | 110 | it 'upsert optimistic lock columns other than lock_version by array' do 111 | accounts = [ 112 | Account.new(name: 'Salomon'), 113 | Account.new(name: 'Nathan') 114 | ] 115 | Account.import(accounts) 116 | assert Account.count == accounts.length 117 | Account.all.each do |user| 118 | assert_equal 0, user.lock 119 | end 120 | 121 | columns = [:id, :name] 122 | updated_values = Account.all.map do |user| 123 | user.name += ' Rothschild' 124 | [user.id, user.name] 125 | end 126 | Account.import(columns, updated_values, on_duplicate_key_update: [:name]) 127 | assert Account.count == updated_values.length 128 | Account.all.each_with_index do |user, i| 129 | assert_equal user.name, accounts[i].name + ' Rothschild' 130 | assert_equal 1, user.lock 131 | end 132 | end 133 | 134 | it 'upsert optimistic lock columns other than lock_version by hash' do 135 | accounts = [ 136 | Account.new(name: 'Salomon'), 137 | Account.new(name: 'Nathan') 138 | ] 139 | Account.import(accounts) 140 | assert Account.count == accounts.length 141 | Account.all.each do |user| 142 | assert_equal 0, user.lock 143 | end 144 | updated_values = Account.all.map do |user| 145 | user.name += ' Rothschild' 146 | { id: user.id, name: user.name } 147 | end 148 | Account.import(updated_values, on_duplicate_key_update: [:name]) 149 | assert Account.count == updated_values.length 150 | Account.all.each_with_index do |user, i| 151 | assert_equal user.name, accounts[i].name + ' Rothschild' 152 | assert_equal 1, user.lock 153 | end 154 | end 155 | 156 | it 'update the lock_version of models separated by namespaces by model' do 157 | makers = [ 158 | Bike::Maker.new(name: 'Yamaha'), 159 | Bike::Maker.new(name: 'Honda') 160 | ] 161 | Bike::Maker.import(makers) 162 | assert Bike::Maker.count == makers.length 163 | Bike::Maker.all.each do |maker| 164 | assert_equal 0, maker.lock_version 165 | end 166 | updated_makers = Bike::Maker.all.map do |maker| 167 | maker.name += ' bikes' 168 | maker 169 | end 170 | Bike::Maker.import(updated_makers, on_duplicate_key_update: [:name]) 171 | assert Bike::Maker.count == updated_makers.length 172 | Bike::Maker.all.each_with_index do |maker, i| 173 | assert_equal maker.name, makers[i].name + ' bikes' 174 | assert_equal 1, maker.lock_version 175 | end 176 | end 177 | it 'update the lock_version of models separated by namespaces by array' do 178 | makers = [ 179 | Bike::Maker.new(name: 'Yamaha'), 180 | Bike::Maker.new(name: 'Honda') 181 | ] 182 | Bike::Maker.import(makers) 183 | assert Bike::Maker.count == makers.length 184 | Bike::Maker.all.each do |maker| 185 | assert_equal 0, maker.lock_version 186 | end 187 | 188 | columns = [:id, :name] 189 | updated_values = Bike::Maker.all.map do |maker| 190 | maker.name += ' bikes' 191 | 
[maker.id, maker.name] 192 | end 193 | Bike::Maker.import(columns, updated_values, on_duplicate_key_update: [:name]) 194 | assert Bike::Maker.count == updated_values.length 195 | Bike::Maker.all.each_with_index do |maker, i| 196 | assert_equal maker.name, makers[i].name + ' bikes' 197 | assert_equal 1, maker.lock_version 198 | end 199 | end 200 | 201 | it 'update the lock_version of models separated by namespaces by hash' do 202 | makers = [ 203 | Bike::Maker.new(name: 'Yamaha'), 204 | Bike::Maker.new(name: 'Honda') 205 | ] 206 | Bike::Maker.import(makers) 207 | assert Bike::Maker.count == makers.length 208 | Bike::Maker.all.each do |maker| 209 | assert_equal 0, maker.lock_version 210 | end 211 | updated_values = Bike::Maker.all.map do |maker| 212 | maker.name += ' bikes' 213 | { id: maker.id, name: maker.name } 214 | end 215 | Bike::Maker.import(updated_values, on_duplicate_key_update: [:name]) 216 | assert Bike::Maker.count == updated_values.length 217 | Bike::Maker.all.each_with_index do |maker, i| 218 | assert_equal maker.name, makers[i].name + ' bikes' 219 | assert_equal 1, maker.lock_version 220 | end 221 | end 222 | end 223 | end 224 | 225 | context "with :on_duplicate_key_update" do 226 | describe 'using :all' do 227 | let(:columns) { %w( id title author_name author_email_address parent_id ) } 228 | let(:updated_values) { [[99, "Book - 2nd Edition", "Jane Doe", "janedoe@example.com", 57]] } 229 | 230 | macro(:perform_import) do |*opts| 231 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: :all, validate: false) 232 | end 233 | 234 | setup do 235 | values = [[99, "Book", "John Doe", "john@doe.com", 17, 3]] 236 | Topic.import columns + ['replies_count'], values, validate: false 237 | end 238 | 239 | it 'updates all specified columns' do 240 | perform_import 241 | updated_topic = Topic.find(99) 242 | assert_equal 'Book - 2nd Edition', updated_topic.title 243 | assert_equal 'Jane Doe', updated_topic.author_name 244 | assert_equal 'janedoe@example.com', updated_topic.author_email_address 245 | assert_equal 57, updated_topic.parent_id 246 | assert_equal 3, updated_topic.replies_count 247 | end 248 | end 249 | 250 | describe "argument safety" do 251 | it "should not modify the passed in :on_duplicate_key_update array" do 252 | assert_nothing_raised do 253 | columns = %w(title author_name).freeze 254 | Topic.import columns, [%w(foo, bar)], on_duplicate_key_update: columns 255 | end 256 | end 257 | end 258 | 259 | context "with timestamps enabled" do 260 | let(:time) { Chronic.parse("5 minutes from now") } 261 | 262 | it 'should not overwrite changed updated_at with current timestamp' do 263 | topic = Topic.create(author_name: "Jane Doe", title: "Book") 264 | timestamp = Time.now.utc 265 | topic.updated_at = timestamp 266 | Topic.import [topic], on_duplicate_key_update: :all, validate: false 267 | assert_equal timestamp.to_s, Topic.last.updated_at.to_s 268 | end 269 | 270 | it 'should update updated_at with current timestamp' do 271 | topic = Topic.create(author_name: "Jane Doe", title: "Book") 272 | Timecop.freeze(time) do 273 | Topic.import [topic], on_duplicate_key_update: [:updated_at], validate: false 274 | assert_in_delta time.to_i, topic.reload.updated_at.to_i, 1.second 275 | end 276 | end 277 | end 278 | 279 | context "with validation checks turned off" do 280 | asssertion_group(:should_support_on_duplicate_key_update) do 281 | should_not_update_fields_not_mentioned 282 | should_update_foreign_keys 283 | 
should_not_update_created_at_on_timestamp_columns 284 | should_update_updated_at_on_timestamp_columns 285 | end 286 | 287 | let(:columns) { %w( id title author_name author_email_address parent_id ) } 288 | let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } 289 | let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } 290 | 291 | macro(:perform_import) do |*opts| 292 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: update_columns, validate: false) 293 | end 294 | 295 | setup do 296 | Topic.import columns, values, validate: false 297 | @topic = Topic.find 99 298 | end 299 | 300 | context "using an empty array" do 301 | let(:update_columns) { [] } 302 | should_not_update_fields_not_mentioned 303 | should_update_updated_at_on_timestamp_columns 304 | end 305 | 306 | context "using string column names" do 307 | let(:update_columns) { %w(title author_email_address parent_id) } 308 | should_support_on_duplicate_key_update 309 | should_update_fields_mentioned 310 | end 311 | 312 | context "using symbol column names" do 313 | let(:update_columns) { [:title, :author_email_address, :parent_id] } 314 | should_support_on_duplicate_key_update 315 | should_update_fields_mentioned 316 | end 317 | end 318 | 319 | context "with a table that has a non-standard primary key" do 320 | let(:columns) { [:promotion_id, :code] } 321 | let(:values) { [[1, 'DISCOUNT1']] } 322 | let(:updated_values) { [[1, 'DISCOUNT2']] } 323 | let(:update_columns) { [:code] } 324 | 325 | macro(:perform_import) do |*opts| 326 | Promotion.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: update_columns, validate: false) 327 | end 328 | macro(:updated_promotion) { Promotion.find(@promotion.promotion_id) } 329 | 330 | setup do 331 | Promotion.import columns, values, validate: false 332 | @promotion = Promotion.find 1 333 | end 334 | 335 | it "should update specified columns" do 336 | perform_import 337 | assert_equal 'DISCOUNT2', updated_promotion.code 338 | end 339 | end 340 | 341 | unless ENV["SKIP_COMPOSITE_PK"] 342 | context "with composite primary keys" do 343 | it "should import array of values successfully" do 344 | columns = [:tag_id, :publisher_id, :tag] 345 | Tag.import columns, [[1, 1, 'Mystery']], validate: false 346 | 347 | assert_difference "Tag.count", +0 do 348 | Tag.import columns, [[1, 1, 'Science']], on_duplicate_key_update: [:tag], validate: false 349 | end 350 | assert_equal 'Science', Tag.first.tag 351 | end 352 | end 353 | end 354 | end 355 | 356 | context "with :on_duplicate_key_update turned off" do 357 | let(:columns) { %w( id title author_name author_email_address parent_id ) } 358 | let(:values) { [[100, "Book", "John Doe", "john@doe.com", 17]] } 359 | let(:updated_values) { [[100, "Book - 2nd Edition", "This should raise an exception", "john@nogo.com", 57]] } 360 | 361 | macro(:perform_import) do |*opts| 362 | # `on_duplicate_key_update: false` is the tested feature 363 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: false, validate: false) 364 | end 365 | 366 | setup do 367 | Topic.import columns, values, validate: false 368 | @topic = Topic.find 100 369 | end 370 | 371 | it "should raise ActiveRecord::RecordNotUnique" do 372 | assert_raise ActiveRecord::RecordNotUnique do 373 | perform_import 374 | end 375 | end 376 | end 377 | end 378 | end 379 | 
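# For orientation, a minimal illustrative sketch (not part of the shared examples)
# of the upsert call pattern exercised above; Topic and the column names mirror
# the fixtures in this file, everything else is an assumption:
#
#   columns = %w( id title author_name )
#   Topic.import columns, [[99, "Book", "John Doe"]], validate: false
#   # Importing the same id again updates only the listed columns:
#   Topic.import columns, [[99, "Book - 2nd Edition", "Jane Doe"]],
#                on_duplicate_key_update: [:title], validate: false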
-------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## Changes in 1.0.6 2 | 3 | ### Fixes 4 | 5 | * Handle after_initialize callbacks. Thanks to @AhMohsen46 via \#691 and 6 | \#692. 7 | * Fix regression introduced in 1.0.4. Explicitly allow adapters to 8 | support on duplicate key update. Thanks to @dsobiera, @jkowens via \#696. 9 | 10 | ## Changes in 1.0.5 11 | 12 | ### Fixes 13 | 14 | * Allow serialized attributes to be returned from import. Thanks to @timanovsky, @jkowens via \#660. 15 | * Return ActiveRecord::Connection from 16 | ActiveRecord::Base#establish_connection. Thanks to @reverentF via 17 | \#663. 18 | * Support PostgreSQL array. Thanks to @ujihisa via \#669. 19 | * Skip loading association ids when column changed. Thanks to @Aristat 20 | via \#673. 21 | 22 | ## Changes in 1.0.4 23 | 24 | ### Fixes 25 | 26 | * Use prepend pattern for ActiveRecord::Base#establish_connection patching. Thanks to @dombesz via \#648. 27 | * Fix NoMethodError when using PostgreSQL ENUM types. Thanks to @sebcoetzee via \#651. 28 | * Fix issue updating optimistic lock in Postgres. Thanks to @timanovsky 29 | via \#656. 30 | 31 | ## Changes in 1.0.3 32 | 33 | ### New Features 34 | 35 | * Add support for ActiveRecord 6.1.0.alpha. Thanks to @imtayadeway via 36 | \#642. 37 | 38 | ### Fixes 39 | 40 | * Return an empty array for results instead of nil when importing empty 41 | array. Thanks to @gyfis via \#636. 42 | 43 | ## Changes in 1.0.2 44 | 45 | ### New Features 46 | 47 | * Add support for CockroachDB adapter. Thanks to @willie via \#605. 48 | * Add support for ActiveRecord 6.0.0.rc1. Thanks to @madeindjs, @bill-filler, 49 | @jkowens via \#619, \#623. 50 | 51 | ### Fixes 52 | 53 | * Fixes NoMethodError when attempting to use nil logger. Thanks to @MattMecel, 54 | @khiav22357. 55 | * Fix issue validating STI models. Thanks to @thejbsmith, @jkowens via 56 | \#626. 57 | 58 | ## Changes in 1.0.1 59 | 60 | ### Fixes 61 | 62 | * Raise an error with a helpful message if array of values exceeds the number of 63 | columns for a table. Thanks to @golddranks via \#589. 64 | * Properly check if model responds to import before creating alias. 65 | Thanks to @jcw- via \#591. 66 | * No longer pass :returning option to child associations on recursive 67 | import. Thanks to @dmitriy-kiriyenko via \#595. 68 | * Fix import issue for models with Postgresql json/jsonb fields. Thanks 69 | to @stokarenko via \#594. 70 | * Fix issue importing models with timestamps that contain timezone 71 | information. Thanks to @dekaikiwi, @jkowens via \#598. 72 | * Ignore :no_returning when using :recursive option. Thanks to @dgollahon, @jkowens 73 | via \#599. 74 | 75 | ## Changes in 1.0.0 76 | 77 | ### New Features 78 | 79 | * Move ActiveRecord::Dirty changes to previous_changes after import. 80 | Thanks to @stokarenko via \#584. 81 | 82 | ### Breaking Changes 83 | 84 | * Previously :on_duplicate_key_update was enabled by default for MySQL. 85 | The update timestamp columns (updated_at, updated_on) would be updated 86 | on duplicate key. This behavior is inconsistent with the other database 87 | adapters and could also be considered surprising. Going forward it must 88 | be explicitly enabled, as sketched below. See \#548. 89 | 90 | ## Changes in 0.28.2 91 | 92 | ### Fixes 93 | 94 | * Fix issue where validations were not working in certain scenarios. 95 | Thanks to @CASIXx1 via \#579.
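A minimal usage sketch of the explicit MySQL opt-in described under the 1.0.0 breaking change above (the `Book` model and the column list are illustrative, not part of this changelog):

```ruby
books = [Book.new(title: "Rework", author_name: "David")]

# Plain bulk insert; a duplicate primary key now raises ActiveRecord::RecordNotUnique.
Book.import books

# Since 1.0.0 the upsert behavior must be requested explicitly.
Book.import books, on_duplicate_key_update: [:title]
```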
96 | 97 | ## Changes in 0.28.1 98 | 99 | ### Fixes 100 | 101 | * Fix issue where ActiveRecord presence validations were being mutated. 102 | Limited custom presence validation to bulk imports. 103 | 104 | ## Changes in 0.28.0 105 | 106 | ### New Features 107 | 108 | * Allow updated timestamps to be manually set. Thanks to @Rob117, @jkowens via \#570. 109 | 110 | ### Fixes 111 | 112 | * Fix validating presence of belongs_to associations. Existence 113 | of the parent record is not validated, but the foreign key field 114 | cannot be empty. Thanks to @Rob117, @jkowens via \#575. 115 | 116 | ## Changes in 0.27.0 117 | 118 | ### New Features 119 | 120 | * Add "secret" option validate_uniqueness to enable uniqueness 121 | validators when validating import. This is not a recommended 122 | approach (See #228), but is being added back in for projects 123 | that depended on this feature. Thanks to @jkowens via \#554. 124 | 125 | ## Changes in 0.26.0 126 | 127 | ### New Features 128 | 129 | * Add on_duplicate_key_update for SQLite. Thanks to @jkowens via \#542. 130 | * Add option to update all fields on_duplicate_key_update. Thanks to @aimerald, @jkowens via \#543. 131 | 132 | ### Fixes 133 | 134 | * Handle deeply frozen options hashes. Thanks to @jturkel via \#546. 135 | * Switch from FactoryGirl to FactoryBot. Thanks to @koic via \#547. 136 | * Allow import to work with ProxySQL. Thanks to @GregFarrell via \#550. 137 | 138 | ## Changes in 0.25.0 139 | 140 | ### New Features 141 | 142 | * Add support for makara_postgis adapter. Thanks to @chadwilken via \#527. 143 | * Skip validating presence of belongs_to associations. Thanks to @Sohair63, @naiyt, @jkowens via \#528. 144 | 145 | ### Fixes 146 | 147 | * Add missing require for ActiveSupport.on_load. Thanks to @onk via \#529. 148 | * Support setting attribute values in before_validation callbacks. 149 | Thanks to @SirRawlins, @jkowens via \#531. 150 | * Ignore virtual columns. Thanks to @dbourguignon, @jkowens via \#530. 151 | 152 | ## Changes in 0.24.0 153 | 154 | ### Fixes 155 | 156 | * Use the association primary key when importing. Thanks to @dpogue via \#512. 157 | * Allow association ids to be updated. Thanks to @Aristat via \#515. 158 | 159 | ## Changes in 0.23.0 160 | 161 | ### New Features 162 | 163 | * Rename `import` method to `bulk_import` and alias to `import`. Thanks 164 | to @itay-grudev, @jkowens via \#498. 165 | * Increment lock_version on duplicate key update. Thanks to @aimerald 166 | via \#500. 167 | 168 | ### Fixes 169 | 170 | * Fix import_without_validations_or_callbacks exception if array is empty. 171 | Thanks to @doloopwhile via \#508. 172 | 173 | ## Changes in 0.22.0 174 | 175 | ### New Features 176 | 177 | * Add support for importing hashes through a has_many association. Thanks 178 | to @jkowens via \#483. 179 | 180 | ### Fixes 181 | 182 | * Fix validation logic for recursive import. For those on Rails 5.0 and 5.1, 183 | this change requires models with polymorphic associations to specify the `inverse_of` 184 | argument (See issue #495). Thanks to @eric-simonton-sama, @jkowens via 185 | \#489. 186 | 187 | ## Changes in 0.21.0 188 | 189 | ### New Features 190 | 191 | * Allow SQL subqueries (objects that respond to .to_sql) to be passed as values. Thanks 192 | to @jalada, @jkowens via \#471. 193 | * Raise an ArgumentError when importing an array of hashes if any of the 194 | hash objects have different keys (see the sketch below). Thanks to @mbell697 via \#465.
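A minimal sketch of the hash form referenced above (the `Book` columns are illustrative):

```ruby
# Every hash must expose the same keys; mixed key sets raise an ArgumentError.
rows = [
  { title: "It",     author_name: "King" },
  { title: "Rework", author_name: "DHH" }
]
Book.import rows
```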
195 | 196 | ### Fixes 197 | 198 | * Fix issue loading incorrect foreign key value when syncing belongs_to 199 | associations with custom foreign key columns. Thanks to @marcgreenstock, @jkowens via \#470. 200 | * Fix issue importing models with polymorphic belongs_to associations. 201 | Thanks to @zorab47, @jkowens via \#476. 202 | * Fix issue importing STI models with ActiveRecord 4.0. Thanks to 203 | @kazuki-st, @jkowens via \#478. 204 | 205 | ## Changes in 0.20.2 206 | 207 | ### Fixes 208 | 209 | * Unscope model when synchronizing with database. Thanks to @indigoviolet via \#455. 210 | 211 | ## Changes in 0.20.1 212 | 213 | ### Fixes 214 | 215 | * Prevent :on_duplicate_key_update args from being modified. Thanks to @joshuamcginnis, @jkowens via \#451. 216 | 217 | ## Changes in 0.20.0 218 | 219 | ### New Features 220 | 221 | * Allow returning columns to be specified for PostgreSQL. Thanks to 222 | @tjwp via \#433. 223 | 224 | ### Fixes 225 | 226 | * Fixes an issue when bypassing uniqueness validators. Thanks to @vmaxv via \#444. 227 | * For AR < 4.2, prevent type casting for binary columns on Postgresql. Thanks to @mwalsher via \#446. 228 | * Fix issue logging class name on import. Thanks to @sophylee, @jkowens via \#447. 229 | * Copy belongs_to association id to foreign key column before importing. Thanks to @jkowens via \#448. 230 | * Reset model instance on validate. Thanks to @vmaxv via \#449. 231 | 232 | ## Changes in 0.19.1 233 | 234 | ### Fixes 235 | 236 | * Fix a regression where models weren't properly being marked clean. Thanks to @tjwp via \#434. 237 | * Raise ActiveRecord::Import::ValueSetTooLargeError when a record being inserted exceeds the 238 | `max_allowed_packet` for MySQL. Thanks to @saizai, @jkowens via \#437. 239 | * Fix issue concatenating column names array with primary key. Thanks to @keeguon via \#440. 240 | 241 | ## Changes in 0.19.0 242 | 243 | ### New Features 244 | 245 | * For PostgreSQL, add option to set WHERE condition in conflict_action. Thanks to 246 | @Saidbek via \#423. 247 | 248 | ### Fixes 249 | 250 | * Fix issue importing saved records with serialized fields. Thanks to 251 | @Andreis13, @jkowens via \#425. 252 | * Fix issue importing records that have columns defined with default values 253 | that are functions or expressions. Thanks to @Andreis13, @jkowens via \#428. 254 | 255 | ## Changes in 0.18.3 256 | 257 | ### Fixes 258 | 259 | * Set models new_record attribute to false when importing with 260 | :on_duplicate_key_ignore. Thanks to @nijikon, @jkowens via \#416. 261 | 262 | ## Changes in 0.18.2 263 | 264 | ### Fixes 265 | 266 | * Enable custom validate callbacks when validating import. Thanks to @afn via \#410. 267 | * Prevent wrong IDs being set on models when using :on_duplicate_key_ignore. 268 | Thanks to @afn, @jkowens via \#412. 269 | 270 | ## Changes in 0.18.1 271 | 272 | ### Fixes 273 | 274 | * Fix to enable validation callbacks (before_validation, 275 | after_validation). Thanks to @sinsoku, @jkowens via \#406. 276 | 277 | ## Changes in 0.18.0 278 | 279 | ### New Features 280 | 281 | * Uniqueness validation is bypassed when validating models since 282 | it cannot be guaranteed if there are duplicates in a batch. 283 | Thanks to @jkowens via \#301. 284 | * Allow for custom timestamp columns. Thanks to @mojidabckuu, @jkowens 285 | via \#401. 286 | 287 | ### Fixes 288 | 289 | * Fix ActiveRecord 5 issue coercing boolean values when serializing 290 | for the database. Thanks to @rjrobinson, @jkowens via \#403. 
291 | 292 | ## Changes in 0.17.2 293 | 294 | ### Fixes 295 | 296 | * Fix issue where PostgreSQL cannot recognize columns if names 297 | include mixed case characters. Thanks to @hugobgranja via \#379. 298 | * Fix an issue for ActiveRecord 5 where serialized fields with 299 | default values were not being typecast. Thanks to @whistlerbrk, 300 | @jkowens via \#386. 301 | * Add option :force_single_insert for MySQL to make sure a single 302 | insert is attempted instead of performing multiple inserts based 303 | on max_allowed_packet. Thanks to @mtparet via \#387. 304 | 305 | ## Changes in 0.17.1 306 | 307 | ### Fixes 308 | 309 | * Along with setting id on models for adapters that support it, 310 | add created_at and updated_at timestamps. Thanks to @jacob-carlborg 311 | via \#364. 312 | * Properly set returned ids when using composite_primary_keys. 313 | Thanks to @guigs, @jkowens via \#371. 314 | 315 | ## Changes in 0.17.0 316 | 317 | ### New Features 318 | 319 | * Add support for composite_primary_keys gem. Thanks to @jkowens 320 | via \#350. 321 | * Add support for importing an array of hashes. Thanks to @jkowens 322 | via \#352. 323 | * Add JDBC SQLite3 support. Thanks to @jkowens via \#356. 324 | 325 | ### Fixes 326 | 327 | * Remove support for SQLite recursive imports. See \#351. 328 | * Improve import speed for Rails 5. Thanks to @ranchodeluxe, @jkowens 329 | via \#359. 330 | 331 | ## Changes in 0.16.2 332 | 333 | ### Fixes 334 | 335 | * Fixes issue clearing query cache on wrong connection when using 336 | multiple databases. Thanks to @KentoMoriwaki via \#337 337 | * Raises an ArgumentError on incorrect usage of nested arrays. Thanks 338 | to @Nitrodist via \#340 339 | * Fixes issue that prevented uuid primary keys from being set manually. 340 | Thanks to @Dclusin-og, @jkowens via \#342 341 | 342 | ## Changes in 0.16.1 343 | 344 | ### Fixes 345 | 346 | * Fixes issue with missing error messages on failed instances when 347 | importing using arrays of columns and values. Thanks to @Fivell via \#332 348 | * Update so SQLite only returns ids if the table has a primary key field via \#333 349 | 350 | 351 | ## Changes in 0.16.0 352 | 353 | ### New Features 354 | 355 | * Add partial index upsert support for PostgreSQL. Thanks to @luislew via \#305 356 | * Add UUID primary key support for PostgreSQL. Thanks to @jkowens via 357 | \#312 358 | * Add store accessor support for JSON, JSONB, and HSTORE data types. 359 | Thanks to @jkowens via \#322 360 | * Log warning if database does not support :on_duplicate_key_update. 361 | Thanks to @jkowens via \#324 362 | * Add option :on_duplicate_key_ignore for MySQL and SQLite. Thanks to 363 | @jkowens via \#326 364 | 365 | ### Fixes 366 | 367 | * Fixes issue with recursive import using same primary key for all models. 368 | Thanks to @chopraanmol1 via \#309 369 | * Fixes issue importing from STI subclass with polymorphic associations. 370 | Thanks to @JNajera via \#314 371 | * Fixes issue setting returned IDs to wrong models when some fail validation. Also fixes issue with SQLite returning wrong IDs. Thanks to @mizukami234 via \#315 372 | 373 | 374 | ## Changes in 0.15.0 375 | 376 | ### New Features 377 | 378 | * An ArgumentError is now raised when no `conflict_target` or `constraint_name` is provided or can be determined when using the `on_duplicate_key_update` option for PostgreSQL.
Thanks to @jkowens via \#290 379 | * Support for Rails 5.0 final release for all except the JDBC driver, which is not yet updated to support Rails 5.0 380 | 381 | ### Fixes 382 | 383 | * activerecord-import no longer modifies a value array inside of the given values array when called with `import(columns, values)`. Thanks to @jkowens via \#291 384 | 385 | ### Misc 386 | 387 | * `raise_record_invalid` has been superseded: `raise_error` is used to raise errors for ActiveRecord 5.0. Thanks to @couragecourag via \#294 388 | 389 | 390 | ## Changes in 0.14.1 391 | 392 | ### Fixes 393 | 394 | * JRuby/JDBCDriver with PostgreSQL will no longer raise a JDBCDriver error when using the :no_returning boolean option. Thanks to @jkowens via \#287 395 | 396 | ## Changes in 0.14.0 397 | 398 | ### New Features 399 | 400 | * Support for ActiveRecord 3.1 has been dropped. Thanks to @sferik via \#254 401 | * SQLite3 has learned the :recursive option. Thanks to @jkowens via \#281 402 | * :on_duplicate_key_ignore will be ignored when imports are being done with :recursive. Thanks to @jkowens via \#268 403 | * activerecord-import learned how to tell PostgreSQL to return no data back from the import via the :no_returning boolean option. Thanks to @makaroni4 via \#276 404 | 405 | ### Fixes 406 | 407 | * Polymorphic associations will not import the :type column. Thanks to @seanlinsley via \#282 and \#283 408 | * ~2X speed increase for importing models with validations. Thanks to @jkowens via \#266 409 | 410 | ### Misc 411 | 412 | * Benchmark HTML report has been fixed. Thanks to @jkowens via \#264 413 | * seamless_database_pool has been updated to work with AR 5.0. Thanks to @jkowens via \#280 414 | * Code cleanup, removal of redundant condition checks. Thanks to @pavlik4k via \#273 415 | * Code cleanup, removal of deprecated `alias_method_chain`. Thanks to @codeodor via \#271 416 | 417 | 418 | ## Changes in 0.13.0 419 | 420 | ### New Features 421 | 422 | * Addition of :batch_size option to control the number of rows to insert per INSERT statement. The default is the total number of records being inserted so there is a single INSERT statement. Thanks to @jkowens via \#245 423 | 424 | * Addition of `import!`, which will raise an exception if a validation error occurs. It will fail fast. Thanks to @jkowens via \#246 425 | 426 | ### Fixes 427 | 428 | * Fixing issue with recursive import when utilizing the `:on_duplicate_key_update` option. The `on_duplicate_key_update` only applies to parent models at this time. Thanks to @yuri-karpovich for reporting and @jkowens for fixing via \#249 429 | 430 | ### Misc 431 | 432 | * Refactoring of fetching and assigning attributes. Thanks to @jkowens via \#259 433 | * Lots of code cleanup and addition of Rubocop linter. Thanks to @sferik via \#256 and \#250 434 | * Resolving errors with the test suite when running against ActiveRecord 4.0 and 4.1. Thanks to @jkowens via \#262 435 | * Cleaning up the TravisCI settings and packages. Thanks to @sferik via \#258 and \#251 436 | 437 | ## Changes in 0.12.0 438 | 439 | ### New Features 440 | 441 | * PostgreSQL UPSERT support has been added. Thanks @jkowens via \#218 442 | 443 | ### Fixes 444 | 445 | * has_one and has_many associations will now be recursively imported regardless of :autosave being set. Thanks @sferik, @jkowens via \#243, \#234 446 | * Fixing an issue with enum column support for Rails > 4.1. Thanks @aquajach via \#235 447 | 448 | ### Removals 449 | 450 | * Support for em-synchrony has been removed since it appears the project has been abandoned.
Thanks @sferik, @zdennis via \#239 451 | * Support for the mysql gem/adapter has been removed since it has officially been abandoned. Use the mysql2 gem/adapter instead. Thanks @sferik, @zdennis via \#239 452 | 453 | ### Misc 454 | 455 | * Cleaned up TravisCI output and removing deprecation warnings. Thanks @jkowens, @zdennis \#242 456 | 457 | 458 | ## Changes before 0.12.0 459 | 460 | > Never look back. What's gone is now history. But in the process make memory of events to help you understand what will help you to make your dream a true story. Mistakes of the past are lessons, success of the past is inspiration. – Dr. Anil Kr Sinha 461 | -------------------------------------------------------------------------------- /test/support/postgresql/import_examples.rb: -------------------------------------------------------------------------------- 1 | # encoding: UTF-8 2 | def should_support_postgresql_import_functionality 3 | should_support_recursive_import 4 | 5 | if ActiveRecord::Base.connection.supports_on_duplicate_key_update? 6 | should_support_postgresql_upsert_functionality 7 | end 8 | 9 | describe "#supports_imports?" do 10 | it "should support import" do 11 | assert ActiveRecord::Base.supports_import? 12 | end 13 | end 14 | 15 | describe "#import" do 16 | it "should import with a single insert" do 17 | # see ActiveRecord::ConnectionAdapters::AbstractAdapter test for more specifics 18 | assert_difference "Topic.count", +10 do 19 | result = Topic.import Build(3, :topics) 20 | assert_equal 1, result.num_inserts 21 | 22 | result = Topic.import Build(7, :topics) 23 | assert_equal 1, result.num_inserts 24 | end 25 | end 26 | 27 | context "setting attributes and marking clean" do 28 | let(:topic) { Build(:topics) } 29 | 30 | setup { Topic.import([topic]) } 31 | 32 | it "assigns ids" do 33 | assert topic.id.present? 34 | end 35 | 36 | it "marks models as clean" do 37 | assert !topic.changed? 38 | end 39 | 40 | if ENV['AR_VERSION'].to_f > 4.1 41 | it "moves the dirty changes to previous_changes" do 42 | assert topic.previous_changes.present? 43 | end 44 | end 45 | 46 | it "marks models as persisted" do 47 | assert !topic.new_record? 48 | assert topic.persisted? 49 | end 50 | 51 | it "assigns timestamps" do 52 | assert topic.created_at.present? 53 | assert topic.updated_at.present? 54 | end 55 | end 56 | 57 | describe "with query cache enabled" do 58 | setup do 59 | unless ActiveRecord::Base.connection.query_cache_enabled 60 | ActiveRecord::Base.connection.enable_query_cache! 61 | @disable_cache_on_teardown = true 62 | end 63 | end 64 | 65 | it "clears cache on insert" do 66 | before_import = Topic.all.to_a 67 | 68 | Topic.import(Build(2, :topics), validate: false) 69 | 70 | after_import = Topic.all.to_a 71 | assert_equal 2, after_import.size - before_import.size 72 | end 73 | 74 | teardown do 75 | if @disable_cache_on_teardown 76 | ActiveRecord::Base.connection.disable_query_cache! 
77 | end 78 | end 79 | end 80 | 81 | describe "no_returning" do 82 | let(:books) { [Book.new(author_name: "foo", title: "bar")] } 83 | 84 | it "creates records" do 85 | assert_difference "Book.count", +1 do 86 | Book.import books, no_returning: true 87 | end 88 | end 89 | 90 | it "returns no ids" do 91 | assert_equal [], Book.import(books, no_returning: true).ids 92 | end 93 | end 94 | 95 | describe "returning" do 96 | let(:books) { [Book.new(author_name: "King", title: "It")] } 97 | let(:result) { Book.import(books, returning: %w(author_name title)) } 98 | let(:book_id) do 99 | if RUBY_PLATFORM == 'java' || ENV['AR_VERSION'].to_i >= 5.0 100 | books.first.id 101 | else 102 | books.first.id.to_s 103 | end 104 | end 105 | 106 | it "creates records" do 107 | assert_difference("Book.count", +1) { result } 108 | end 109 | 110 | it "returns ids" do 111 | result 112 | assert_equal [book_id], result.ids 113 | end 114 | 115 | it "returns specified columns" do 116 | assert_equal [%w(King It)], result.results 117 | end 118 | 119 | context "when given an empty array" do 120 | let(:result) { Book.import([], returning: %w(title)) } 121 | 122 | setup { result } 123 | 124 | it "returns empty arrays for ids and results" do 125 | assert_equal [], result.ids 126 | assert_equal [], result.results 127 | end 128 | end 129 | 130 | context "when a returning column is a serialized attribute" do 131 | let(:vendor) { Vendor.new(hours: { monday: '8-5' }) } 132 | let(:result) { Vendor.import([vendor], returning: %w(hours)) } 133 | 134 | it "creates records" do 135 | assert_difference("Vendor.count", +1) { result } 136 | end 137 | end 138 | 139 | context "when primary key and returning overlap" do 140 | let(:result) { Book.import(books, returning: %w(id title)) } 141 | 142 | setup { result } 143 | 144 | it "returns ids" do 145 | assert_equal [book_id], result.ids 146 | end 147 | 148 | it "returns specified columns" do 149 | assert_equal [[book_id, 'It']], result.results 150 | end 151 | end 152 | 153 | context "setting model attributes" do 154 | let(:code) { 'abc' } 155 | let(:discount) { 0.10 } 156 | let(:original_promotion) do 157 | Promotion.new(code: code, discount: discount) 158 | end 159 | let(:updated_promotion) do 160 | Promotion.new(code: code, description: 'ABC discount') 161 | end 162 | let(:returning_columns) { %w(discount) } 163 | 164 | setup do 165 | Promotion.import([original_promotion]) 166 | Promotion.import([updated_promotion], 167 | on_duplicate_key_update: { conflict_target: %i(code), columns: %i(description) }, 168 | returning: returning_columns) 169 | end 170 | 171 | it "sets model attributes" do 172 | assert_equal updated_promotion.discount, discount 173 | end 174 | 175 | context "returning multiple columns" do 176 | let(:returning_columns) { %w(discount description) } 177 | 178 | it "sets model attributes" do 179 | assert_equal updated_promotion.discount, discount 180 | end 181 | end 182 | end 183 | end 184 | end 185 | 186 | if ENV['AR_VERSION'].to_f >= 4.0 187 | describe "with a uuid primary key" do 188 | let(:vendor) { Vendor.new(name: "foo") } 189 | let(:vendors) { [vendor] } 190 | 191 | it "creates records" do 192 | assert_difference "Vendor.count", +1 do 193 | Vendor.import vendors 194 | end 195 | end 196 | 197 | it "assigns an id to the model objects" do 198 | Vendor.import vendors 199 | assert_not_nil vendor.id 200 | end 201 | end 202 | 203 | describe "with an assigned uuid primary key" do 204 | let(:id) { SecureRandom.uuid } 205 | let(:vendor) { Vendor.new(id: id, name: "foo") } 206 | 
let(:vendors) { [vendor] } 207 | 208 | it "creates records with correct id" do 209 | assert_difference "Vendor.count", +1 do 210 | Vendor.import vendors 211 | end 212 | assert_equal id, vendor.id 213 | end 214 | end 215 | end 216 | 217 | describe "with store accessor fields" do 218 | if ENV['AR_VERSION'].to_f >= 4.0 219 | it "imports values for json fields" do 220 | vendors = [Vendor.new(name: 'Vendor 1', size: 100)] 221 | assert_difference "Vendor.count", +1 do 222 | Vendor.import vendors 223 | end 224 | assert_equal(100, Vendor.first.size) 225 | end 226 | 227 | it "imports values for hstore fields" do 228 | vendors = [Vendor.new(name: 'Vendor 1', contact: 'John Smith')] 229 | assert_difference "Vendor.count", +1 do 230 | Vendor.import vendors 231 | end 232 | assert_equal('John Smith', Vendor.first.contact) 233 | end 234 | end 235 | 236 | if ENV['AR_VERSION'].to_f >= 4.2 237 | it "imports values for jsonb fields" do 238 | vendors = [Vendor.new(name: 'Vendor 1', charge_code: '12345')] 239 | assert_difference "Vendor.count", +1 do 240 | Vendor.import vendors 241 | end 242 | assert_equal('12345', Vendor.first.charge_code) 243 | end 244 | end 245 | end 246 | 247 | if ENV['AR_VERSION'].to_f >= 4.2 248 | describe "with serializable fields" do 249 | it "imports default values as correct data type" do 250 | vendors = [Vendor.new(name: 'Vendor 1')] 251 | assert_difference "Vendor.count", +1 do 252 | Vendor.import vendors 253 | end 254 | assert_equal({}, Vendor.first.json_data) 255 | end 256 | end 257 | 258 | %w(json jsonb).each do |json_type| 259 | describe "with pure #{json_type} fields" do 260 | let(:data) { { a: :b } } 261 | let(:json_field_name) { "pure_#{json_type}_data" } 262 | it "imports the values from saved records" do 263 | vendor = Vendor.create!(name: 'Vendor 1', json_field_name => data) 264 | 265 | Vendor.import [vendor], on_duplicate_key_update: [json_field_name] 266 | assert_equal(data.as_json, vendor.reload[json_field_name]) 267 | end 268 | end 269 | end 270 | end 271 | 272 | describe "with enum field" do 273 | let(:vendor_type) { "retailer" } 274 | it "imports the correct values for enum fields" do 275 | vendor = Vendor.new(name: 'Vendor 1', vendor_type: vendor_type) 276 | assert_difference "Vendor.count", +1 do 277 | Vendor.import [vendor] 278 | end 279 | assert_equal(vendor_type, Vendor.first.vendor_type) 280 | end 281 | end 282 | 283 | describe "with binary field" do 284 | let(:binary_value) { "\xE0'c\xB2\xB0\xB3Bh\\\xC2M\xB1m\\I\xC4r".force_encoding('ASCII-8BIT') } 285 | it "imports the correct values for binary fields" do 286 | alarms = [Alarm.new(device_id: 1, alarm_type: 1, status: 1, secret_key: binary_value)] 287 | assert_difference "Alarm.count", +1 do 288 | Alarm.import alarms 289 | end 290 | assert_equal(binary_value, Alarm.first.secret_key) 291 | end 292 | end 293 | end 294 | 295 | def should_support_postgresql_upsert_functionality 296 | should_support_basic_on_duplicate_key_update 297 | should_support_on_duplicate_key_ignore 298 | 299 | describe "#import" do 300 | extend ActiveSupport::TestCase::ImportAssertions 301 | 302 | macro(:perform_import) { raise "supply your own #perform_import in a context below" } 303 | macro(:updated_topic) { Topic.find(@topic.id) } 304 | 305 | context "with :on_duplicate_key_ignore and validation checks turned off" do 306 | let(:columns) { %w( id title author_name author_email_address parent_id ) } 307 | let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } 308 | let(:updated_values) { [[99, "Book - 2nd Edition", "Author 
Should Not Change", "johndoe@example.com", 57]] } 309 | 310 | setup do 311 | Topic.import columns, values, validate: false 312 | end 313 | 314 | it "should not update any records" do 315 | result = Topic.import columns, updated_values, on_duplicate_key_ignore: true, validate: false 316 | assert_equal [], result.ids 317 | end 318 | end 319 | 320 | context "with :on_duplicate_key_ignore and :recursive enabled" do 321 | let(:new_topic) { Build(1, :topic_with_book) } 322 | let(:mixed_topics) { Build(1, :topic_with_book) + new_topic + Build(1, :topic_with_book) } 323 | 324 | setup do 325 | Topic.import new_topic, recursive: true 326 | end 327 | 328 | # Recursive import depends on the primary keys of the parent model being returned 329 | # on insert. With on_duplicate_key_ignore enabled, not all ids will be returned 330 | # and it is possible that a model will be assigned the wrong id and then its children 331 | # would be associated with the wrong parent. 332 | it ":on_duplicate_key_ignore is ignored" do 333 | assert_raise ActiveRecord::RecordNotUnique do 334 | Topic.import mixed_topics, recursive: true, on_duplicate_key_ignore: true, validate: false 335 | end 336 | end 337 | end 338 | 339 | context "with :on_duplicate_key_update and validation checks turned off" do 340 | asssertion_group(:should_support_on_duplicate_key_update) do 341 | should_not_update_fields_not_mentioned 342 | should_update_foreign_keys 343 | should_not_update_created_at_on_timestamp_columns 344 | should_update_updated_at_on_timestamp_columns 345 | end 346 | 347 | context "using a hash" do 348 | context "with :columns :all" do 349 | let(:columns) { %w( id title author_name author_email_address parent_id ) } 350 | let(:updated_values) { [[99, "Book - 2nd Edition", "Jane Doe", "janedoe@example.com", 57]] } 351 | 352 | macro(:perform_import) do |*opts| 353 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id, columns: :all }, validate: false) 354 | end 355 | 356 | setup do 357 | values = [[99, "Book", "John Doe", "john@doe.com", 17, 3]] 358 | Topic.import columns + ['replies_count'], values, validate: false 359 | end 360 | 361 | it "should update all specified columns" do 362 | perform_import 363 | updated_topic = Topic.find(99) 364 | assert_equal 'Book - 2nd Edition', updated_topic.title 365 | assert_equal 'Jane Doe', updated_topic.author_name 366 | assert_equal 'janedoe@example.com', updated_topic.author_email_address 367 | assert_equal 57, updated_topic.parent_id 368 | assert_equal 3, updated_topic.replies_count 369 | end 370 | end 371 | 372 | context "with :columns a hash" do 373 | let(:columns) { %w( id title author_name author_email_address parent_id ) } 374 | let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] } 375 | let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } 376 | 377 | macro(:perform_import) do |*opts| 378 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id, columns: update_columns }, validate: false) 379 | end 380 | 381 | setup do 382 | Topic.import columns, values, validate: false 383 | @topic = Topic.find 99 384 | end 385 | 386 | it "should not modify the passed in :on_duplicate_key_update columns array" do 387 | assert_nothing_raised do 388 | columns = %w(title author_name).freeze 389 | Topic.import columns, [%w(foo, bar)], { on_duplicate_key_update: { columns: columns }.freeze }.freeze 390 | end 391 | end 392 | 
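# The contexts below exercise :columns given as a hash map (source column => column to update), with both string and symbol keys, including deliberately mismatched mappings.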
393 | context "using string hash map" do 394 | let(:update_columns) { { "title" => "title", "author_email_address" => "author_email_address", "parent_id" => "parent_id" } } 395 | should_support_on_duplicate_key_update 396 | should_update_fields_mentioned 397 | end 398 | 399 | context "using string hash map, but specifying column mismatches" do 400 | let(:update_columns) { { "title" => "author_email_address", "author_email_address" => "title", "parent_id" => "parent_id" } } 401 | should_support_on_duplicate_key_update 402 | should_update_fields_mentioned_with_hash_mappings 403 | end 404 | 405 | context "using symbol hash map" do 406 | let(:update_columns) { { title: :title, author_email_address: :author_email_address, parent_id: :parent_id } } 407 | should_support_on_duplicate_key_update 408 | should_update_fields_mentioned 409 | end 410 | 411 | context "using symbol hash map, but specifying column mismatches" do 412 | let(:update_columns) { { title: :author_email_address, author_email_address: :title, parent_id: :parent_id } } 413 | should_support_on_duplicate_key_update 414 | should_update_fields_mentioned_with_hash_mappings 415 | end 416 | end 417 | 418 | context 'with :index_predicate' do 419 | let(:columns) { %w( id device_id alarm_type status metadata ) } 420 | let(:values) { [[99, 17, 1, 1, 'foo']] } 421 | let(:updated_values) { [[99, 17, 1, 2, 'bar']] } 422 | 423 | macro(:perform_import) do |*opts| 424 | Alarm.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: [:device_id, :alarm_type], index_predicate: 'status <> 0', columns: [:status] }, validate: false) 425 | end 426 | 427 | macro(:updated_alarm) { Alarm.find(@alarm.id) } 428 | 429 | setup do 430 | Alarm.import columns, values, validate: false 431 | @alarm = Alarm.find 99 432 | end 433 | 434 | context 'supports on duplicate key update for partial indexes' do 435 | it 'should not update created_at timestamp columns' do 436 | Timecop.freeze Chronic.parse("5 minutes from now") do 437 | perform_import 438 | assert_in_delta @alarm.created_at.to_i, updated_alarm.created_at.to_i, 1 439 | end 440 | end 441 | 442 | it 'should update updated_at timestamp columns' do 443 | time = Chronic.parse("5 minutes from now") 444 | Timecop.freeze time do 445 | perform_import 446 | assert_in_delta time.to_i, updated_alarm.updated_at.to_i, 1 447 | end 448 | end 449 | 450 | it 'should not update fields not mentioned' do 451 | perform_import 452 | assert_equal 'foo', updated_alarm.metadata 453 | end 454 | 455 | it 'should update fields mentioned with hash mappings' do 456 | perform_import 457 | assert_equal 2, updated_alarm.status 458 | end 459 | end 460 | end 461 | 462 | context 'with :condition' do 463 | let(:columns) { %w( id device_id alarm_type status metadata) } 464 | let(:values) { [[99, 17, 1, 1, 'foo']] } 465 | let(:updated_values) { [[99, 17, 1, 1, 'bar']] } 466 | 467 | macro(:perform_import) do |*opts| 468 | Alarm.import( 469 | columns, 470 | updated_values, 471 | opts.extract_options!.merge( 472 | on_duplicate_key_update: { 473 | conflict_target: [:id], 474 | condition: "alarms.metadata NOT LIKE '%foo%'", 475 | columns: [:metadata] 476 | }, 477 | validate: false 478 | ) 479 | ) 480 | end 481 | 482 | macro(:updated_alarm) { Alarm.find(@alarm.id) } 483 | 484 | setup do 485 | Alarm.import columns, values, validate: false 486 | @alarm = Alarm.find 99 487 | end 488 | 489 | it 'should not update fields not matched' do 490 | perform_import 491 | assert_equal 'foo', updated_alarm.metadata 492 | end 
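# Note: the :condition above ("alarms.metadata NOT LIKE '%foo%'") is evaluated against the existing row, so a row whose metadata already contains 'foo' is not updated, which is what the assertion above verifies.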
493 | end 494 | 495 | context "with :constraint_name" do 496 | let(:columns) { %w( id title author_name author_email_address parent_id ) } 497 | let(:values) { [[100, "Book", "John Doe", "john@doe.com", 17]] } 498 | let(:updated_values) { [[100, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } 499 | 500 | macro(:perform_import) do |*opts| 501 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { constraint_name: :topics_pkey, columns: update_columns }, validate: false) 502 | end 503 | 504 | setup do 505 | Topic.import columns, values, validate: false 506 | @topic = Topic.find 100 507 | end 508 | 509 | let(:update_columns) { [:title, :author_email_address, :parent_id] } 510 | should_support_on_duplicate_key_update 511 | should_update_fields_mentioned 512 | end 513 | 514 | context "default to the primary key" do 515 | let(:columns) { %w( id title author_name author_email_address parent_id ) } 516 | let(:values) { [[100, "Book", "John Doe", "john@doe.com", 17]] } 517 | let(:updated_values) { [[100, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] } 518 | let(:update_columns) { [:title, :author_email_address, :parent_id] } 519 | 520 | setup do 521 | Topic.import columns, values, validate: false 522 | @topic = Topic.find 100 523 | end 524 | 525 | context "with no :conflict_target or :constraint_name" do 526 | macro(:perform_import) do |*opts| 527 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { columns: update_columns }, validate: false) 528 | end 529 | 530 | should_support_on_duplicate_key_update 531 | should_update_fields_mentioned 532 | end 533 | 534 | context "with empty value for :conflict_target" do 535 | macro(:perform_import) do |*opts| 536 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: [], columns: update_columns }, validate: false) 537 | end 538 | 539 | should_support_on_duplicate_key_update 540 | should_update_fields_mentioned 541 | end 542 | 543 | context "with empty value for :constraint_name" do 544 | macro(:perform_import) do |*opts| 545 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { constraint_name: '', columns: update_columns }, validate: false) 546 | end 547 | 548 | should_support_on_duplicate_key_update 549 | should_update_fields_mentioned 550 | end 551 | end 552 | 553 | context "with no :conflict_target or :constraint_name" do 554 | context "with no primary key" do 555 | it "raises ArgumentError" do 556 | error = assert_raises ArgumentError do 557 | Rule.import Build(3, :rules), on_duplicate_key_update: [:condition_text], validate: false 558 | end 559 | assert_match(/Expected :conflict_target or :constraint_name to be specified/, error.message) 560 | end 561 | end 562 | end 563 | 564 | context "with no :columns" do 565 | let(:columns) { %w( id title author_name author_email_address ) } 566 | let(:values) { [[100, "Book", "John Doe", "john@doe.com"]] } 567 | let(:updated_values) { [[100, "Title Should Not Change", "Author Should Not Change", "john@nogo.com"]] } 568 | 569 | macro(:perform_import) do |*opts| 570 | Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id }, validate: false) 571 | end 572 | 573 | setup do 574 | Topic.import columns, values, validate: false 575 | @topic = Topic.find 100 576 | end 577 | 578 | should_update_updated_at_on_timestamp_columns 
579 | end 580 | end 581 | end 582 | end 583 | end 584 | -------------------------------------------------------------------------------- /README.markdown: -------------------------------------------------------------------------------- 1 | # Activerecord-Import [![Build Status](https://travis-ci.org/zdennis/activerecord-import.svg?branch=master)](https://travis-ci.org/zdennis/activerecord-import) 2 | 3 | Activerecord-Import is a library for bulk inserting data using ActiveRecord. 4 | 5 | One of its major features is following activerecord associations and generating the minimal 6 | number of SQL insert statements required, avoiding the N+1 insert problem. An example probably 7 | explains it best. Say you had a schema like this: 8 | 9 | - Publishers have Books 10 | - Books have Reviews 11 | 12 | and you wanted to bulk insert 100 new publishers with 10K books and 3 reviews per book. This library will follow the associations 13 | down and generate only 3 SQL insert statements - one for the publishers, one for the books, and one for the reviews. 14 | 15 | In contrast, the standard ActiveRecord save would generate 16 | 100 insert statements for the publishers, then it would visit each publisher and save all the books: 17 | 100 * 10,000 = 1,000,000 SQL insert statements 18 | and then the reviews: 19 | 100 * 10,000 * 3 = 3M SQL insert statements. 20 | 21 | That would be about 4M SQL insert statements vs 3, which results in vastly improved performance. In our case, it converted 22 | an 18 hour batch process to <2 hrs. 23 | 24 | The gem provides the following high-level features: 25 | 26 | * Works with raw columns and arrays of values (fastest) 27 | * Works with model objects (faster) 28 | * Performs validations (fast) 29 | * Performs on duplicate key updates (requires MySQL, SQLite 3.24.0+, or Postgres 9.5+) 30 | 31 | ## Table of Contents 32 | 33 | * [Examples](#examples) 34 | * [Introduction](#introduction) 35 | * [Columns and Arrays](#columns-and-arrays) 36 | * [Hashes](#hashes) 37 | * [ActiveRecord Models](#activerecord-models) 38 | * [Batching](#batching) 39 | * [Recursive](#recursive) 40 | * [Options](#options) 41 | * [Duplicate Key Ignore](#duplicate-key-ignore) 42 | * [Duplicate Key Update](#duplicate-key-update) 43 | * [Return Info](#return-info) 44 | * [Counter Cache](#counter-cache) 45 | * [ActiveRecord Timestamps](#activerecord-timestamps) 46 | * [Callbacks](#callbacks) 47 | * [Supported Adapters](#supported-adapters) 48 | * [Additional Adapters](#additional-adapters) 49 | * [Requiring](#requiring) 50 | * [Autoloading via Bundler](#autoloading-via-bundler) 51 | * [Manually Loading](#manually-loading) 52 | * [Load Path Setup](#load-path-setup) 53 | * [Conflicts With Other Gems](#conflicts-with-other-gems) 54 | * [More Information](#more-information) 55 | * [Contributing](#contributing) 56 | * [Running Tests](#running-tests) 57 | * [Issue Triage](#issue-triage) 58 | 59 | ### Examples 60 | 61 | #### Introduction 62 | 63 | This gem adds an `import` method (or `bulk_import`, for compatibility with gems like `elasticsearch-model`; see [Conflicts With Other Gems](#conflicts-with-other-gems)) to ActiveRecord classes. 64 | 65 | Without `activerecord-import`, you'd write something like this: 66 | 67 | ```ruby 68 | 10.times do |i| 69 | Book.create! name: "book #{i}" 70 | end 71 | ``` 72 | 73 | This would end up making 10 SQL calls. YUCK!
With `activerecord-import`, you can instead do this: 74 | 75 | ```ruby 76 | books = [] 77 | 10.times do |i| 78 | books << Book.new(name: "book #{i}") 79 | end 80 | Book.import books # or use import! 81 | ``` 82 | 83 | and only have 1 SQL call. Much better! 84 | 85 | #### Columns and Arrays 86 | 87 | The `import` method can take an array of column names (string or symbols) and an array of arrays. Each child array represents an individual record and its list of values in the same order as the columns. This is the fastest import mechanism and also the most primitive. 88 | 89 | ```ruby 90 | columns = [ :title, :author ] 91 | values = [ ['Book1', 'George Orwell'], ['Book2', 'Bob Jones'] ] 92 | 93 | # Importing without model validations 94 | Book.import columns, values, validate: false 95 | 96 | # Import with model validations 97 | Book.import columns, values, validate: true 98 | 99 | # when not specified :validate defaults to true 100 | Book.import columns, values 101 | ``` 102 | 103 | #### Hashes 104 | 105 | The `import` method can take an array of hashes. The keys map to the column names in the database. 106 | 107 | ```ruby 108 | values = [{ title: 'Book1', author: 'George Orwell' }, { title: 'Book2', author: 'Bob Jones'}] 109 | 110 | # Importing without model validations 111 | Book.import values, validate: false 112 | 113 | # Import with model validations 114 | Book.import values, validate: true 115 | 116 | # when not specified :validate defaults to true 117 | Book.import values 118 | ``` 119 | #### Import Using Hashes and Explicit Column Names 120 | 121 | The `import` method can take an array of column names and an array of hash objects. The column names are used to determine what fields of data should be imported. The following example will only import books with the `title` field: 122 | 123 | ```ruby 124 | books = [ 125 | { title: "Book 1", author: "George Orwell" }, 126 | { title: "Book 2", author: "Bob Jones" } 127 | ] 128 | columns = [ :title ] 129 | 130 | # without validations 131 | Book.import columns, books, validate: false 132 | 133 | # with validations 134 | Book.import columns, books, validate: true 135 | 136 | # when not specified :validate defaults to true 137 | Book.import columns, books 138 | 139 | # result in table books 140 | # title | author 141 | #--------|-------- 142 | # Book 1 | NULL 143 | # Book 2 | NULL 144 | 145 | ``` 146 | 147 | Using hashes will only work if the columns are consistent in every hash of the array. If this does not hold, an exception will be raised. There are two workarounds: use the array to instantiate an array of ActiveRecord objects and then pass that into `import` or divide the array into multiple ones with consistent columns and import each one separately. 148 | 149 | See https://github.com/zdennis/activerecord-import/issues/507 for discussion. 150 | 151 | ```ruby 152 | arr = [ 153 | { bar: 'abc' }, 154 | { baz: 'xyz' }, 155 | { bar: '123', baz: '456' } 156 | ] 157 | 158 | # An exception will be raised 159 | Foo.import arr 160 | 161 | # better 162 | arr.map! { |args| Foo.new(args) } 163 | Foo.import arr 164 | 165 | # better 166 | arr.group_by(&:keys).each_value do |v| 167 | Foo.import v 168 | end 169 | ``` 170 | 171 | #### ActiveRecord Models 172 | 173 | The `import` method can take an array of models. The attributes will be pulled off from each model by looking at the columns available on the model. 
174 | 175 | ```ruby 176 | books = [ 177 | Book.new(title: "Book 1", author: "George Orwell"), 178 | Book.new(title: "Book 2", author: "Bob Jones") 179 | ] 180 | 181 | # without validations 182 | Book.import books, validate: false 183 | 184 | # with validations 185 | Book.import books, validate: true 186 | 187 | # when not specified :validate defaults to true 188 | Book.import books 189 | ``` 190 | 191 | The `import` method can take an array of column names and an array of models. The column names are used to determine what fields of data should be imported. The following example will only import books with the `title` field: 192 | 193 | ```ruby 194 | books = [ 195 | Book.new(title: "Book 1", author: "George Orwell"), 196 | Book.new(title: "Book 2", author: "Bob Jones") 197 | ] 198 | columns = [ :title ] 199 | 200 | # without validations 201 | Book.import columns, books, validate: false 202 | 203 | # with validations 204 | Book.import columns, books, validate: true 205 | 206 | # when not specified :validate defaults to true 207 | Book.import columns, books 208 | 209 | # result in table books 210 | # title | author 211 | #--------|-------- 212 | # Book 1 | NULL 213 | # Book 2 | NULL 214 | 215 | ``` 216 | 217 | #### Batching 218 | 219 | The `import` method can take a `batch_size` option to control the number of rows to insert per INSERT statement. The default is the total number of records being inserted so there is a single INSERT statement. 220 | 221 | ```ruby 222 | books = [ 223 | Book.new(title: "Book 1", author: "George Orwell"), 224 | Book.new(title: "Book 2", author: "Bob Jones"), 225 | Book.new(title: "Book 1", author: "John Doe"), 226 | Book.new(title: "Book 2", author: "Richard Wright") 227 | ] 228 | columns = [ :title ] 229 | 230 | # 2 INSERT statements for 4 records 231 | Book.import columns, books, batch_size: 2 232 | ``` 233 | 234 | #### Recursive 235 | 236 | NOTE: This only works with PostgreSQL and ActiveRecord objects. This won't work with 237 | hashes or arrays as recursive inputs. 238 | 239 | Assume that Books has_many Reviews. 240 | 241 | ```ruby 242 | books = [] 243 | 10.times do |i| 244 | book = Book.new(name: "book #{i}") 245 | book.reviews.build(title: "Excellent") 246 | books << book 247 | end 248 | Book.import books, recursive: true 249 | ``` 250 | 251 | ### Options 252 | 253 | Key | Options | Default | Description 254 | ----------------------- | --------------------- | ------------------ | ----------- 255 | :validate | `true`/`false` | `true` | Whether or not to run `ActiveRecord` validations (uniqueness skipped). This option will always be true when using `import!`. 256 | :validate_uniqueness | `true`/`false` | `false` | Whether or not to run uniqueness validations, has potential pitfalls, use with caution (requires `>= v0.27.0`). 257 | :validate_with_context | `Symbol` |`:create`/`:update` | Allows passing an ActiveModel validation context for each model. Default is `:create` for new records and `:update` for existing ones. 258 | :on_duplicate_key_ignore| `true`/`false` | `false` | Allows skipping records with duplicate keys. See [here](https://github.com/zdennis/activerecord-import/#duplicate-key-ignore) for more details. 259 | :ignore | `true`/`false` | `false` | Alias for :on_duplicate_key_ignore. 260 | :on_duplicate_key_update| :all, `Array`, `Hash` | N/A | Allows upsert logic to be used. See [here](https://github.com/zdennis/activerecord-import/#duplicate-key-update) for more details. 261 | :synchronize | `Array` | N/A | An array of ActiveRecord instances. 
This synchronizes existing instances in memory with updates from the import. 262 | :timestamps | `true`/`false` | `true` | Enables/disables timestamps on imported records. 263 | :recursive | `true`/`false` | `false` | Imports has_many/has_one associations (PostgreSQL only). 264 | :batch_size | `Integer` | total # of records | Max number of records to insert per import 265 | :raise_error | `true`/`false` | `false` | Raises an exception at the first invalid record. This means there will not be a result object returned. The `import!` method is a shortcut for this. 266 | :all_or_none | `true`/`false` | `false` | Will not import any records if there is a record with validation errors. 267 | 268 | #### Duplicate Key Ignore 269 | 270 | [MySQL](http://dev.mysql.com/doc/refman/5.0/en/insert-on-duplicate.html), [SQLite](https://www.sqlite.org/lang_insert.html), and [PostgreSQL](https://www.postgresql.org/docs/current/static/sql-insert.html#SQL-ON-CONFLICT) (9.5+) support `on_duplicate_key_ignore` which allows you to skip records if a primary or unique key constraint is violated. 271 | 272 | For Postgres 9.5+ it adds `ON CONFLICT DO NOTHING`, for MySQL it uses `INSERT IGNORE`, and for SQLite it uses `INSERT OR IGNORE`. Cannot be enabled on a recursive import. For database adapters that normally support setting primary keys on imported objects, this option prevents that from occurring. 273 | 274 | ```ruby 275 | book = Book.create! title: "Book1", author: "George Orwell" 276 | book.title = "Updated Book Title" 277 | book.author = "Bob Barker" 278 | 279 | Book.import [book], on_duplicate_key_ignore: true 280 | 281 | book.reload.title # => "Book1" (stayed the same) 282 | book.reload.author # => "George Orwell" (stayed the same) 283 | ``` 284 | 285 | The option `:on_duplicate_key_ignore` is bypassed when `:recursive` is enabled for [PostgreSQL imports](https://github.com/zdennis/activerecord-import/wiki#recursive-example-postgresql-only). 286 | 287 | #### Duplicate Key Update 288 | 289 | MySQL, PostgreSQL (9.5+), and SQLite (3.24.0+) support `on duplicate key update` (also known as "upsert") which allows you to specify fields whose values should be updated if a primary or unique key constraint is violated. 290 | 291 | One big difference between MySQL and PostgreSQL support is that MySQL will handle any conflict that happens, but PostgreSQL requires that you specify which columns the conflict would occur over. SQLite models its upsert support after PostgreSQL. 292 | 293 | This will use MySQL's `ON DUPLICATE KEY UPDATE` or Postgres/SQLite `ON CONFLICT DO UPDATE` to do upsert. 294 | 295 | Basic Update 296 | 297 | ```ruby 298 | book = Book.create! title: "Book1", author: "George Orwell" 299 | book.title = "Updated Book Title" 300 | book.author = "Bob Barker" 301 | 302 | # MySQL version 303 | Book.import [book], on_duplicate_key_update: [:title] 304 | 305 | # PostgreSQL version 306 | Book.import [book], on_duplicate_key_update: {conflict_target: [:id], columns: [:title]} 307 | 308 | # PostgreSQL shorthand version (conflict target must be primary key) 309 | Book.import [book], on_duplicate_key_update: [:title] 310 | 311 | book.reload.title # => "Updated Book Title" (changed) 312 | book.reload.author # => "George Orwell" (stayed the same) 313 | ``` 314 | 315 | Using the value from another column 316 | 317 | ```ruby 318 | book = Book.create! 
title: "Book1", author: "George Orwell" 319 | book.title = "Updated Book Title" 320 | 321 | # MySQL version 322 | Book.import [book], on_duplicate_key_update: {author: :title} 323 | 324 | # PostgreSQL version (no shorthand version) 325 | Book.import [book], on_duplicate_key_update: { 326 | conflict_target: [:id], columns: {author: :title} 327 | } 328 | 329 | book.reload.title # => "Book1" (stayed the same) 330 | book.reload.author # => "Updated Book Title" (changed) 331 | ``` 332 | 333 | Using Custom SQL 334 | 335 | ```ruby 336 | book = Book.create! title: "Book1", author: "George Orwell" 337 | book.author = "Bob Barker" 338 | 339 | # MySQL version 340 | Book.import [book], on_duplicate_key_update: "author = values(author)" 341 | 342 | # PostgreSQL version 343 | Book.import [book], on_duplicate_key_update: { 344 | conflict_target: [:id], columns: "author = excluded.author" 345 | } 346 | 347 | # PostgreSQL shorthand version (conflict target must be primary key) 348 | Book.import [book], on_duplicate_key_update: "author = excluded.author" 349 | 350 | book.reload.title # => "Book1" (stayed the same) 351 | book.reload.author # => "Bob Barker" (changed) 352 | ``` 353 | 354 | PostgreSQL Using constraints 355 | 356 | ```ruby 357 | book = Book.create! title: "Book1", author: "George Orwell", edition: 3, published_at: nil 358 | book.published_at = Time.now 359 | 360 | # in migration 361 | execute <<-SQL 362 | ALTER TABLE books 363 | ADD CONSTRAINT for_upsert UNIQUE (title, author, edition); 364 | SQL 365 | 366 | # PostgreSQL version 367 | Book.import [book], on_duplicate_key_update: {constraint_name: :for_upsert, columns: [:published_at]} 368 | 369 | 370 | book.reload.title # => "Book1" (stayed the same) 371 | book.reload.author # => "George Orwell" (stayed the same) 372 | book.reload.edition # => 3 (stayed the same) 373 | book.reload.published_at # => 2017-10-09 (changed) 374 | ``` 375 | To run uniqueness validations during the import, use the `:validate_uniqueness` option (see the Options table above): 376 | ```ruby 377 | Book.import books, validate_uniqueness: true 378 | ``` 379 | 380 | ### Return Info 381 | 382 | The `import` method returns a `Result` object that responds to `failed_instances` and `num_inserts`. Additionally, for users of Postgres, there will be two arrays `ids` and `results` that can be accessed. 383 | 384 | ```ruby 385 | articles = [ 386 | Article.new(author_id: 1, title: 'First Article', content: 'This is the first article'), 387 | Article.new(author_id: 2, title: 'Second Article', content: ''), 388 | Article.new(author_id: 3, content: '') 389 | ] 390 | 391 | demo = Article.import(articles, returning: :title) # => #<struct ActiveRecord::Import::Result ...>
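# Note: the Result struct also exposes failed_instances (the records that failed validation), in addition to the fields shown below.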
395 | 396 | demo.num_inserts 397 | => 1 398 | 399 | demo.ids 400 | => ["1", "2"] # for Postgres 401 | => [] # for other DBs 402 | 403 | demo.results 404 | => ["First Article", "Second Article"] # for Postgres 405 | => [] # for other DBs 406 | ``` 407 | 408 | ### Counter Cache 409 | 410 | When running `import`, `activerecord-import` does not automatically update counter cache columns. To update these columns, you will need to do one of the following: 411 | 412 | * Provide values to the column as an argument on your object that is passed in. 413 | * Manually update the column after the record has been imported. 414 | 415 | ### ActiveRecord Timestamps 416 | 417 | If you're familiar with ActiveRecord you're probably familiar with its timestamp columns: created_at, created_on, updated_at, updated_on, etc. When importing data the timestamp fields will continue to work as expected and each timestamp column will be set. 418 | 419 | Should you wish to set those columns yourself, you may use the option `timestamps: false`. 420 | 421 | However, it is also possible to set just `:created_at` in specific records. In this case, despite using `timestamps: true`, `:created_at` will be updated only in records where that field is `nil`. The same rule applies to record associations when enabling the option `recursive: true`. 422 | 423 | If you are using custom time zones, these will be respected when performing imports as well, as long as `ActiveRecord::Base.default_timezone` is set, which it is for practically all Rails apps. 424 | 425 | ### Callbacks 426 | 427 | ActiveRecord callbacks related to [creating](http://guides.rubyonrails.org/active_record_callbacks.html#creating-an-object), [updating](http://guides.rubyonrails.org/active_record_callbacks.html#updating-an-object), or [destroying](http://guides.rubyonrails.org/active_record_callbacks.html#destroying-an-object) records (other than `before_validation` and `after_validation`) will NOT be called when calling the import method. This is because it is mass importing rows of data and doesn't necessarily have access to in-memory ActiveRecord objects. 428 | 429 | If you do have a collection of in-memory ActiveRecord objects you can do something like this: 430 | 431 | ```ruby 432 | books.each do |book| 433 | book.run_callbacks(:save) { false } 434 | book.run_callbacks(:create) { false } 435 | end 436 | Book.import(books) 437 | ``` 438 | 439 | This will run before_create and before_save callbacks on each item. The `false` argument is needed to prevent after_save being run, which wouldn't make sense prior to bulk import. Something to note in this example is that the before_create and before_save callbacks will run before the validation callbacks. 440 | 441 | If that is an issue, another possible approach is to loop through your models first to do validations and then only run callbacks on and import the valid models. 442 | 443 | ```ruby 444 | valid_books = [] 445 | invalid_books = [] 446 | 447 | books.each do |book| 448 | if book.valid?
449 | valid_books << book 450 | else 451 | invalid_books << book 452 | end 453 | end 454 | 455 | valid_books.each do |book| 456 | book.run_callbacks(:save) { false } 457 | book.run_callbacks(:create) { false } 458 | end 459 | 460 | Book.import valid_books, validate: false 461 | ``` 462 | 463 | ### Supported Adapters 464 | 465 | The following database adapters are currently supported: 466 | 467 | * MySQL - supports core import functionality plus on duplicate key update support (included in activerecord-import 0.1.0 and higher) 468 | * MySQL2 - supports core import functionality plus on duplicate key update support (included in activerecord-import 0.2.0 and higher) 469 | * PostgreSQL - supports core import functionality (included in activerecord-import 0.1.0 and higher) 470 | * SQLite3 - supports core import functionality (included in activerecord-import 0.1.0 and higher) 471 | * Oracle - supports core import functionality through a DML trigger (available as an external gem: [activerecord-import-oracle_enhanced](https://github.com/keeguon/activerecord-import-oracle_enhanced)) 472 | * SQL Server - supports core import functionality (available as an external gem: [activerecord-import-sqlserver](https://github.com/keeguon/activerecord-import-sqlserver)) 473 | 474 | If your adapter isn't listed here, please consider creating an external gem as described in the README to provide support. If you do, feel free to update this README to include a link to the new adapter's repository! 475 | 476 | To test which features are supported by your adapter, use the following methods on a model class: 477 | * `supports_import?(*args)` 478 | * `supports_on_duplicate_key_update?` 479 | * `supports_setting_primary_key_of_imported_objects?` 480 | 481 | ### Additional Adapters 482 | 483 | Additional adapters can be provided by gems external to activerecord-import by providing an adapter that matches the naming convention set up by activerecord-import (and subsequently activerecord) for dynamically loading adapters. This also involves providing a folder on the load path that follows the activerecord-import naming convention to allow activerecord-import to dynamically load the file. 484 | 485 | When `ActiveRecord::Import.require_adapter("fake_name")` is called the require will be: 486 | 487 | ```ruby 488 | require 'activerecord-import/active_record/adapters/fake_name_adapter' 489 | ``` 490 | 491 | This allows an external gem to dynamically add an adapter without the need to add any file/code to the core activerecord-import gem. 492 | 493 | ### Requiring 494 | 495 | Note: These instructions will only work if you are using version 0.2.0 or higher. 496 | 497 | #### Autoloading via Bundler 498 | 499 | If you are using Rails or otherwise autoload your dependencies via Bundler, all you need to do is add the gem to your `Gemfile` like so: 500 | 501 | ```ruby 502 | gem 'activerecord-import' 503 | ``` 504 | 505 | #### Manually Loading 506 | 507 | You may want to manually load activerecord-import for one reason or another. First, add the `require: false` argument like so: 508 | 509 | ```ruby 510 | gem 'activerecord-import', require: false 511 | ``` 512 | 513 | This will allow you to load up activerecord-import in the file or files where you are using it and only load the parts you need.
514 | If you are doing this within Rails and ActiveRecord has established a database connection (such as within a controller), you will need to do extra initialization work: 515 | 516 | ```ruby 517 | require 'activerecord-import/base' 518 | # load the appropriate database adapter (postgresql, mysql2, sqlite3, etc) 519 | require 'activerecord-import/active_record/adapters/postgresql_adapter' 520 | ``` 521 | 522 | If your gem dependencies aren’t autoloaded, and your script will be establishing a database connection, then simply require activerecord-import after ActiveRecord has been loaded, i.e.: 523 | 524 | ```ruby 525 | require 'active_record' 526 | require 'activerecord-import' 527 | ``` 528 | 529 | ### Load Path Setup 530 | To understand how rubygems loads code you can reference the following: 531 | 532 | http://guides.rubygems.org/patterns/#loading-code 533 | 534 | And here is an example of how active_record dynamically loads adapters: 535 | 536 | https://github.com/rails/rails/blob/master/activerecord/lib/active_record/connection_adapters/connection_specification.rb 537 | 538 | In summary, when a gem is loaded rubygems adds the `lib` folder of the gem to the global load path `$LOAD_PATH` so that all `require` lookups will propagate through all of the folders on the load path. When a `require` is issued each folder on the `$LOAD_PATH` is checked for the file and/or folder referenced. This allows a gem (like activerecord-import) to push the activerecord-import folder (or namespace) onto the `$LOAD_PATH`, and any adapters provided by activerecord-import will be found by rubygems when the require is issued. 539 | 540 | If a `fake_name` adapter is needed by a gem (potentially called `activerecord-import-fake_name`) then the folder structure should look as follows: 541 | 542 | ```bash 543 | activerecord-import-fake_name/ 544 | |-- activerecord-import-fake_name.gemspec 545 | |-- lib 546 | | |-- activerecord-import-fake_name.rb 547 | | |-- activerecord-import-fake_name 548 | | | |-- version.rb 549 | | |-- activerecord-import 550 | | | |-- active_record 551 | | | | |-- adapters 552 | | | | |-- fake_name_adapter.rb 553 | ``` 554 | 555 | When rubygems pushes the `lib` folder onto the load path, a `require` will now find `activerecord-import/active_record/adapters/fake_name_adapter` as it runs through the lookup process for a ruby file under that path in `$LOAD_PATH`. 556 | 557 | 558 | ### Conflicts With Other Gems 559 | 560 | Activerecord-Import adds the `.import` method onto `ActiveRecord::Base`. There are other gems, such as `elasticsearch-rails`, that do the same thing. In conflicts such as this, there is an aliased method named `.bulk_import` that can be used interchangeably. 561 | 562 | If you are using the `apartment` gem, there is a weird triple interaction between that gem, `activerecord-import`, and `activerecord` involving caching of the `sequence_name` of a model. This can be worked around by explicitly setting this value within the model. For example: 563 | 564 | ```ruby 565 | class Post < ActiveRecord::Base 566 | self.sequence_name = "posts_seq" 567 | end 568 | ``` 569 | 570 | Another way to work around the issue is to call `.reset_sequence_name` on the model. For example: 571 | 572 | ```ruby 573 | schemas.all.each do |schema| 574 | Apartment::Tenant.switch!
schema.name 575 | ActiveRecord::Base.transaction do 576 | Post.reset_sequence_name 577 | 578 | Post.import posts 579 | end 580 | end 581 | ``` 582 | 583 | See https://github.com/zdennis/activerecord-import/issues/233 for further discussion. 584 | 585 | ### More Information 586 | 587 | For more information on Activerecord-Import please see its wiki: https://github.com/zdennis/activerecord-import/wiki 588 | 589 | To document new information, please add to the README instead of the wiki. See https://github.com/zdennis/activerecord-import/issues/397 for discussion. 590 | 591 | ### Contributing 592 | 593 | #### Running Tests 594 | 595 | The first thing you need to do is set up your database(s): 596 | 597 | * copy `test/database.yml.sample` to `test/database.yml` 598 | * modify `test/database.yml` for your database settings 599 | * create databases as needed 600 | 601 | After that, you can run the tests. They run against multiple databases and ActiveRecord versions. 602 | 603 | This is one example of how to run the tests: 604 | 605 | ```bash 606 | rm Gemfile.lock 607 | AR_VERSION=4.2 bundle install 608 | AR_VERSION=4.2 bundle exec rake test:postgresql test:sqlite3 test:mysql2 609 | ``` 610 | 611 | Once you have pushed up your changes, you can find your CI results [here](https://travis-ci.org/zdennis/activerecord-import/). 612 | 613 | ## Issue Triage [![Open Source Helpers](https://www.codetriage.com/zdennis/activerecord-import/badges/users.svg)](https://www.codetriage.com/zdennis/activerecord-import) 614 | 615 | You can triage issues, which may include reproducing bug reports or asking for vital information, such as version numbers or reproduction instructions. If you would like to start triaging issues, one easy way to get started is to [subscribe to activerecord-import on CodeTriage](https://www.codetriage.com/zdennis/activerecord-import). 616 | 617 | # License 618 | 619 | This is licensed under the MIT license. 620 | 621 | # Author 622 | 623 | Zach Dennis (zach.dennis@gmail.com) 624 | --------------------------------------------------------------------------------