├── VERSION
├── spec
├── spec.opts
├── rcov.opts
├── unit
│ ├── sql_spec.rb
│ ├── sql
│ │ ├── column_spec.rb
│ │ ├── table_spec.rb
│ │ ├── table_modifier_spec.rb
│ │ ├── postgres_spec.rb
│ │ ├── table_creator_spec.rb
│ │ └── sqlite_extensions_spec.rb
│ └── migration_spec.rb
├── spec_helper.rb
├── isolated
│ ├── require_after_setup_spec.rb
│ ├── require_before_setup_spec.rb
│ └── require_spec.rb
└── integration
    ├── auto_upgrade_spec.rb
    ├── migration_runner_spec.rb
    ├── migration_spec.rb
    ├── sql_spec.rb
    └── auto_migration_spec.rb
├── lib
├── dm-migrations.rb
├── dm-migrations
│ ├── sql
│ │ ├── column.rb
│ │ ├── table.rb
│ │ ├── sqlite.rb
│ │ ├── mysql.rb
│ │ ├── table_modifier.rb
│ │ ├── postgres.rb
│ │ └── table_creator.rb
│ ├── exceptions
│ │ └── duplicate_migration.rb
│ ├── sql.rb
│ ├── adapters
│ │ ├── dm-yaml-adapter.rb
│ │ ├── dm-sqlite-adapter.rb
│ │ ├── dm-postgres-adapter.rb
│ │ ├── dm-sqlserver-adapter.rb
│ │ ├── dm-mysql-adapter.rb
│ │ ├── dm-do-adapter.rb
│ │ └── dm-oracle-adapter.rb
│ ├── migration_runner.rb
│ ├── auto_migration.rb
│ └── migration.rb
└── spec
    ├── example
    │   └── migration_example_group.rb
    └── matchers
        └── migration_matchers.rb
├── db
└── migrations
│ ├── config.rb
│ ├── 2_add_dob_to_people.rb
│ └── 1_create_people_table.rb
├── tasks
├── yard.rake
├── yardstick.rake
└── spec.rake
├── .gitignore
├── README.rdoc
├── Rakefile
├── LICENSE
├── examples
├── sample_migration.rb
└── sample_migration_spec.rb
├── Gemfile
└── dm-migrations.gemspec
/VERSION:
--------------------------------------------------------------------------------
1 | 1.0.2
2 |
--------------------------------------------------------------------------------
/spec/spec.opts:
--------------------------------------------------------------------------------
1 | --colour
2 | --loadby random
3 | --format profile
4 | --backtrace
5 |
--------------------------------------------------------------------------------
/lib/dm-migrations.rb:
--------------------------------------------------------------------------------
1 | require 'dm-core'
2 | require 'dm-migrations/migration'
3 | require 'dm-migrations/auto_migration'
4 |
--------------------------------------------------------------------------------
/spec/rcov.opts:
--------------------------------------------------------------------------------
1 | --exclude "spec,^/"
2 | --sort coverage
3 | --callsites
4 | --xrefs
5 | --profile
6 | --text-summary
7 |
--------------------------------------------------------------------------------
/spec/unit/sql_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | describe 'SQL module' do
4 |
5 | it 'doesnt really do anything'
6 |
7 | end
8 |
--------------------------------------------------------------------------------
/lib/dm-migrations/sql/column.rb:
--------------------------------------------------------------------------------
1 | module SQL
2 |   class Column # plain data holder for column metadata introspected from the database
3 |     attr_accessor :name, :type, :not_null, :default_value, :primary_key, :unique # populated by adapter-specific subclasses (see sql/sqlite.rb)
4 |   end
5 | end
6 |
--------------------------------------------------------------------------------
/lib/dm-migrations/exceptions/duplicate_migration.rb:
--------------------------------------------------------------------------------
1 | module DataMapper
2 |   module Migrations
3 |     class DuplicateMigration < StandardError # signals a migration defined more than once (exact trigger lives in the migration runner — not visible here)
4 |     end
5 |   end
6 | end
7 |
--------------------------------------------------------------------------------
/db/migrations/config.rb:
--------------------------------------------------------------------------------
1 | DataMapper::Logger.new(STDOUT, :debug)
2 | DataMapper.logger.debug( "Starting Migration" )
3 |
4 | DataMapper.setup(:default, 'postgres://postgres@localhost/dm_core_test')
5 |
--------------------------------------------------------------------------------
/lib/dm-migrations/sql.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/sql/table_creator'
2 | require 'dm-migrations/sql/table_modifier'
3 | require 'dm-migrations/sql/sqlite'
4 | require 'dm-migrations/sql/mysql'
5 | require 'dm-migrations/sql/postgres'
6 |
--------------------------------------------------------------------------------
/tasks/yard.rake:
--------------------------------------------------------------------------------
1 | begin
2 | require 'yard'
3 |
4 | YARD::Rake::YardocTask.new
5 | rescue LoadError
6 | task :yard do
7 | abort 'YARD is not available. In order to run yard, you must: gem install yard'
8 | end
9 | end
10 |
--------------------------------------------------------------------------------
/db/migrations/2_add_dob_to_people.rb:
--------------------------------------------------------------------------------
1 | migration 2, :add_dob_to_people do
2 | up do
3 | modify_table :people do
4 | add_column :dob, DateTime, :allow_nil => true
5 | end
6 | end
7 |
8 | down do
9 | modify_table :people do
10 | drop_column :dob
11 | end
12 | end
13 | end
14 |
--------------------------------------------------------------------------------
/db/migrations/1_create_people_table.rb:
--------------------------------------------------------------------------------
1 | migration 1, :create_people_table do
2 | up do
3 | create_table :people do
4 | column :id, Integer, :serial => true
5 | column :name, String, :size => 50
6 | column :age, Integer
7 | end
8 | end
9 | down do
10 | drop_table :people
11 | end
12 | end
13 |
--------------------------------------------------------------------------------
/lib/dm-migrations/sql/table.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/sql/column'
2 |
3 | module SQL
4 | class Table
5 | attr_accessor :name, :columns
6 |
7 | def to_s
8 | name
9 | end
10 |
11 | def column(column_name)
12 | @columns.select { |c| c.name == column_name.to_s }.first
13 | end
14 | end
15 | end
16 |
--------------------------------------------------------------------------------
/spec/unit/sql/column_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | describe SQL::Column do
4 | before do
5 | @column = SQL::Column.new
6 | end
7 |
8 | %w{name type not_null default_value primary_key unique}.each do |meth|
9 | it "should have a ##{meth} attribute" do
10 | @column.should respond_to(meth.intern)
11 | end
12 | end
13 |
14 | end
15 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## MAC OS
2 | .DS_Store
3 |
4 | ## TEXTMATE
5 | *.tmproj
6 | tmtags
7 |
8 | ## EMACS
9 | *~
10 | \#*
11 | .\#*
12 |
13 | ## VIM
14 | *.swp
15 |
16 | ## Rubinius
17 | *.rbc
18 |
19 | ## PROJECT::GENERAL
20 | *.gem
21 | coverage
22 | rdoc
23 | pkg
24 | tmp
25 | doc
26 | log
27 | .yardoc
28 | measurements
29 |
30 | ## BUNDLER
31 | .bundle
32 | Gemfile.*
33 |
34 | ## PROJECT::SPECIFIC
35 | spec/db/
36 |
--------------------------------------------------------------------------------
/spec/spec_helper.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations'
2 | require 'dm-migrations/migration_runner'
3 |
4 | require 'dm-core/spec/setup'
5 | require 'dm-core/spec/lib/adapter_helpers'
6 | require 'dm-core/spec/lib/spec_helper'
7 | require 'dm-core/spec/lib/pending_helpers'
8 |
9 | Spec::Runner.configure do |config|
10 |
11 | config.extend(DataMapper::Spec::Adapters::Helpers)
12 | config.include(DataMapper::Spec::PendingHelpers)
13 |
14 | config.after :all do
15 | DataMapper::Spec.cleanup_models
16 | end
17 |
18 | end
19 |
--------------------------------------------------------------------------------
/tasks/yardstick.rake:
--------------------------------------------------------------------------------
1 | begin
2 | require 'pathname'
3 | require 'yardstick/rake/measurement'
4 | require 'yardstick/rake/verify'
5 |
6 | # yardstick_measure task
7 | Yardstick::Rake::Measurement.new
8 |
9 | # verify_measurements task
10 | Yardstick::Rake::Verify.new do |verify|
11 | verify.threshold = 100
12 | end
13 | rescue LoadError
14 | %w[ yardstick_measure verify_measurements ].each do |name|
15 | task name.to_s do
16 | abort "Yardstick is not available. In order to run #{name}, you must: gem install yardstick"
17 | end
18 | end
19 | end
20 |
--------------------------------------------------------------------------------
/README.rdoc:
--------------------------------------------------------------------------------
1 | = dm-migrations
2 |
3 | DataMapper plugin for writing and speccing migrations.
4 |
5 | == Example
6 |
7 | require 'dm-migrations/migration_runner'
8 |
9 | DataMapper.setup(:default, "sqlite3::memory")
10 |
11 | DataMapper::Logger.new(STDOUT, :debug)
12 | DataMapper.logger.debug( "Starting Migration" )
13 |
14 | migration 1, :create_people_table do
15 | up do
16 | create_table :people do
17 | column :id, Integer, :serial => true
18 | end
19 | end
20 |
21 | down do
22 | drop_table :people
23 | end
24 | end
25 |
26 | migrate_up!
27 |
28 | For more, see the examples directory.
29 |
--------------------------------------------------------------------------------
/lib/dm-migrations/adapters/dm-yaml-adapter.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/auto_migration'
2 | require 'dm-migrations/adapters/dm-do-adapter'
3 |
4 | module DataMapper
5 | module Migrations
6 | module YamlAdapter
7 |
8 | def self.included(base)
9 | DataMapper.extend(Migrations::SingletonMethods)
10 | [ :Repository, :Model ].each do |name|
11 | DataMapper.const_get(name).send(:include, Migrations.const_get(name))
12 | end
13 | end
14 |
15 | # @api semipublic
16 | def destroy_model_storage(model)
17 | yaml_file(model).unlink if yaml_file(model).file?
18 | true
19 | end
20 |
21 | end
22 | end
23 | end
24 |
--------------------------------------------------------------------------------
/spec/unit/sql/table_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | describe SQL::Table do
4 | before do
5 | @table = SQL::Table.new
6 | end
7 |
8 | %w{name columns}.each do |meth|
9 | it "should have a ##{meth} attribute" do
10 | @table.should respond_to(meth.intern)
11 | end
12 | end
13 |
14 | it 'should #to_s as the name' do
15 | @table.name = "table_name"
16 | @table.to_s.should == "table_name"
17 | end
18 |
19 | it 'should find a column by name' do
20 | column_a = mock('column', :name => 'id')
21 | column_b = mock('column', :name => 'login')
22 | @table.columns = [column_a, column_b]
23 |
24 | @table.column('id').should == column_a
25 | end
26 |
27 |
28 | end
29 |
--------------------------------------------------------------------------------
/spec/isolated/require_after_setup_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec'
2 | require 'isolated/require_spec'
3 | require 'dm-core/spec/setup'
4 |
5 | # To really test this behavior, this spec needs to be run in isolation and not
6 | # as part of the typical rake spec run, which requires dm-transactions upfront
7 |
8 | if %w[ postgres mysql sqlite oracle sqlserver ].include?(ENV['ADAPTER'])
9 |
10 | describe "require 'dm-migrations' after calling DataMapper.setup" do
11 |
12 | before(:all) do
13 |
14 | @adapter = DataMapper::Spec.adapter
15 | require 'dm-migrations'
16 |
17 | class ::Person
18 | include DataMapper::Resource
19 | property :id, Serial
20 | end
21 |
22 | @model = Person
23 |
24 | end
25 |
26 | it_should_behave_like "require 'dm-migrations'"
27 |
28 | end
29 |
30 | end
31 |
--------------------------------------------------------------------------------
/spec/isolated/require_before_setup_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec'
2 | require 'isolated/require_spec'
3 | require 'dm-core/spec/setup'
4 |
5 | # To really test this behavior, this spec needs to be run in isolation and not
6 | # as part of the typical rake spec run, which requires dm-transactions upfront
7 |
8 | if %w[ postgres mysql sqlite oracle sqlserver ].include?(ENV['ADAPTER'])
9 |
10 | describe "require 'dm-migrations' before calling DataMapper.setup" do
11 |
12 | before(:all) do
13 |
14 | require 'dm-migrations'
15 | @adapter = DataMapper::Spec.adapter
16 |
17 | class ::Person
18 | include DataMapper::Resource
19 | property :id, Serial
20 | end
21 |
22 | @model = Person
23 |
24 | end
25 |
26 | it_should_behave_like "require 'dm-migrations'"
27 |
28 | end
29 |
30 | end
31 |
--------------------------------------------------------------------------------
/Rakefile:
--------------------------------------------------------------------------------
1 | require 'rubygems'
2 | require 'rake'
3 |
4 | begin
5 | gem 'jeweler', '~> 1.4.0'
6 | require 'jeweler'
7 |
8 | Jeweler::Tasks.new do |gem|
9 | gem.name = 'dm-migrations'
10 | gem.summary = 'DataMapper plugin for writing and speccing migrations'
11 | gem.description = gem.summary
12 | gem.email = 'psadauskas [a] gmail [d] com'
13 | gem.homepage = 'http://github.com/datamapper/%s' % gem.name
14 | gem.authors = [ 'Paul Sadauskas' ]
15 | gem.has_rdoc = 'yard'
16 |
17 | gem.rubyforge_project = 'datamapper'
18 |
19 | gem.add_dependency 'dm-core', '~> 1.0.2'
20 |
21 | gem.add_development_dependency 'rspec', '~> 1.3'
22 | end
23 |
24 | Jeweler::GemcutterTasks.new
25 |
26 | FileList['tasks/**/*.rake'].each { |task| import task }
27 | rescue LoadError
28 | puts 'Jeweler (or a dependency) not available. Install it with: gem install jeweler -v 1.4.0'
29 | end
30 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2010 Paul Sadauskas
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining
4 | a copy of this software and associated documentation files (the
5 | "Software"), to deal in the Software without restriction, including
6 | without limitation the rights to use, copy, modify, merge, publish,
7 | distribute, sublicense, and/or sell copies of the Software, and to
8 | permit persons to whom the Software is furnished to do so, subject to
9 | the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be
12 | included in all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/tasks/spec.rake:
--------------------------------------------------------------------------------
1 | spec_defaults = lambda do |spec|
2 | spec.pattern = 'spec/**/*_spec.rb'
3 | spec.libs << 'lib' << 'spec'
4 | spec.spec_opts << '--options' << 'spec/spec.opts'
5 | end
6 |
7 | begin
8 | require 'spec/rake/spectask'
9 |
10 | Spec::Rake::SpecTask.new(:spec, &spec_defaults)
11 | rescue LoadError
12 | task :spec do
13 | abort 'rspec is not available. In order to run spec, you must: gem install rspec'
14 | end
15 | end
16 |
17 | begin
18 | require 'rcov'
19 | require 'spec/rake/verify_rcov'
20 |
21 | Spec::Rake::SpecTask.new(:rcov) do |rcov|
22 | spec_defaults.call(rcov)
23 | rcov.rcov = true
24 | rcov.rcov_opts = File.read('spec/rcov.opts').split(/\s+/)
25 | end
26 |
27 | RCov::VerifyTask.new(:verify_rcov => :rcov) do |rcov|
28 | rcov.threshold = 100
29 | end
30 | rescue LoadError
31 | %w[ rcov verify_rcov ].each do |name|
32 | task name do
33 | abort "rcov is not available. In order to run #{name}, you must: gem install rcov"
34 | end
35 | end
36 | end
37 |
38 | task :spec => :check_dependencies
39 | task :rcov => :check_dependencies
40 |
41 | task :default => :spec
42 |
--------------------------------------------------------------------------------
/spec/integration/auto_upgrade_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | require 'dm-migrations/auto_migration'
4 |
5 | describe DataMapper::Migrations do
6 | def capture_log(mod)
7 | original, mod.logger = mod.logger, DataObjects::Logger.new(@log = StringIO.new, :debug)
8 | yield
9 | ensure
10 | @log.rewind
11 | @output = @log.readlines.map do |line|
12 | line.chomp.gsub(/\A.+?~ \(\d+\.?\d*\)\s+/, '')
13 | end
14 |
15 | mod.logger = original
16 | end
17 |
18 | supported_by :postgres do
19 | before :all do
20 | module ::Blog
21 | class Article
22 | include DataMapper::Resource
23 |
24 | property :id, Serial
25 | end
26 | end
27 |
28 | @model = ::Blog::Article
29 | end
30 |
31 | describe '#auto_upgrade' do
32 | it 'should create an index' do
33 | @model.auto_migrate!
34 | @property = @model.property(:name, String, :index => true)
35 | @response = capture_log(DataObjects::Postgres) { @model.auto_upgrade! }
36 | @output[-2].should == "CREATE INDEX \"index_blog_articles_name\" ON \"blog_articles\" (\"name\")"
37 | end
38 | end
39 | end
40 | end
41 |
42 |
--------------------------------------------------------------------------------
/spec/isolated/require_spec.rb:
--------------------------------------------------------------------------------
1 | shared_examples_for "require 'dm-migrations'" do
2 |
3 | it "should include the migration api in the DataMapper namespace" do
4 | DataMapper.respond_to?(:migrate! ).should be(true)
5 | DataMapper.respond_to?(:auto_migrate! ).should be(true)
6 | DataMapper.respond_to?(:auto_upgrade! ).should be(true)
7 | DataMapper.respond_to?(:auto_migrate_up!, true).should be(true)
8 | DataMapper.respond_to?(:auto_migrate_down!, true).should be(true)
9 | end
10 |
11 | %w[ Repository Model ].each do |name|
12 | it "should include the migration api in DataMapper::#{name}" do
13 | (DataMapper.const_get(name) < DataMapper::Migrations.const_get(name)).should be(true)
14 | end
15 | end
16 |
17 | it "should include the migration api into the adapter" do
18 | @adapter.respond_to?(:storage_exists? ).should be(true)
19 | @adapter.respond_to?(:field_exists? ).should be(true)
20 | @adapter.respond_to?(:upgrade_model_storage).should be(true)
21 | @adapter.respond_to?(:create_model_storage ).should be(true)
22 | @adapter.respond_to?(:destroy_model_storage).should be(true)
23 | end
24 |
25 | end
26 |
--------------------------------------------------------------------------------
/lib/dm-migrations/sql/sqlite.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/sql/table'
2 |
3 | require 'fileutils'
4 |
5 | module SQL
6 |   module Sqlite # SQLite-specific schema helpers mixed into the adapter
7 |
8 |     def supports_schema_transactions? # SQLite DDL can run inside a transaction
9 |       true
10 |     end
11 |
12 |     def table(table_name) # introspected description of an existing table
13 |       SQL::Sqlite::Table.new(self, table_name)
14 |     end
15 |
16 |     def recreate_database # deletes the database file; nothing to create up front
17 |       DataMapper.logger.info "Dropping #{@uri.path}"
18 |       FileUtils.rm_f(@uri.path)
19 |       # do nothing, sqlite will automatically create the database file
20 |     end
21 |
22 |     def table_options # no extra CREATE TABLE options needed for SQLite
23 |       ''
24 |     end
25 |
26 |     def supports_serial?
27 |       true
28 |     end
29 |
30 |     class Table < SQL::Table # table description built from the adapter's table_info rows
31 |       def initialize(adapter, table_name)
32 |         @columns = []
33 |         adapter.table_info(table_name).each do |col_struct|
34 |           @columns << SQL::Sqlite::Column.new(col_struct)
35 |         end
36 |       end
37 |     end
38 |
39 |     class Column < SQL::Column
40 |       def initialize(col_struct)
41 |         @name, @type, @default_value, @primary_key = col_struct.name, col_struct.type, col_struct.dflt_value, col_struct.pk # fields mirror PRAGMA table_info columns
42 |
43 |         @not_null = col_struct.notnull == 0 # NOTE(review): PRAGMA table_info reports notnull=1 for NOT NULL columns, so `== 0` flags nullable columns as not_null — looks inverted; confirm against the permit_null matcher
44 |       end
45 |     end
46 |   end
47 | end
48 |
--------------------------------------------------------------------------------
/examples/sample_migration.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/migration_runner'
2 |
3 | DataMapper.setup(:default, "sqlite3::memory")
4 |
5 | DataMapper::Logger.new(STDOUT, :debug)
6 | DataMapper.logger.debug( "Starting Migration" )
7 |
8 | migration 1, :create_people_table do
9 | up do
10 | create_table :people do
11 | column :id, Integer, :serial => true
12 | column :name, String, :size => 50
13 | column :age, Integer
14 | end
15 | end
16 | down do
17 | drop_table :people
18 | end
19 | end
20 |
21 | migration 2, :add_dob_to_people do
22 | up do
23 | modify_table :people do
24 | add_column :dob, DateTime, :allow_nil => true
25 | end
26 | end
27 |
28 | down do
29 | modify_table :people do
30 | drop_column :dob
31 | end
32 | end
33 | end
34 |
35 | # migrate_down!
36 | # migrate_up!
37 | #
38 | # class Person
39 | # include DataMapper::Resource
40 | #
41 | # property :id, Serial
42 | # property :name, String, :size => 50
43 | # property :age, Integer
44 | # property :dob, DateTime, :default => Time.now
45 | #
46 | # end
47 | #
48 | # Person.create(:name => "Mark Bates", :age => 31)
49 | # puts Person.first.inspect
50 | # puts Person.all.inspect
51 |
52 | if $0 == __FILE__
53 | if $*.first == "down"
54 | migrate_down!
55 | else
56 | migrate_up!
57 | end
58 | end
59 |
--------------------------------------------------------------------------------
/lib/dm-migrations/sql/mysql.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/sql/table'
2 |
3 | module SQL
4 | module Mysql
5 |
6 | def supports_schema_transactions?
7 | false
8 | end
9 |
10 | def table(table_name)
11 | SQL::Mysql::Table.new(self, table_name)
12 | end
13 |
14 | def recreate_database
15 | execute "DROP DATABASE #{schema_name}"
16 | execute "CREATE DATABASE #{schema_name}"
17 | execute "USE #{schema_name}"
18 | end
19 |
20 | def supports_serial?
21 | true
22 | end
23 |
24 | def table_options
25 | " ENGINE = InnoDB CHARACTER SET #{character_set} COLLATE #{collation}"
26 | end
27 |
28 | def property_schema_statement(connection, schema)
29 | if supports_serial? && schema[:serial]
30 | statement = "#{schema[:quote_column_name]} SERIAL PRIMARY KEY"
31 | else
32 | super
33 | end
34 | end
35 |
36 | class Table
37 | def initialize(adapter, table_name)
38 | @columns = []
39 | adapter.table_info(table_name).each do |col_struct|
40 | @columns << SQL::Mysql::Column.new(col_struct)
41 | end
42 | end
43 | end
44 |
45 | class Column
46 | def initialize(col_struct)
47 | @name, @type, @default_value, @primary_key = col_struct.name, col_struct.type, col_struct.dflt_value, col_struct.pk
48 |
49 | @not_null = col_struct.notnull == 0
50 | end
51 | end
52 | end
53 | end
54 |
--------------------------------------------------------------------------------
/examples/sample_migration_spec.rb:
--------------------------------------------------------------------------------
1 | require 'pathname'
2 |
3 | dir = Pathname(__FILE__).dirname.expand_path
4 |
5 | require dir + 'sample_migration'
6 | require dir + '../lib/spec/example/migration_example_group'
7 |
8 | describe :create_people_table, :type => :migration do
9 |
10 | before do
11 | run_migration
12 | end
13 |
14 | it 'should create a people table' do
15 | repository(:default).should have_table(:people)
16 | end
17 |
18 | it 'should have an id column as the primary key' do
19 | table(:people).should have_column(:id)
20 | table(:people).column(:id).type.should == 'integer'
21 | #table(:people).column(:id).should be_primary_key
22 | end
23 |
24 | it 'should have a name column as a string' do
25 | table(:people).should have_column(:name)
26 | table(:people).column(:name).type.should == 'character varying'
27 | table(:people).column(:name).should permit_null
28 | end
29 |
30 | it 'should have a nullable age column as a int' do
31 | table(:people).should have_column(:age)
32 | table(:people).column(:age).type.should == 'integer'
33 | table(:people).column(:age).should permit_null
34 | end
35 |
36 | end
37 |
38 | describe :add_dob_to_people, :type => :migration do
39 |
40 | before do
41 | run_migration
42 | end
43 |
44 | it 'should add a dob column as a timestamp' do
45 | table(:people).should have_column(:dob)
46 | table(:people).column(:dob).type.should == 'timestamp without time zone'
47 | table(:people).column(:dob).should permit_null
48 | end
49 |
50 | end
51 |
--------------------------------------------------------------------------------
/spec/unit/sql/table_modifier_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | describe 'SQL module' do
4 | describe 'TableModifier' do
5 | before do
6 | @adapter = mock('adapter')
7 | @adapter.stub!(:quote_name).and_return(%{'users'})
8 | @tc = SQL::TableModifier.new(@adapter, :users) { }
9 | end
10 |
11 | describe 'initialization' do
12 | it 'should set @adapter to the adapter' do
13 | @tc.instance_variable_get("@adapter").should == @adapter
14 | end
15 |
16 | it 'should set @table_name to the stringified table name' do
17 | @tc.instance_variable_get("@table_name").should == 'users'
18 | end
19 |
20 | it 'should set @opts to the options hash' do
21 | @tc.instance_variable_get("@opts").should == {}
22 | end
23 |
24 | it 'should set @statements to an empty array' do
25 | @tc.instance_variable_get("@statements").should == []
26 | end
27 |
28 | it 'should evaluate the given block' do
29 | block = proc { column :foo, :bar }
30 | col = mock('column')
31 | SQL::TableCreator::Column.should_receive(:new).with(@adapter, :foo, :bar, {}).and_return(col)
32 | tc = SQL::TableCreator.new(@adapter, 'users', {}, &block)
33 | tc.instance_variable_get("@columns").should == [col]
34 | end
35 | end
36 |
37 | it 'should have a table_name' do
38 | @tc.should respond_to(:table_name)
39 | @tc.table_name.should == 'users'
40 | end
41 |
42 | it 'should use the adapter to quote the table name' do
43 | @adapter.should_receive(:quote_name).with('users').and_return(%{'users'})
44 | @tc.quoted_table_name.should == %{'users'}
45 | end
46 |
47 | end
48 |
49 | end
50 |
--------------------------------------------------------------------------------
/lib/dm-migrations/sql/table_modifier.rb:
--------------------------------------------------------------------------------
1 | module SQL
2 | class TableModifier
3 | attr_accessor :table_name, :opts, :statements, :adapter
4 |
5 | def initialize(adapter, table_name, opts = {}, &block)
6 | @adapter = adapter
7 | @table_name = table_name.to_s
8 | @opts = (opts)
9 |
10 | @statements = []
11 |
12 | self.instance_eval &block
13 | end
14 |
15 | def add_column(name, type, opts = {})
16 | column = SQL::TableCreator::Column.new(@adapter, name, type, opts)
17 | @statements << "ALTER TABLE #{quoted_table_name} ADD COLUMN #{column.to_sql}"
18 | end
19 |
20 | def drop_column(name)
21 | # raise NotImplemented for SQLite3. Can't ALTER TABLE, need to copy table.
22 | # We'd have to inspect it, and we can't, since we aren't executing any queries yet.
23 | # TODO instead of building the SQL queries when executing the block, create AddColumn,
24 | # AlterColumn and DropColumn objects that get #to_sql'd
25 | if name.is_a?(Array)
26 | name.each{ |n| drop_column(n) }
27 | else
28 | @statements << "ALTER TABLE #{quoted_table_name} DROP COLUMN #{quote_column_name(name)}"
29 | end
30 | end
31 | alias_method :drop_columns, :drop_column
32 |
33 | def rename_column(name, new_name, opts = {})
34 | # raise NotImplemented for SQLite3
35 | @statements << "ALTER TABLE #{quoted_table_name} RENAME COLUMN #{quote_column_name(name)} TO #{quote_column_name(new_name)}"
36 | end
37 |
38 | def change_column(name, type, opts = {})
39 | # raise NotImplemented for SQLite3
40 | @statements << "ALTER TABLE #{quoted_table_name} ALTER COLUMN #{quote_column_name(name)} TYPE #{type}"
41 | end
42 |
43 | def quote_column_name(name)
44 | @adapter.send(:quote_name, name.to_s)
45 | end
46 |
47 | def quoted_table_name
48 | @adapter.send(:quote_name, table_name)
49 | end
50 | end
51 | end
52 |
--------------------------------------------------------------------------------
/lib/spec/example/migration_example_group.rb:
--------------------------------------------------------------------------------
1 | require 'spec/matchers/migration_matchers'
2 |
3 | require 'spec'
4 |
5 | module Spec
6 |   module Example
7 |     class MigrationExampleGroup < Spec::Example::ExampleGroup # backs `describe ..., :type => :migration` specs
8 |       include Spec::Matchers::Migration
9 |
10 |       before(:all) do # with transactional DDL, prereq migrations only need to run once per group
11 |         if this_migration.adapter.supports_schema_transactions?
12 |           run_prereq_migrations
13 |         end
14 |       end
15 |
16 |       before(:each) do # otherwise rebuild from scratch per example; with transactions, just open one
17 |         if ! this_migration.adapter.supports_schema_transactions?
18 |           run_prereq_migrations
19 |         else
20 |           this_migration.adapter.begin_transaction
21 |         end
22 |       end
23 |
24 |       after(:each) do # roll back the per-example transaction when one was opened
25 |         if this_migration.adapter.supports_schema_transactions?
26 |           this_migration.adapter.rollback_transaction
27 |         end
28 |       end
29 |
30 |       after(:all) do # leave a clean database behind for the next group
31 |         this_migration.adapter.recreate_database
32 |       end
33 |
34 |       def run_prereq_migrations # runs every migration that sorts before the one under test
35 |         "running n-1 migrations" # NOTE(review): bare string has no effect — looks like a leftover debug message
36 |         all_databases.each do |db|
37 |           db.adapter.recreate_database
38 |         end
39 |         @@migrations.sort.each do |migration| # @@migrations is presumably registered by the migration DSL elsewhere — confirm
40 |           break if migration.name.to_s == migration_name.to_s
41 |           migration.perform_up
42 |         end
43 |       end
44 |
45 |       def run_migration # applies only the migration named by this example group
46 |         this_migration.perform_up
47 |       end
48 |
49 |       def migration_name
50 |         @migration_name ||= self.class.instance_variable_get("@description_text").to_s # the describe() description doubles as the migration name
51 |       end
52 |
53 |       def all_databases
54 |         @@migrations.map { |m| m.database }.uniq
55 |       end
56 |
57 |       def this_migration # the registered migration whose name matches this group's description
58 |         @@migrations.select { |m| m.name.to_s == migration_name }.first
59 |       end
60 |
61 |       def select(sql)
62 |         this_migration.adapter.select(sql)
63 |       end
64 |
65 |       def table(table_name)
66 |         this_migration.adapter.table(table_name)
67 |       end
68 |
69 |       Spec::Example::ExampleGroupFactory.register(:migration, self) # makes :type => :migration resolve to this group
70 |
71 |     end
72 |   end
73 | end
74 |
--------------------------------------------------------------------------------
/Gemfile:
--------------------------------------------------------------------------------
require 'pathname'

source 'http://rubygems.org'

# SOURCE=path installs the DataMapper gems from local sibling checkouts
# instead of their git repositories.
SOURCE = ENV['SOURCE'] ? ENV['SOURCE'].to_sym : :git
REPO_POSTFIX = SOURCE == :path ? '' : '.git'
DATAMAPPER = SOURCE == :path ? Pathname(__FILE__).dirname.parent : 'http://github.com/datamapper'
DM_VERSION = '~> 1.0.2'

group :runtime do # Runtime dependencies (as in the gemspec)

  # EXTLIB=1 swaps ActiveSupport for extlib as the support library.
  if ENV['EXTLIB']
    gem 'extlib', '~> 0.9.15', SOURCE => "#{DATAMAPPER}/extlib#{REPO_POSTFIX}", :require => nil
  else
    gem 'activesupport', '~> 3.0.3', :require => nil
  end

  gem 'dm-core', DM_VERSION, SOURCE => "#{DATAMAPPER}/dm-core#{REPO_POSTFIX}"

end

group(:development) do # Development dependencies (as in the gemspec)

  gem 'rake', '~> 0.8.7'
  gem 'rspec', '~> 1.3.1'
  gem 'jeweler', '~> 1.4.0'

end

group :quality do # These gems contain rake tasks that check the quality of the source code

  gem 'rcov', '~> 0.9.8', :platforms => :mri_18
  gem 'yard', '~> 0.5'
  gem 'yardstick', '~> 0.1'

end

group :datamapper do # We need this because we want to pin these dependencies to their git master sources

  # ADAPTER/ADAPTERS: comma- or space-separated adapter list. in_memory ships
  # with dm-core, so it needs no extra gem.
  adapters = ENV['ADAPTER'] || ENV['ADAPTERS']
  adapters = adapters.to_s.tr(',', ' ').split.uniq - %w[ in_memory ]

  DO_VERSION = '~> 0.10.2'
  DM_DO_ADAPTERS = %w[ sqlite postgres mysql oracle sqlserver ]

  # Only pull in DataObjects when a DO-backed adapter was requested.
  if (do_adapters = DM_DO_ADAPTERS & adapters).any?
    options = {}
    # DO_GIT=true tracks DataObjects from git rather than released gems.
    options[:git] = "#{DATAMAPPER}/do#{REPO_POSTFIX}" if ENV['DO_GIT'] == 'true'

    gem 'data_objects', DO_VERSION, options.dup

    do_adapters.each do |adapter|
      # The sqlite driver gem is named do_sqlite3.
      adapter = 'sqlite3' if adapter == 'sqlite'
      gem "do_#{adapter}", DO_VERSION, options.dup
    end

    gem 'dm-do-adapter', DM_VERSION, SOURCE => "#{DATAMAPPER}/dm-do-adapter#{REPO_POSTFIX}"
  end

  adapters.each do |adapter|
    gem "dm-#{adapter}-adapter", DM_VERSION, SOURCE => "#{DATAMAPPER}/dm-#{adapter}-adapter#{REPO_POSTFIX}"
  end

  # PLUGINS/PLUGIN: extra dm-* plugins, pinned to the same DM version/source.
  plugins = ENV['PLUGINS'] || ENV['PLUGIN']
  plugins = plugins.to_s.tr(',', ' ').split.uniq

  plugins.each do |plugin|
    gem plugin, DM_VERSION, SOURCE => "#{DATAMAPPER}/#{plugin}#{REPO_POSTFIX}"
  end

end
72 |
--------------------------------------------------------------------------------
/lib/dm-migrations/sql/postgres.rb:
--------------------------------------------------------------------------------
module SQL
  module Postgres

    # PostgreSQL runs DDL inside transactions, so a failed migration can be
    # rolled back cleanly.
    def supports_schema_transactions?
      true
    end

    # Reflect on an existing table.
    #
    # @return [SQL::Postgres::Table]
    def table(table_name)
      SQL::Postgres::Table.new(self, table_name)
    end

    # Drop and recreate the 'test' schema, then point the search path at it.
    # NOTE(review): the schema name is hard-coded; this looks intended for
    # spec runs only — confirm before using it anywhere else.
    def recreate_database
      execute 'DROP SCHEMA IF EXISTS test CASCADE'
      execute 'CREATE SCHEMA test'
      execute 'SET search_path TO test'
    end

    # SERIAL columns are always available on PostgreSQL.
    def supports_serial?
      true
    end

    # Render the column definition for +schema+.
    #
    # Serial columns short-circuit to "SERIAL PRIMARY KEY"; otherwise the
    # generic definition is built by super and, when a :sequence_name is
    # present, given a sequence-backed NOT NULL default. (The original fell
    # through two redundant `statement` expressions; each branch now returns
    # its value directly.)
    def property_schema_statement(connection, schema)
      if supports_serial? && schema[:serial]
        "#{schema[:quote_column_name]} SERIAL PRIMARY KEY"
      else
        statement = super
        if schema.has_key?(:sequence_name)
          statement << " DEFAULT nextval('#{schema[:sequence_name]}') NOT NULL"
        end
        statement
      end
    end

    # No table-level options are appended on PostgreSQL.
    def table_options
      ''
    end

    class Table < SQL::Table
      def initialize(adapter, table_name)
        @adapter, @name = adapter, table_name
        @columns = []
        adapter.query_table(table_name).each do |col_struct|
          @columns << SQL::Postgres::Column.new(col_struct)
        end

        query_column_constraints
      end

      # Mark columns that participate in UNIQUE / PRIMARY KEY constraints by
      # walking information_schema for this table.
      # NOTE(review): table/constraint names are interpolated directly into the
      # SQL; acceptable for trusted internal identifiers, but not injection-safe.
      def query_column_constraints
        @adapter.select(
          "SELECT * FROM information_schema.table_constraints WHERE table_name='#{@name}' AND table_schema=current_schema()"
        ).each do |table_constraint|
          @adapter.select(
            "SELECT * FROM information_schema.constraint_column_usage WHERE constraint_name='#{table_constraint.constraint_name}' AND table_schema=current_schema()"
          ).each do |constrained_column|
            @columns.each do |column|
              if column.name == constrained_column.column_name
                case table_constraint.constraint_type
                when "UNIQUE"      then column.unique      = true
                when "PRIMARY KEY" then column.primary_key = true
                end
              end
            end
          end
        end
      end
    end

    class Column < SQL::Column
      def initialize(col_struct)
        @name, @type, @default_value = col_struct.column_name, col_struct.data_type, col_struct.column_default

        # information_schema reports nullability as the strings 'YES'/'NO'.
        @not_null = col_struct.is_nullable != "YES"
      end
    end
  end
end
79 |
--------------------------------------------------------------------------------
/lib/spec/matchers/migration_matchers.rb:
--------------------------------------------------------------------------------
module Spec
  module Matchers
    # RSpec matchers for asserting on migrated database structure, e.g.:
    #
    #   repository.should have_table(:people)
    #   table(:people).should have_column(:name)
    #   table(:people).column(:name).should permit_null
    #   table(:people).column(:id).should be_primary_key
    module Migration

      # Matches when the given repository contains the named table.
      def have_table(table_name)
        HaveTableMatcher.new(table_name)
      end

      # Matches when a table object contains the named column.
      def have_column(column_name)
        HaveColumnMatcher.new(column_name)
      end

      # Matches when a column allows NULL values.
      def permit_null
        NullableColumnMatcher.new
      end

      # Matches when a column is (part of) the table's primary key.
      def be_primary_key
        PrimaryKeyMatcher.new
      end

      class HaveTableMatcher

        attr_accessor :table_name, :repository

        def initialize(table_name)
          @table_name = table_name
        end

        def matches?(repository)
          # Remember the target so the failure messages below can name it.
          # (Previously @repository was never assigned, so the messages always
          # interpolated nil.)
          @repository = repository
          repository.adapter.storage_exists?(table_name)
        end

        def failure_message
          %(expected #{repository} to have table '#{table_name}')
        end

        def negative_failure_message
          %(expected #{repository} to not have table '#{table_name}')
        end

      end

      class HaveColumnMatcher

        attr_accessor :table, :column_name

        def initialize(column_name)
          @column_name = column_name
        end

        def matches?(table)
          @table = table
          table.columns.map { |c| c.name }.include?(column_name.to_s)
        end

        def failure_message
          %(expected #{table} to have column '#{column_name}')
        end

        def negative_failure_message
          %(expected #{table} to not have column '#{column_name}')
        end

      end

      class NullableColumnMatcher

        attr_accessor :column

        def matches?(column)
          @column = column
          ! column.not_null
        end

        def failure_message
          %(expected #{column.name} to permit NULL)
        end

        def negative_failure_message
          %(expected #{column.name} to be NOT NULL)
        end

      end

      class PrimaryKeyMatcher

        attr_accessor :column

        def matches?(column)
          @column = column
          column.primary_key
        end

        def failure_message
          %(expected #{column.name} to be PRIMARY KEY)
        end

        def negative_failure_message
          %(expected #{column.name} to not be PRIMARY KEY)
        end

      end

    end
  end
end
107 |
--------------------------------------------------------------------------------
/lib/dm-migrations/adapters/dm-sqlite-adapter.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/auto_migration'
2 | require 'dm-migrations/adapters/dm-do-adapter'
3 |
module DataMapper
  module Migrations
    module SqliteAdapter

      include DataObjectsAdapter

      # Adapter-class hook: copy both the shared DataObjects class-level
      # helpers and this module's own ClassMethods onto the including class.
      #
      # @api private
      def self.included(base)
        base.extend DataObjectsAdapter::ClassMethods
        base.extend ClassMethods
      end

      # Does a table with the given name exist?
      #
      # @api semipublic
      def storage_exists?(storage_name)
        table_info(storage_name).any?
      end

      # Does the given table define the given column?
      #
      # @api semipublic
      def field_exists?(storage_name, column_name)
        table_info(storage_name).any? do |row|
          row.name == column_name
        end
      end

      module SQL #:nodoc:
        # private ## This cannot be private for current migrations

        # @api private
        def supports_serial?
          @supports_serial ||= sqlite_version_at_least?('3.1.0')
        end

        # @api private
        def supports_drop_table_if_exists?
          @supports_drop_table_if_exists ||= sqlite_version_at_least?('3.3.0')
        end

        # Column metadata rows for +table_name+ (empty when the table is absent).
        #
        # @api private
        def table_info(table_name)
          select("PRAGMA table_info(#{quote_name(table_name)})")
        end

        # @api private
        def create_table_statement(connection, model, properties)
          statement = <<-SQL.compress_lines
            CREATE TABLE #{quote_name(model.storage_name(name))}
            (#{properties.map { |property| property_schema_statement(connection, property_schema_hash(property)) }.join(', ')}
          SQL

          # skip adding the primary key if one of the columns is serial. In
          # SQLite the serial column must be the primary key, so it has already
          # been defined
          unless properties.any? { |property| property.serial? }
            statement << ", PRIMARY KEY(#{properties.key.map { |property| quote_name(property.field) }.join(', ')})"
          end

          statement << ')'
          statement
        end

        # @api private
        def property_schema_statement(connection, schema)
          statement = super

          if supports_serial? && schema[:serial]
            statement << ' PRIMARY KEY AUTOINCREMENT'
          end

          statement
        end

        # The server's version string, memoized on first use.
        #
        # @api private
        def sqlite_version
          @sqlite_version ||= select('SELECT sqlite_version(*)').first.freeze
        end

        # Compare the server version against +version+ segment by segment.
        # The previous plain string comparison mis-orders versions once any
        # segment reaches two digits (e.g. '3.10.0' < '3.9.0' lexically).
        #
        # @api private
        def sqlite_version_at_least?(version)
          Gem::Version.new(sqlite_version) >= Gem::Version.new(version)
        end
      end # module SQL

      include SQL

      module ClassMethods
        # Types for SQLite 3 databases.
        #
        # @return [Hash] types for SQLite 3 databases.
        #
        # @api private
        def type_map
          @type_map ||= super.merge(Class => { :primitive => 'VARCHAR' }).freeze
        end
      end

    end
  end
end
97 |
--------------------------------------------------------------------------------
/lib/dm-migrations/sql/table_creator.rb:
--------------------------------------------------------------------------------
1 | require 'dm-core'
2 |
module SQL
  # Builds a CREATE TABLE statement through a small DSL:
  #
  #   TableCreator.new(adapter, :people) do
  #     column :id, Integer, :serial => true
  #   end.to_sql
  class TableCreator

    # Makes DataMapper property types resolvable inside the DSL block.
    extend DataMapper::Property::Lookup

    attr_accessor :table_name, :opts

    # @param adapter    [Object] the DataMapper adapter SQL is generated for
    # @param table_name [#to_s]  name of the table to create
    # @param opts       [Hash]   storage options
    def initialize(adapter, table_name, opts = {}, &block)
      @adapter    = adapter
      @table_name = table_name.to_s
      @opts       = opts

      @columns = []

      # Evaluate the DSL block in our own scope so bare `column` calls work.
      # Guarded so constructing a creator without a block no longer raises
      # ArgumentError from instance_eval.
      instance_eval(&block) if block
    end

    def quoted_table_name
      @adapter.send(:quote_name, table_name)
    end

    # Add a column definition. +type+ may be a DataMapper property class or a
    # raw SQL type string.
    def column(name, type, opts = {})
      @columns << Column.new(@adapter, name, type, opts)
    end

    def to_sql
      "CREATE TABLE #{quoted_table_name} (#{@columns.map { |c| c.to_sql }.join(', ')})#{@adapter.table_options}"
    end

    # A helper for using the native NOW() SQL function in a default
    def now
      SqlExpr.new('NOW()')
    end

    # A helper for using the native UUID() SQL function in a default
    def uuid
      SqlExpr.new('UUID()')
    end

    # Wraps a raw SQL fragment so it is embedded verbatim rather than quoted.
    class SqlExpr
      attr_accessor :sql

      def initialize(sql)
        @sql = sql
      end

      def to_s
        @sql.to_s
      end
    end

    class Column
      attr_accessor :name, :type

      def initialize(adapter, name, type, opts = {})
        @adapter = adapter
        @name    = name.to_s
        @opts    = opts
        @type    = build_type(type)
      end

      # The rendered column definition (name, type and modifiers).
      def to_sql
        type
      end

      private

      # Resolve +type_class+ plus the option hash into the SQL fragment for
      # this column, delegating final rendering to the adapter.
      def build_type(type_class)
        schema = { :name => @name, :quote_column_name => quoted_name }.merge(@opts)

        [ :nullable, :nullable? ].each do |option|
          next if (value = schema.delete(option)).nil?
          warn "#{option.inspect} is deprecated, use :allow_nil instead"
          schema[:allow_nil] = value unless schema.key?(:allow_nil)
        end

        unless schema.key?(:allow_nil)
          schema[:allow_nil] = !schema[:not_null]
        end

        # :size is a deprecated alias for :length. Remove it unconditionally
        # (previously it was left behind whenever :length was already set, so
        # the adapter could see both keys at once).
        if schema.key?(:size)
          size = schema.delete(:size)
          schema[:length] ||= size
        end

        if type_class.kind_of?(String)
          schema[:primitive] = type_class
        else
          primitive = type_class.respond_to?(:primitive) ? type_class.primitive : type_class
          options   = @adapter.class.type_map[primitive].dup

          if type_class.respond_to?(:options) && type_class.options.kind_of?(options.class)
            options.update(type_class.options)
          end

          schema = options.update(schema)
        end

        @adapter.send(:with_connection) do |connection|
          @adapter.property_schema_statement(connection, schema)
        end
      end

      def quoted_name
        @adapter.send(:quote_name, name)
      end
    end
  end
end
108 |
--------------------------------------------------------------------------------
/spec/unit/sql/postgres_spec.rb:
--------------------------------------------------------------------------------
require 'spec_helper'

# Unit specs for the SQL::Postgres reflection extensions: capability flags,
# table reflection, and column metadata parsing.

# a dummy class to include the module into
class PostgresExtension
  include SQL::Postgres
end

describe "Postgres Extensions" do
  before do
    @pe = PostgresExtension.new
  end

  it 'should support schema-level transactions' do
    @pe.supports_schema_transactions?.should be(true)
  end

  it 'should support the serial column attribute' do
    @pe.supports_serial?.should be(true)
  end

  it 'should create a table object from the name' do
    table = mock('Postgres Table')
    SQL::Postgres::Table.should_receive(:new).with(@pe, 'users').and_return(table)

    @pe.table('users').should == table
  end

  # TODO: no coverage yet for #recreate_database
  describe 'recreating the database' do
  end

  describe 'Table' do
    before do
      @cs1 = mock('Column Struct')
      @cs2 = mock('Column Struct')
      # :select => [] keeps #query_column_constraints a no-op in these specs
      @adapter = mock('adapter', :select => [])
      @adapter.stub!(:query_table).with('users').and_return([@cs1, @cs2])

      @col1 = mock('Postgres Column')
      @col2 = mock('Postgres Column')
    end

    it 'should initialize columns by querying the table' do
      SQL::Postgres::Column.should_receive(:new).with(@cs1).and_return(@col1)
      SQL::Postgres::Column.should_receive(:new).with(@cs2).and_return(@col2)
      @adapter.should_receive(:query_table).with('users').and_return([@cs1,@cs2])
      SQL::Postgres::Table.new(@adapter, 'users')
    end

    it 'should create Postgres Column objects from the returned column structs' do
      SQL::Postgres::Column.should_receive(:new).with(@cs1).and_return(@col1)
      SQL::Postgres::Column.should_receive(:new).with(@cs2).and_return(@col2)
      SQL::Postgres::Table.new(@adapter, 'users')
    end

    it 'should set the @columns to the looked-up columns' do
      SQL::Postgres::Column.should_receive(:new).with(@cs1).and_return(@col1)
      SQL::Postgres::Column.should_receive(:new).with(@cs2).and_return(@col2)
      t = SQL::Postgres::Table.new(@adapter, 'users')
      t.columns.should == [@col1, @col2]
    end

    # TODO: no coverage yet for constraint reflection
    describe '#query_column_constraints' do

    end

  end

  describe 'Column' do
    before do
      # Mimics a row from information_schema.columns
      @cs = mock('Struct',
        :column_name => 'id',
        :data_type => 'integer',
        :column_default => 123,
        :is_nullable => 'NO')
      @c = SQL::Postgres::Column.new(@cs)
    end

    it 'should set the name from the column_name value' do
      @c.name.should == 'id'
    end

    it 'should set the type from the data_type value' do
      @c.type.should == 'integer'
    end

    it 'should set the default_value from the column_default value' do
      @c.default_value.should == 123
    end

    it 'should set not_null based on the is_nullable value' do
      @c.not_null.should == true
    end

  end


end
98 |
--------------------------------------------------------------------------------
/spec/integration/migration_runner_spec.rb:
--------------------------------------------------------------------------------
require 'spec_helper'

# Integration specs for DataMapper::MigrationRunner: defining migrations and
# driving them up/down. ("runnner" and "arguement" typos in the description
# strings are fixed below.)
describe 'The migration runner' do

  supported_by :postgres, :mysql, :sqlite, :oracle, :sqlserver do

    before(:all) do
      @adapter = DataMapper::Spec.adapter
      @repository = DataMapper.repository(@adapter.name)
    end

    describe 'empty migration runner' do
      it "should return an empty array if no migrations have been defined" do
        migrations.should be_kind_of(Array)
        migrations.size.should == 0
      end
    end

    describe 'migration runner' do
      # set up some 'global' setup and teardown tasks
      before(:each) do
        # FIXME workaround because dm-migrations can only handle the :default repo
        #DataMapper::Repository.adapters[:default] = DataMapper::Repository.adapters[adapter.to_sym]
        migration( 1, :create_people_table) { }
      end

      after(:each) do
        migrations.clear
      end

      describe '#migration' do

        it 'should create a new migration object, and add it to the list of migrations' do
          migrations.should be_kind_of(Array)
          migrations.size.should == 1
          migrations.first.name.should == "create_people_table"
        end

        it 'should allow multiple migrations to be added' do
          migration( 2, :add_dob_to_people) { }
          migration( 2, :add_favorite_pet_to_people) { }
          migration( 3, :add_something_else_to_people) { }
          migrations.size.should == 4
        end

        it 'should raise an error on adding with a duplicated name' do
          lambda { migration( 1, :create_people_table) { } }.should raise_error(RuntimeError, /Migration name conflict/)
        end

      end

      describe '#migrate_up! and #migrate_down!' do
        before(:each) do
          migration( 2, :add_dob_to_people) { }
          migration( 2, :add_favorite_pet_to_people) { }
          migration( 3, :add_something_else_to_people) { }
        end

        it 'calling migrate_up! should migrate up all the migrations' do
          # add our expectation that perform_up should be called
          migrations.each do |m|
            m.should_receive(:perform_up)
          end
          migrate_up!
        end

        it 'calling migrate_up! with an argument should only migrate to that level' do
          migrations.each do |m|
            if m.position <= 2
              m.should_receive(:perform_up)
            else
              m.should_not_receive(:perform_up)
            end
          end
          migrate_up!(2)
        end

        it 'calling migrate_down! should migrate down all the migrations' do
          # add our expectation that perform_down should be called
          migrations.each do |m|
            m.should_receive(:perform_down)
          end
          migrate_down!
        end

      end
    end
  end
end
90 |
--------------------------------------------------------------------------------
/spec/unit/sql/table_creator_spec.rb:
--------------------------------------------------------------------------------
require 'spec_helper'

# Unit specs for SQL::TableCreator and its nested Column class, driven
# entirely through mock adapters. ("initialze" typo in a description string
# is fixed below.)
describe 'SQL module' do
  describe 'TableCreator' do
    before do
      @adapter = mock('adapter')
      @adapter.stub!(:quote_name).and_return(%{'users'})
      @tc = SQL::TableCreator.new(@adapter, 'users') { }
    end

    describe 'initialization' do
      it 'should set @adapter to the adapter' do
        @tc.instance_variable_get("@adapter").should == @adapter
      end

      it 'should set @table_name to the stringified table name' do
        @tc.instance_variable_get("@table_name").should == 'users'
      end

      it 'should set @opts to the options hash' do
        @tc.instance_variable_get("@opts").should == {}
      end

      it 'should set @columns to an empty array' do
        @tc.instance_variable_get("@columns").should == []
      end

      it 'should evaluate the given block' do
        block = proc { column :foo, :bar }
        col = mock('column')
        SQL::TableCreator::Column.should_receive(:new).with(@adapter, :foo, :bar, {}).and_return(col)
        tc = SQL::TableCreator.new(@adapter, 'users', {}, &block)
        tc.instance_variable_get("@columns").should == [col]
      end
    end

    it 'should have a table_name' do
      @tc.should respond_to(:table_name)
      @tc.table_name.should == 'users'
    end

    it 'should use the adapter to quote the table name' do
      @adapter.should_receive(:quote_name).with('users').and_return(%{'users'})
      @tc.quoted_table_name.should == %{'users'}
    end

    it 'should initialize a new column and add it to the list of columns' do
      col = mock('column')
      SQL::TableCreator::Column.should_receive(:new).with(@adapter, :foo, :bar, {}).and_return(col)
      @tc.column(:foo, :bar)
      @tc.instance_variable_get("@columns").should == [col]
    end

    it 'should output an SQL CREATE statement to build itself' do
      @adapter.stub!(:table_options).and_return("")
      @tc.to_sql.should ==
        %{CREATE TABLE 'users' ()}
    end

    describe 'Column' do
      before do
        connection = mock('Connection')

        @adapter.stub!(:quote_column_name).and_return(%{'id'})
        @adapter.class.stub!(:type_map).and_return(Integer => {:type => 'int'})
        @adapter.stub!(:property_schema_statement).and_return("SOME SQL")
        @adapter.stub!(:with_connection).and_yield(connection)
        @c = SQL::TableCreator::Column.new(@adapter, 'id', Integer, :serial => true)
      end

      describe 'initialization' do
        it 'should set @adapter to the adapter' do
          @c.instance_variable_get("@adapter").should == @adapter
        end

        it 'should set @name to the stringified name' do
          @c.instance_variable_get("@name").should == 'id'
        end

        # TODO make this really the type, not this sql bullshit
        it 'should set @type to the type' do
          @c.instance_variable_get("@type").should == "SOME SQL"
        end

        it 'should set @opts to the options hash' do
          @c.instance_variable_get("@opts").should == {:serial => true}
        end

      end

    end
  end

end
95 |
--------------------------------------------------------------------------------
/lib/dm-migrations/migration_runner.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/migration'
2 |
module DataMapper
  module MigrationRunner
    # Creates a new migration, and adds it to the list of migrations to be run.
    # Migrations can be defined in any order, they will be sorted and run in the
    # correct order.
    #
    # The order that migrations are run in is set by the first argument. It is not
    # necessary that this be unique; migrations with the same version number are
    # expected to be able to be run in any order.
    #
    # The second argument is the name of the migration. This name is used internally
    # to track if the migration has been run. It is required that this name be unique
    # across all migrations.
    #
    # Additionally, it accepts a number of options:
    # * :database If you defined several DataMapper::database instances use this
    #   to choose which one to run the migration against. Defaults to :default.
    #   Migrations are tracked individually per database.
    # * :verbose true/false, defaults to true. Determines if the migration should
    #   output its status messages when it runs.
    #
    # Example of a simple migration:
    #
    #   migration( 1, :create_people_table ) do
    #     up do
    #       create_table :people do
    #         column :id, Integer, :serial => true
    #         column :name, String, :size => 50
    #         column :age, Integer
    #       end
    #     end
    #     down do
    #       drop_table :people
    #     end
    #   end
    #
    # It's recommended that you stick with raw SQL for migrations that manipulate data. If
    # you write a migration using a model, then later change the model, there's a
    # possibility the migration will no longer work. Using SQL will always work.
    #
    # @raise [RuntimeError] if a migration with the same name has already been defined
    def migration( number, name, opts = {}, &block )
      raise "Migration name conflict: '#{name}'" if migrations.map { |m| m.name }.include?(name.to_s)

      migrations << DataMapper::Migration.new( number, name.to_s, opts, &block )
    end

    # Run all migrations that need to be run. In most cases, this would be called by a
    # rake task as part of a larger project, but this provides the ability to run them
    # in a script or test.
    #
    # has an optional argument 'level' which if supplied, only performs the migrations
    # with a position less than or equal to the level.
    def migrate_up!(level = nil)
      migrations.sort.each do |migration|
        if level.nil?
          migration.perform_up()
        else
          migration.perform_up() if migration.position <= level.to_i
        end
      end
    end

    # Run all the down steps for the migrations that have already been run.
    #
    # has an optional argument 'level' which, if supplied, only performs the
    # down migrations with a position greater than the level.
    def migrate_down!(level = nil)
      # Down steps run in reverse (highest position first).
      migrations.sort.reverse.each do |migration|
        if level.nil?
          migration.perform_down()
        else
          migration.perform_down() if migration.position > level.to_i
        end
      end
    end

    # The process-wide list of defined migrations.
    #
    # NOTE: backed by a class variable, so the list is shared by every object
    # that includes this module.
    def migrations
      @@migrations ||= []
    end

  end
end
84 |
85 | include DataMapper::MigrationRunner
86 |
--------------------------------------------------------------------------------
/spec/unit/sql/sqlite_extensions_spec.rb:
--------------------------------------------------------------------------------
require 'spec_helper'

# Unit specs for the SQL::Sqlite reflection extensions: capability flags,
# database recreation, and table/column metadata parsing.

# a dummy class to include the module into
class SqliteExtension
  include SQL::Sqlite
end

describe "SQLite3 Extensions" do
  before do
    @se = SqliteExtension.new
  end

  it 'should support schema-level transactions' do
    @se.supports_schema_transactions?.should be(true)
  end

  it 'should support the serial column attribute' do
    @se.supports_serial?.should be(true)
  end

  it 'should create a table object from the name' do
    table = mock('SQLite3 Table')
    SQL::Sqlite::Table.should_receive(:new).with(@se, 'users').and_return(table)

    @se.table('users').should == table
  end

  describe 'recreating the database' do
    before do
      # A SQLite database is a single file, so recreation is just an rm -f.
      uri = mock('URI', :path => '/foo/bar.db')
      @se.instance_variable_set('@uri', uri)
    end

    it 'should rm the db file' do
      FileUtils.should_receive(:rm_f).with('/foo/bar.db')
      @se.recreate_database
    end

  end

  describe 'Table' do
    before do
      @cs1 = mock('Column Struct')
      @cs2 = mock('Column Struct')
      @adapter = mock('adapter')
      @adapter.stub!(:table_info).with('users').and_return([@cs1, @cs2])

      @col1 = mock('SQLite3 Column')
      @col2 = mock('SQLite3 Column')
    end

    it 'should initialize columns by querying the table' do
      SQL::Sqlite::Column.should_receive(:new).with(@cs1).and_return(@col1)
      SQL::Sqlite::Column.should_receive(:new).with(@cs2).and_return(@col2)
      @adapter.should_receive(:table_info).with('users').and_return([@cs1,@cs2])
      SQL::Sqlite::Table.new(@adapter, 'users')
    end

    it 'should create SQLite3 Column objects from the returned column structs' do
      SQL::Sqlite::Column.should_receive(:new).with(@cs1).and_return(@col1)
      SQL::Sqlite::Column.should_receive(:new).with(@cs2).and_return(@col2)
      SQL::Sqlite::Table.new(@adapter, 'users')
    end

    it 'should set the @columns to the looked-up columns' do
      SQL::Sqlite::Column.should_receive(:new).with(@cs1).and_return(@col1)
      SQL::Sqlite::Column.should_receive(:new).with(@cs2).and_return(@col2)
      t = SQL::Sqlite::Table.new(@adapter, 'users')
      t.columns.should == [ @col1, @col2 ]
    end

  end

  describe 'Column' do
    before do
      # Mimics a row from PRAGMA table_info(...).
      # NOTE(review): the fixture uses :notnull => 0 yet the spec below expects
      # not_null == true — SQL::Sqlite::Column (not in view here) apparently
      # treats 0 as NOT NULL; confirm against SQLite's PRAGMA semantics, where
      # notnull=1 normally means the column is NOT NULL.
      @cs = mock('Struct',
        :name => 'id',
        :type => 'integer',
        :dflt_value => 123,
        :pk => true,
        :notnull => 0)
      @c = SQL::Sqlite::Column.new(@cs)
    end

    it 'should set the name from the name value' do
      @c.name.should == 'id'
    end

    it 'should set the type from the type value' do
      @c.type.should == 'integer'
    end

    it 'should set the default_value from the dflt_value value' do
      @c.default_value.should == 123
    end

    it 'should set the primary_key from the pk value' do
      @c.primary_key.should == true
    end

    it 'should set not_null based on the notnull value' do
      @c.not_null.should == true
    end

  end


end
109 |
--------------------------------------------------------------------------------
/dm-migrations.gemspec:
--------------------------------------------------------------------------------
1 | # Generated by jeweler
2 | # DO NOT EDIT THIS FILE DIRECTLY
3 | # Instead, edit Jeweler::Tasks in rakefile, and run the gemspec command
4 | # -*- encoding: utf-8 -*-
5 |
Gem::Specification.new do |s|
  s.name = %q{dm-migrations}
  s.version = "1.0.2"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Paul Sadauskas"]
  s.date = %q{2011-01-13}
  s.description = %q{DataMapper plugin for writing and speccing migrations}
  s.email = %q{psadauskas [a] gmail [d] com}
  s.extra_rdoc_files = [
    "LICENSE",
    "README.rdoc"
  ]
  s.files = [
    ".gitignore",
    "Gemfile",
    "LICENSE",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "db/migrations/1_create_people_table.rb",
    "db/migrations/2_add_dob_to_people.rb",
    "db/migrations/config.rb",
    "dm-migrations.gemspec",
    "examples/sample_migration.rb",
    "examples/sample_migration_spec.rb",
    "lib/dm-migrations.rb",
    "lib/dm-migrations/adapters/dm-do-adapter.rb",
    "lib/dm-migrations/adapters/dm-mysql-adapter.rb",
    "lib/dm-migrations/adapters/dm-oracle-adapter.rb",
    "lib/dm-migrations/adapters/dm-postgres-adapter.rb",
    "lib/dm-migrations/adapters/dm-sqlite-adapter.rb",
    "lib/dm-migrations/adapters/dm-sqlserver-adapter.rb",
    "lib/dm-migrations/adapters/dm-yaml-adapter.rb",
    "lib/dm-migrations/auto_migration.rb",
    "lib/dm-migrations/exceptions/duplicate_migration.rb",
    "lib/dm-migrations/migration.rb",
    "lib/dm-migrations/migration_runner.rb",
    "lib/dm-migrations/sql.rb",
    "lib/dm-migrations/sql/column.rb",
    "lib/dm-migrations/sql/mysql.rb",
    "lib/dm-migrations/sql/postgres.rb",
    "lib/dm-migrations/sql/sqlite.rb",
    "lib/dm-migrations/sql/table.rb",
    "lib/dm-migrations/sql/table_creator.rb",
    "lib/dm-migrations/sql/table_modifier.rb",
    "lib/spec/example/migration_example_group.rb",
    "lib/spec/matchers/migration_matchers.rb",
    "spec/integration/auto_migration_spec.rb",
    "spec/integration/migration_runner_spec.rb",
    "spec/integration/migration_spec.rb",
    "spec/integration/sql_spec.rb",
    "spec/isolated/require_after_setup_spec.rb",
    "spec/isolated/require_before_setup_spec.rb",
    "spec/isolated/require_spec.rb",
    "spec/rcov.opts",
    "spec/spec.opts",
    "spec/spec_helper.rb",
    "spec/unit/migration_spec.rb",
    "spec/unit/sql/column_spec.rb",
    "spec/unit/sql/postgres_spec.rb",
    "spec/unit/sql/sqlite_extensions_spec.rb",
    "spec/unit/sql/table_creator_spec.rb",
    "spec/unit/sql/table_modifier_spec.rb",
    "spec/unit/sql/table_spec.rb",
    "spec/unit/sql_spec.rb",
    "tasks/spec.rake",
    "tasks/yard.rake",
    "tasks/yardstick.rake"
  ]
  s.has_rdoc = %q{yard}
  s.homepage = %q{http://github.com/datamapper/dm-migrations}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubyforge_project = %q{datamapper}
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{DataMapper plugin for writing and speccing migrations}
  s.test_files = [
    "spec/integration/auto_migration_spec.rb",
    "spec/integration/migration_runner_spec.rb",
    "spec/integration/migration_spec.rb",
    "spec/integration/sql_spec.rb",
    "spec/isolated/require_after_setup_spec.rb",
    "spec/isolated/require_before_setup_spec.rb",
    "spec/isolated/require_spec.rb",
    "spec/spec_helper.rb",
    "spec/unit/migration_spec.rb",
    "spec/unit/sql/column_spec.rb",
    "spec/unit/sql/postgres_spec.rb",
    "spec/unit/sql/sqlite_extensions_spec.rb",
    "spec/unit/sql/table_creator_spec.rb",
    "spec/unit/sql/table_modifier_spec.rb",
    "spec/unit/sql/table_spec.rb",
    "spec/unit/sql_spec.rb",
    "examples/sample_migration.rb",
    "examples/sample_migration_spec.rb"
  ]

  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    # The dependency gem names had been escaped away, leaving bare %q with a
    # comma delimiter (so the version array was parsed as the string literal).
    # Restored to match the Gemfile: dm-core ~> 1.0.2 (runtime), rspec ~> 1.3
    # (development).
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q{dm-core}, ["~> 1.0.2"])
      s.add_development_dependency(%q{rspec}, ["~> 1.3"])
    else
      s.add_dependency(%q{dm-core}, ["~> 1.0.2"])
      s.add_dependency(%q{rspec}, ["~> 1.3"])
    end
  else
    s.add_dependency(%q{dm-core}, ["~> 1.0.2"])
    s.add_dependency(%q{rspec}, ["~> 1.3"])
  end
end
120 |
121 |
--------------------------------------------------------------------------------
/lib/dm-migrations/adapters/dm-postgres-adapter.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/auto_migration'
2 | require 'dm-migrations/adapters/dm-do-adapter'
3 |
4 | module DataMapper
5 | module Migrations
6 | module PostgresAdapter
7 |
8 | include DataObjectsAdapter
9 |
      # Adapter-class hook: copy both the shared DataObjects class-level
      # helpers and this module's own ClassMethods onto the including class.
      #
      # @api private
      def self.included(base)
        base.extend DataObjectsAdapter::ClassMethods
        base.extend ClassMethods
      end
15 |
      # Upgrade the table backing +model+, with PostgreSQL NOTICE chatter
      # suppressed while the inherited implementation runs.
      #
      # @api semipublic
      def upgrade_model_storage(model)
        without_notices { super }
      end
20 |
      # Create the table backing +model+, with PostgreSQL NOTICE chatter
      # (e.g. implicit index creation messages) suppressed.
      #
      # @api semipublic
      def create_model_storage(model)
        without_notices { super }
      end
25 |
26 | # @api semipublic
27 | def destroy_model_storage(model)
28 | if supports_drop_table_if_exists?
29 | without_notices { super }
30 | else
31 | super
32 | end
33 | end
34 |
35 | module SQL #:nodoc:
36 | # private ## This cannot be private for current migrations
37 |
38 | # @api private
39 | def supports_drop_table_if_exists?
40 | @supports_drop_table_if_exists ||= postgres_version >= '8.2'
41 | end
42 |
43 | # @api private
44 | def schema_name
45 | @schema_name ||= select('SELECT current_schema()').first.freeze
46 | end
47 |
48 | # @api private
49 | def postgres_version
50 | @postgres_version ||= select('SELECT version()').first.split[1].freeze
51 | end
52 |
53 | # @api private
54 | def without_notices
55 | # execute the block with NOTICE messages disabled
56 | begin
57 | execute('SET client_min_messages = warning')
58 | yield
59 | ensure
60 | execute('RESET client_min_messages')
61 | end
62 | end
63 |
64 | # @api private
65 | def property_schema_hash(property)
66 | schema = super
67 |
68 | primitive = property.primitive
69 |
70 | # Postgres does not support precision and scale for Float
71 | if primitive == Float
72 | schema.delete(:precision)
73 | schema.delete(:scale)
74 | end
75 |
76 | if property.kind_of?(Property::Integer)
77 | min = property.min
78 | max = property.max
79 |
80 | schema[:primitive] = integer_column_statement(min..max) if min && max
81 | end
82 |
83 | if schema[:serial]
84 | schema[:primitive] = serial_column_statement(min..max)
85 | end
86 |
87 | schema
88 | end
89 |
90 | private
91 |
92 | # Return SQL statement for the integer column
93 | #
94 | # @param [Range] range
95 | # the min/max allowed integers
96 | #
97 | # @return [String]
98 | # the statement to create the integer column
99 | #
100 | # @api private
101 | def integer_column_statement(range)
102 | min = range.first
103 | max = range.last
104 |
105 | smallint = 2**15
106 | integer = 2**31
107 | bigint = 2**63
108 |
109 | if min >= -smallint && max < smallint then 'SMALLINT'
110 | elsif min >= -integer && max < integer then 'INTEGER'
111 | elsif min >= -bigint && max < bigint then 'BIGINT'
112 | else
113 | raise ArgumentError, "min #{min} and max #{max} exceeds supported range"
114 | end
115 | end
116 |
117 | # Return SQL statement for the serial column
118 | #
119 | # @param [Integer] max
120 | # the max allowed integer
121 | #
122 | # @return [String]
123 | # the statement to create the serial column
124 | #
125 | # @api private
126 | def serial_column_statement(range)
127 | max = range.last
128 |
129 | if max.nil? || max < 2**31 then 'SERIAL'
130 | elsif max < 2**63 then 'BIGSERIAL'
131 | else
132 | raise ArgumentError, "min #{range.first} and max #{max} exceeds supported range"
133 | end
134 | end
135 | end # module SQL
136 |
137 | include SQL
138 |
139 | module ClassMethods
140 | # Types for PostgreSQL databases.
141 | #
142 | # @return [Hash] types for PostgreSQL databases.
143 | #
144 | # @api private
145 | def type_map
146 | precision = Property::Numeric::DEFAULT_PRECISION
147 | scale = Property::Decimal::DEFAULT_SCALE
148 |
149 | @type_map ||= super.merge(
150 | Property::Binary => { :primitive => 'BYTEA' },
151 | BigDecimal => { :primitive => 'NUMERIC', :precision => precision, :scale => scale },
152 | Float => { :primitive => 'DOUBLE PRECISION' }
153 | ).freeze
154 | end
155 | end
156 |
157 | end
158 | end
159 | end
160 |
--------------------------------------------------------------------------------
/spec/integration/migration_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | describe "A Migration" do
4 |
5 | supported_by :postgres, :mysql, :sqlite, :oracle, :sqlserver do
6 |
7 | describe DataMapper::Migration, 'interface' do
8 |
9 | before(:all) do
10 | @adapter = DataMapper::Spec.adapter
11 | end
12 |
13 | before do
14 | @migration = DataMapper::Migration.new(1, :create_people_table, :verbose => false) { }
15 | end
16 |
17 | it "should have a postition attribute" do
18 | @migration.should respond_to(:position)
19 |
20 | @migration.position.should == 1
21 | end
22 |
23 | it "should have a name attribute" do
24 | @migration.should respond_to(:name)
25 |
26 | @migration.name.should == :create_people_table
27 | end
28 |
29 | it "should have a :repository option" do
30 | m = DataMapper::Migration.new(2, :create_dogs_table, :repository => :alternate) {}
31 |
32 | m.instance_variable_get(:@repository).should == :alternate
33 | end
34 |
35 | it "should use the default repository by default" do
36 | @migration.instance_variable_get(:@repository).should == :default
37 | end
38 |
39 | it "should still support a :database option" do
40 | m = DataMapper::Migration.new(2, :create_legacy_table, :database => :legacy) {}
41 |
42 | m.instance_variable_get(:@repository).should == :legacy
43 | end
44 |
45 | it "should have a verbose option" do
46 | m = DataMapper::Migration.new(2, :create_dogs_table, :verbose => false) {}
47 | m.instance_variable_get(:@verbose).should == false
48 | end
49 |
50 | it "should be verbose by default" do
51 | m = DataMapper::Migration.new(2, :create_dogs_table) {}
52 | m.instance_variable_get(:@verbose).should == true
53 | end
54 |
55 | it "should be sortable, first by position, then name" do
56 | m1 = DataMapper::Migration.new(1, :create_people_table) {}
57 | m2 = DataMapper::Migration.new(2, :create_dogs_table) {}
58 | m3 = DataMapper::Migration.new(2, :create_cats_table) {}
59 | m4 = DataMapper::Migration.new(4, :create_birds_table) {}
60 |
61 | [m1, m2, m3, m4].sort.should == [m1, m3, m2, m4]
62 | end
63 |
64 | adapter = DataMapper::Spec.adapter_name
65 |
66 | expected_module_lambda = {
67 | :sqlite => lambda { SQL::Sqlite },
68 | :mysql => lambda { SQL::Mysql },
69 | :postgres => lambda { SQL::Postgres }
70 | }[adapter.to_sym]
71 |
72 | expected_module = expected_module_lambda ? expected_module_lambda.call : nil
73 |
74 | if expected_module
75 | it "should extend with #{expected_module} when adapter is #{adapter}" do
76 | migration = DataMapper::Migration.new(1, :"#{adapter}_adapter_test") { }
77 | (class << migration.adapter; self; end).included_modules.should include(expected_module)
78 | end
79 | end
80 | end
81 |
    # Covers the DSL used inside a migration body: #up, #down and #execute.
    describe DataMapper::Migration, 'defining actions' do
      before do
        @migration = DataMapper::Migration.new(1, :create_people_table, :verbose => false) { }
      end

      it "should have an #up method" do
        @migration.should respond_to(:up)
      end

      it "should save the block passed into the #up method in @up_action" do
        action = lambda {}
        @migration.up(&action)

        @migration.instance_variable_get(:@up_action).should == action
      end

      it "should have a #down method" do
        @migration.should respond_to(:down)
      end

      it "should save the block passed into the #down method in @down_action" do
        action = lambda {}
        @migration.down(&action)

        @migration.instance_variable_get(:@down_action).should == action
      end

      it "should make available an #execute method" do
        @migration.should respond_to(:execute)
      end

      it "should run the sql passed into the #execute method"
      # TODO: Find out how to stub the DataMapper::database.execute method
    end
116 |
    # Covers the console output helpers #say and #say_with_time.
    describe DataMapper::Migration, "output" do
      before do
        @migration = DataMapper::Migration.new(1, :create_people_table) { }
        @migration.stub!(:write) # so that we don't actually write anything to the console!
      end

      it "should #say a string with an indent" do
        @migration.should_receive(:write).with("  Foobar")
        @migration.say("Foobar", 2)
      end

      it "should #say with a default indent of 4" do
        @migration.should_receive(:write).with("    Foobar")
        @migration.say("Foobar")
      end

      it "should #say_with_time the running time of a block" do
        @migration.should_receive(:write).with(/Block/)
        @migration.should_receive(:write).with(/-> [\d]+/)

        @migration.say_with_time("Block"){ }
      end

    end
141 |
142 | end
143 |
144 | end
145 |
--------------------------------------------------------------------------------
/lib/dm-migrations/adapters/dm-sqlserver-adapter.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/auto_migration'
2 | require 'dm-migrations/adapters/dm-do-adapter'
3 |
module DataMapper
  module Migrations
    # Adapter mixin that adds schema (auto)migration support for SQL Server.
    module SqlserverAdapter

      DEFAULT_CHARACTER_SET = 'utf8'.freeze

      # Fallback collation when the connection does not report one.
      #
      # NOTE(review): this constant was previously missing, so the fallback
      # branch of #collation would have raised NameError. The value mirrors
      # the MySQL adapter's default; confirm a suitable native SQL Server
      # collation (e.g. 'SQL_Latin1_General_CP1_CI_AS') before relying on
      # this fallback.
      DEFAULT_COLLATION = 'utf8_unicode_ci'.freeze

      include DataObjectsAdapter

      # @api private
      def self.included(base)
        base.extend DataObjectsAdapter::ClassMethods
        base.extend ClassMethods
      end

      # Check whether a table with the given name exists.
      #
      # @param [String] storage_name
      #
      # @return [Boolean]
      #
      # @api semipublic
      def storage_exists?(storage_name)
        select("SELECT name FROM sysobjects WHERE name LIKE ?", storage_name).first == storage_name
      end

      # Check whether a column exists on the given table.
      #
      # NOTE(review): the table name is interpolated via quote_name into the
      # o.name comparison — verify this yields a valid T-SQL string literal
      # rather than a bracketed identifier.
      #
      # @api semipublic
      def field_exists?(storage_name, field_name)
        result = select("SELECT c.name FROM sysobjects as o JOIN syscolumns AS c ON o.id = c.id WHERE o.name = #{quote_name(storage_name)} AND c.name LIKE ?", field_name).first
        result ? result.field == field_name : false
      end

      module SQL #:nodoc:
        # private ## This cannot be private for current migrations

        # @api private
        def supports_serial?
          true
        end

        # @api private
        def supports_drop_table_if_exists?
          false
        end

        # @api private
        def schema_name
          # TODO: is there a cleaner way to find out the current DB we are connected to?
          @options[:path].split('/').last
        end

        # TODO: update dkubb/dm-more/dm-migrations to use schema_name and remove this

        alias_method :db_name, :schema_name

        # Build the CREATE TABLE statement. An explicit PRIMARY KEY clause is
        # appended only when no serial (IDENTITY) property is present.
        #
        # @api private
        def create_table_statement(connection, model, properties)
          statement = <<-SQL.compress_lines
            CREATE TABLE #{quote_name(model.storage_name(name))}
            (#{properties.map { |property| property_schema_statement(connection, property_schema_hash(property)) }.join(', ')}
          SQL

          unless properties.any? { |property| property.serial? }
            statement << ", PRIMARY KEY(#{properties.key.map { |property| quote_name(property.field) }.join(', ')})"
          end

          statement << ')'
          statement
        end

        # Adjust the generic column schema for SQL Server.
        #
        # @api private
        def property_schema_hash(property)
          schema = super

          if property.kind_of?(Property::Integer)
            min = property.min
            max = property.max

            schema[:primitive] = integer_column_statement(min..max) if min && max
          end

          # TEXT columns cannot carry a default value.
          if schema[:primitive] == 'TEXT'
            schema.delete(:default)
          end

          schema
        end

        # Render a single column definition; serial columns are rendered
        # with the IDENTITY clause.
        #
        # @api private
        def property_schema_statement(connection, schema)
          if supports_serial? && schema[:serial]
            statement = quote_name(schema[:name])
            statement << " #{schema[:primitive]}"

            length = schema[:length]

            if schema[:precision] && schema[:scale]
              statement << "(#{[ :precision, :scale ].map { |key| connection.quote_value(schema[key]) }.join(', ')})"
            elsif length
              statement << "(#{connection.quote_value(length)})"
            end

            statement << ' IDENTITY'
          else
            statement = super
          end

          statement
        end

        # @api private
        def character_set
          @character_set ||= show_variable('character_set_connection') || DEFAULT_CHARACTER_SET
        end

        # @api private
        def collation
          @collation ||= show_variable('collation_connection') || DEFAULT_COLLATION
        end

        # @api private
        def show_variable(name)
          raise "SqlserverAdapter#show_variable: Not implemented"
        end

        private

        # Return SQL statement for the integer column
        #
        # @param [Range] range
        #   the min/max allowed integers
        #
        # @return [String]
        #   the statement to create the integer column
        #
        # @api private
        def integer_column_statement(range)
          min = range.first
          max = range.last

          smallint = 2**15
          integer  = 2**31
          bigint   = 2**63

          # T-SQL TINYINT is unsigned (0..255), unlike the signed types below.
          if    min >= 0         && max < 2**8     then 'TINYINT'
          elsif min >= -smallint && max < smallint then 'SMALLINT'
          elsif min >= -integer  && max < integer  then 'INT'
          elsif min >= -bigint   && max < bigint   then 'BIGINT'
          else
            raise ArgumentError, "min #{min} and max #{max} exceeds supported range"
          end
        end

      end # module SQL

      include SQL

      module ClassMethods
        # Types for Sqlserver databases.
        #
        # @return [Hash] types for Sqlserver databases.
        #
        # @api private
        def type_map
          # NOTE: unused locals (length/precision/scale) removed; the merge
          # below uses only literal values.
          @type_map ||= super.merge(
            DateTime       => { :primitive => 'DATETIME'      },
            Date           => { :primitive => 'SMALLDATETIME' },
            Time           => { :primitive => 'SMALLDATETIME' },
            TrueClass      => { :primitive => 'BIT',          },
            Property::Text => { :primitive => 'NVARCHAR', :length => 'max' }
          ).freeze
        end
      end

    end
  end
end
178 |
--------------------------------------------------------------------------------
/lib/dm-migrations/auto_migration.rb:
--------------------------------------------------------------------------------
1 | require 'dm-core'
2 |
3 | module DataMapper
4 | module Migrations
    module SingletonMethods

      # destructively migrates the repository upwards to match model definitions
      #
      # @param [Symbol] repository_name
      #   repository to act on, :default is the default
      #
      # @api public
      def migrate!(repository_name = nil)
        repository(repository_name).migrate!
      end

      # drops and recreates the repository upwards to match model definitions
      #
      # @param [Symbol] repository_name
      #   repository to act on, :default is the default
      #
      # @api public
      def auto_migrate!(repository_name = nil)
        repository_execute(:auto_migrate!, repository_name)
      end

      # safely upgrades every model's storage to match its definition,
      # preserving existing data
      #
      # @param [Symbol] repository_name
      #   repository to act on, :default is the default
      #
      # @api public
      def auto_upgrade!(repository_name = nil)
        repository_execute(:auto_upgrade!, repository_name)
      end

      private

      # migrates every model's storage down (see Model#auto_migrate_down!)
      #
      # @api semipublic
      def auto_migrate_down!(repository_name)
        repository_execute(:auto_migrate_down!, repository_name)
      end

      # migrates every model's storage up (see Model#auto_migrate_up!)
      #
      # @api semipublic
      def auto_migrate_up!(repository_name)
        repository_execute(:auto_migrate_up!, repository_name)
      end

      # Invoke the given migration method on every known model, falling
      # back to each model's default repository when none is given.
      #
      # @api private
      def repository_execute(method, repository_name)
        DataMapper::Model.descendants.each do |model|
          model.send(method, repository_name || model.default_repository_name)
        end
      end
    end
49 |
50 | module Repository
51 | # Determine whether a particular named storage exists in this repository
52 | #
53 | # @param [String]
54 | # storage_name name of the storage to test for
55 | #
56 | # @return [Boolean]
57 | # true if the data-store +storage_name+ exists
58 | #
59 | # @api semipublic
60 | def storage_exists?(storage_name)
61 | adapter = self.adapter
62 | if adapter.respond_to?(:storage_exists?)
63 | adapter.storage_exists?(storage_name)
64 | end
65 | end
66 |
67 | # @api semipublic
68 | def upgrade_model_storage(model)
69 | adapter = self.adapter
70 | if adapter.respond_to?(:upgrade_model_storage)
71 | adapter.upgrade_model_storage(model)
72 | end
73 | end
74 |
75 | # @api semipublic
76 | def create_model_storage(model)
77 | adapter = self.adapter
78 | if adapter.respond_to?(:create_model_storage)
79 | adapter.create_model_storage(model)
80 | end
81 | end
82 |
83 | # @api semipublic
84 | def destroy_model_storage(model)
85 | adapter = self.adapter
86 | if adapter.respond_to?(:destroy_model_storage)
87 | adapter.destroy_model_storage(model)
88 | end
89 | end
90 |
91 | # Destructively automigrates the data-store to match the model.
92 | # First migrates all models down and then up.
93 | # REPEAT: THIS IS DESTRUCTIVE
94 | #
95 | # @api public
96 | def auto_migrate!
97 | DataMapper.auto_migrate!(name)
98 | end
99 |
100 | # Safely migrates the data-store to match the model
101 | # preserving data already in the data-store
102 | #
103 | # @api public
104 | def auto_upgrade!
105 | DataMapper.auto_upgrade!(name)
106 | end
107 | end # module Repository
108 |
    module Model

      # When mixed in, extend every already-defined model with this module
      # as well (models defined later are covered by append_extensions).
      #
      # @api private
      def self.included(mod)
        mod.descendants.each { |model| model.extend self }
      end

      # Check whether this model's storage exists in the given repository.
      #
      # @param [Symbol] repository_name the repository to check
      #
      # @return [Boolean]
      #
      # @api semipublic
      def storage_exists?(repository_name = default_repository_name)
        repository(repository_name).storage_exists?(storage_name(repository_name))
      end

      # Destructively automigrates the data-store to match the model
      # REPEAT: THIS IS DESTRUCTIVE
      #
      # @param Symbol repository_name the repository to be migrated
      #
      # @api public
      def auto_migrate!(repository_name = self.repository_name)
        assert_valid(true)
        auto_migrate_down!(repository_name)
        auto_migrate_up!(repository_name)
      end

      # Safely migrates the data-store to match the model
      # preserving data already in the data-store
      #
      # @param Symbol repository_name the repository to be migrated
      #
      # @api public
      def auto_upgrade!(repository_name = self.repository_name)
        assert_valid(true)
        base_model = self.base_model
        # Only the base model manages storage; descendants delegate upward.
        if base_model == self
          repository(repository_name).upgrade_model_storage(self)
        else
          base_model.auto_upgrade!(repository_name)
        end
      end

      # Destructively migrates the data-store down, which basically
      # deletes all the models.
      # REPEAT: THIS IS DESTRUCTIVE
      #
      # @param Symbol repository_name the repository to be migrated
      #
      # @api private
      def auto_migrate_down!(repository_name = self.repository_name)
        assert_valid(true)
        base_model = self.base_model
        # Only the base model manages storage; descendants delegate upward.
        if base_model == self
          repository(repository_name).destroy_model_storage(self)
        else
          base_model.auto_migrate_down!(repository_name)
        end
      end

      # Auto migrates the data-store to match the model
      #
      # @param Symbol repository_name the repository to be migrated
      #
      # @api private
      def auto_migrate_up!(repository_name = self.repository_name)
        assert_valid(true)
        base_model = self.base_model
        # Only the base model manages storage; descendants delegate upward.
        if base_model == self
          repository(repository_name).create_model_storage(self)
        else
          base_model.auto_migrate_up!(repository_name)
        end
      end

    end # module Model
182 |
    # Install the migration API into dm-core: extends DataMapper itself with
    # SingletonMethods, includes the Repository and Model mixins into the
    # corresponding dm-core classes, registers the Model mixin via
    # append_extensions, and mixes the matching Migrations module into every
    # adapter loaded so far.
    #
    # @api private
    def self.include_migration_api
      DataMapper.extend(SingletonMethods)
      [ :Repository, :Model ].each do |name|
        DataMapper.const_get(name).send(:include, const_get(name))
      end
      DataMapper::Model.append_extensions(Model)
      Adapters::AbstractAdapter.descendants.each do |adapter_class|
        Adapters.include_migration_api(DataMapper::Inflector.demodulize(adapter_class.name))
      end
    end
193 |
194 | end
195 |
  module Adapters

    # Require the adapter-specific migration extension file and, when a
    # matching Migrations::<const_name> module exists, mix it into the
    # adapter class. Missing extension files are ignored (the adapter
    # simply doesn't support migrations).
    def self.include_migration_api(const_name)
      require auto_migration_extensions(const_name)
      if Migrations.const_defined?(const_name)
        adapter = const_get(const_name)
        adapter.send(:include, migration_module(const_name))
      end
    rescue LoadError
      # Silently ignore the fact that no adapter extensions could be required
      # This means that the adapter in use doesn't support migrations
    end

    # Look up the Migrations module that matches the adapter constant name.
    def self.migration_module(const_name)
      Migrations.const_get(const_name)
    end

    class << self
      private

      # Map an adapter constant name to its dm-migrations extension path
      # (the dataobjects adapter maps to the shared 'do' extension).
      #
      # @api private
      def auto_migration_extensions(const_name)
        name = adapter_name(const_name)
        name = 'do' if name == 'dataobjects'
        "dm-migrations/adapters/dm-#{name}-adapter"
      end

    end

    extendable do
      # Hook run by dm-core when a new adapter constant is registered;
      # wires in that adapter's migration API automatically.
      #
      # @api private
      def const_added(const_name)
        include_migration_api(const_name)
        super
      end
    end

  end # module Adapters
234 |
235 | Migrations.include_migration_api
236 |
237 | end # module DataMapper
238 |
--------------------------------------------------------------------------------
/spec/integration/sql_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | describe "SQL generation" do
4 |
5 | supported_by :postgres, :mysql, :sqlite, :oracle, :sqlserver do
6 |
7 | describe DataMapper::Migration, "#create_table helper" do
8 | before :all do
9 |
10 | @adapter = DataMapper::Spec.adapter
11 | @repository = DataMapper.repository(@adapter.name)
12 |
13 | case DataMapper::Spec.adapter_name.to_sym
14 | when :sqlite then @adapter.extend(SQL::Sqlite)
15 | when :mysql then @adapter.extend(SQL::Mysql)
16 | when :postgres then @adapter.extend(SQL::Postgres)
17 | end
18 |
19 | end
20 |
21 | before do
22 | @creator = DataMapper::Migration::TableCreator.new(@adapter, :people) do
23 | column :id, DataMapper::Property::Serial
24 | column :name, 'VARCHAR(50)', :allow_nil => false
25 | column :long_string, String, :size => 200
26 | end
27 | end
28 |
29 | it "should have a #create_table helper" do
30 | @migration = DataMapper::Migration.new(1, :create_people_table, :verbose => false) { }
31 | @migration.should respond_to(:create_table)
32 | end
33 |
34 | it "should have a table_name" do
35 | @creator.table_name.should == "people"
36 | end
37 |
38 | it "should have an adapter" do
39 | @creator.instance_eval("@adapter").should == @adapter
40 | end
41 |
42 | it "should have an options hash" do
43 | @creator.opts.should be_kind_of(Hash)
44 | @creator.opts.should == {}
45 | end
46 |
47 | it "should have an array of columns" do
48 | @creator.instance_eval("@columns").should be_kind_of(Array)
49 | @creator.instance_eval("@columns").size.should == 3
50 | @creator.instance_eval("@columns").first.should be_kind_of(DataMapper::Migration::TableCreator::Column)
51 | end
52 |
53 | it "should quote the table name for the adapter" do
54 | @creator.quoted_table_name.should == (DataMapper::Spec.adapter_name.to_sym == :mysql ? '`people`' : '"people"')
55 | end
56 |
57 | it "should allow for custom options" do
58 | columns = @creator.instance_eval("@columns")
59 | col = columns.detect{|c| c.name == "long_string"}
60 | col.instance_eval("@type").should include("200")
61 | end
62 |
63 | it "should generate a NOT NULL column when :allow_nil is false" do
64 | @creator.instance_eval("@columns")[1].type.should match(/NOT NULL/)
65 | end
66 |
67 | case DataMapper::Spec.adapter_name.to_sym
68 | when :mysql
69 | it "should create an InnoDB database for MySQL" do
70 | #can't get an exact == comparison here because character set and collation may differ per connection
71 | @creator.to_sql.should match(/^CREATE TABLE `people` \(`id` SERIAL PRIMARY KEY, `name` VARCHAR\(50\) NOT NULL, `long_string` VARCHAR\(200\)\) ENGINE = InnoDB CHARACTER SET \w+ COLLATE \w+\z/)
72 | end
73 | when :postgres
74 | it "should output a CREATE TABLE statement when sent #to_sql" do
75 | @creator.to_sql.should == %q{CREATE TABLE "people" ("id" SERIAL PRIMARY KEY, "name" VARCHAR(50) NOT NULL, "long_string" VARCHAR(200))}
76 | end
77 | when :sqlite3
78 | it "should output a CREATE TABLE statement when sent #to_sql" do
79 | @creator.to_sql.should == %q{CREATE TABLE "people" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" VARCHAR(50) NOT NULL, "long_string" VARCHAR(200))}
80 | end
81 | end
82 | end
83 |
    # Smoke test for the #modify_table migration helper.
    describe DataMapper::Migration, "#modify_table helper" do
      before do
        @migration = DataMapper::Migration.new(1, :create_people_table, :verbose => false) { }
      end

      it "should have a #modify_table helper" do
        @migration.should respond_to(:modify_table)
      end

    end
94 |
    # Smoke test for the remaining migration helpers.
    describe DataMapper::Migration, "other helpers" do
      before do
        @migration = DataMapper::Migration.new(1, :create_people_table, :verbose => false) { }
      end

      it "should have a #drop_table helper" do
        @migration.should respond_to(:drop_table)
      end

    end
105 |
    # Exercises migration_info bookkeeping: creating the tracking table,
    # inserting and removing records, and the needs_up?/needs_down?
    # predicates that decide whether a migration still has to run.
    describe DataMapper::Migration, "version tracking" do
      before(:each) do
        @migration = DataMapper::Migration.new(1, :create_people_table, :verbose => false) do
          up { :ran_up }
          down { :ran_down }
        end

        @migration.send(:create_migration_info_table_if_needed)
      end

      after(:each) { DataMapper::Spec.adapter.execute("DROP TABLE migration_info") rescue nil }

      # Simulates a previously-applied migration.
      def insert_migration_record
        DataMapper::Spec.adapter.execute("INSERT INTO migration_info (migration_name) VALUES ('create_people_table')")
      end

      it "should know if the migration_info table exists" do
        @migration.send(:migration_info_table_exists?).should be(true)
      end

      it "should know if the migration_info table does not exist" do
        DataMapper::Spec.adapter.execute("DROP TABLE migration_info") rescue nil
        @migration.send(:migration_info_table_exists?).should be(false)
      end

      it "should be able to find the migration_info record for itself" do
        insert_migration_record
        @migration.send(:migration_record).should_not be_empty
      end

      it "should know if a migration needs_up?" do
        @migration.send(:needs_up?).should be(true)
        insert_migration_record
        @migration.send(:needs_up?).should be(false)
      end

      it "should know if a migration needs_down?" do
        @migration.send(:needs_down?).should be(false)
        insert_migration_record
        @migration.send(:needs_down?).should be(true)
      end

      it "should properly quote the migration_info table via the adapter for use in queries" do
        @migration.send(:migration_info_table).should == @migration.quote_table_name("migration_info")
      end

      it "should properly quote the migration_info.migration_name column via the adapter for use in queries" do
        @migration.send(:migration_name_column).should == @migration.quote_column_name("migration_name")
      end

      it "should properly quote the migration's name for use in queries"
      # TODO how to i call the adapter's #escape_sql method?

      it "should create the migration_info table if it doesn't exist" do
        DataMapper::Spec.adapter.execute("DROP TABLE migration_info")
        @migration.send(:migration_info_table_exists?).should be(false)
        @migration.send(:create_migration_info_table_if_needed)
        @migration.send(:migration_info_table_exists?).should be(true)
      end

      it "should insert a record into the migration_info table on up" do
        @migration.send(:migration_record).should be_empty
        @migration.perform_up.should == :ran_up
        @migration.send(:migration_record).should_not be_empty
      end

      it "should remove a record from the migration_info table on down" do
        insert_migration_record
        @migration.send(:migration_record).should_not be_empty
        @migration.perform_down.should == :ran_down
        @migration.send(:migration_record).should be_empty
      end

      it "should not run the up action if the record exists in the table" do
        insert_migration_record
        @migration.perform_up.should_not == :ran_up
      end

      it "should not run the down action if the record does not exist in the table" do
        @migration.perform_down.should_not == :ran_down
      end

    end
189 | end
190 | end
191 |
--------------------------------------------------------------------------------
/lib/dm-migrations/adapters/dm-mysql-adapter.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/auto_migration'
2 | require 'dm-migrations/adapters/dm-do-adapter'
3 |
4 | module DataMapper
5 | module Migrations
6 | module MysqlAdapter
7 |
8 | DEFAULT_ENGINE = 'InnoDB'.freeze
9 | DEFAULT_CHARACTER_SET = 'utf8'.freeze
10 | DEFAULT_COLLATION = 'utf8_unicode_ci'.freeze
11 |
12 | include DataObjectsAdapter
13 |
14 | # @api private
      def self.included(base)
        # Extend the host adapter class with the generic DO migration class
        # methods first, then the MySQL-specific ones; the later extend takes
        # precedence in the lookup chain (e.g. for #type_map, which calls super).
        base.extend DataObjectsAdapter::ClassMethods
        base.extend ClassMethods
      end
19 |
20 | # @api semipublic
21 | def storage_exists?(storage_name)
22 | select('SHOW TABLES LIKE ?', storage_name).first == storage_name
23 | end
24 |
25 | # @api semipublic
26 | def field_exists?(storage_name, field)
27 | result = select("SHOW COLUMNS FROM #{quote_name(storage_name)} LIKE ?", field).first
28 | result ? result.field == field : false
29 | end
30 |
31 | module SQL #:nodoc:
32 | # private ## This cannot be private for current migrations
33 |
34 | VALUE_METHOD = RUBY_PLATFORM[/java/] ? :variable_value : :value
35 |
36 | # @api private
        def supports_serial?
          # MySQL supports auto-increment keys; the AUTO_INCREMENT clause is
          # appended in #property_schema_statement.
          true
        end
40 |
41 | # @api private
        def supports_drop_table_if_exists?
          # MySQL understands DROP TABLE IF EXISTS, so storage can be
          # destroyed without checking for the table first.
          true
        end
45 |
46 | # @api private
        def schema_name
          # TODO: is there a cleaner way to find out the current DB we are connected to?
          # The database name is the last path segment of the connection URI.
          normalized_uri.path.split('/').last
        end
51 |
52 | # @api private
53 | def create_table_statement(connection, model, properties)
54 | "#{super} ENGINE = #{DEFAULT_ENGINE} CHARACTER SET #{character_set} COLLATE #{collation}"
55 | end
56 |
57 | # @api private
58 | def property_schema_hash(property)
59 | schema = super
60 |
61 | if property.kind_of?(Property::Text)
62 | schema[:primitive] = text_column_statement(property.length)
63 | schema.delete(:default)
64 | end
65 |
66 | if property.kind_of?(Property::Integer)
67 | min = property.min
68 | max = property.max
69 |
70 | schema[:primitive] = integer_column_statement(min..max) if min && max
71 | end
72 |
73 | schema
74 | end
75 |
76 | # @api private
77 | def property_schema_statement(connection, schema)
78 | statement = super
79 |
80 | if supports_serial? && schema[:serial]
81 | statement << ' AUTO_INCREMENT'
82 | end
83 |
84 | statement
85 | end
86 |
87 | # @api private
        def character_set
          # Ask the server for the connection character set, falling back to
          # the utf8 default; cached (including the fallback) after first use.
          @character_set ||= show_variable('character_set_connection') || DEFAULT_CHARACTER_SET
        end
91 |
92 | # @api private
        def collation
          # Ask the server for the connection collation, falling back to the
          # default; cached (including the fallback) after first use.
          @collation ||= show_variable('collation_connection') || DEFAULT_COLLATION
        end
96 |
97 | # @api private
98 | def show_variable(name)
99 | result = select('SHOW VARIABLES LIKE ?', name).first
100 | result ? result.send(VALUE_METHOD).freeze : nil
101 | end
102 |
103 | private
104 |
105 | # Return SQL statement for the text column
106 | #
107 | # @param [Integer] length
108 | # the max allowed length
109 | #
110 | # @return [String]
111 | # the statement to create the text column
112 | #
113 | # @api private
114 | def text_column_statement(length)
115 | if length < 2**8 then 'TINYTEXT'
116 | elsif length < 2**16 then 'TEXT'
117 | elsif length < 2**24 then 'MEDIUMTEXT'
118 | elsif length < 2**32 then 'LONGTEXT'
119 |
120 | # http://www.postgresql.org/files/documentation/books/aw_pgsql/node90.html
121 | # Implies that PostgreSQL doesn't have a size limit on text
122 | # fields, so this param validation happens here instead of
123 | # DM::Property#initialize.
124 | else
125 | raise ArgumentError, "length of #{length} exceeds maximum size supported"
126 | end
127 | end
128 |
129 | # Return SQL statement for the integer column
130 | #
131 | # @param [Range] range
132 | # the min/max allowed integers
133 | #
134 | # @return [String]
135 | # the statement to create the integer column
136 | #
137 | # @api private
138 | def integer_column_statement(range)
139 | '%s(%d)%s' % [
140 | integer_column_type(range),
141 | integer_display_size(range),
142 | integer_statement_sign(range),
143 | ]
144 | end
145 |
146 | # Return the integer column type
147 | #
148 | # Use the smallest available column type that will satisfy the
149 | # allowable range of numbers
150 | #
151 | # @param [Range] range
152 | # the min/max allowed integers
153 | #
154 | # @return [String]
155 | # the column type
156 | #
157 | # @api private
158 | def integer_column_type(range)
159 | if range.first < 0
160 | signed_integer_column_type(range)
161 | else
162 | unsigned_integer_column_type(range)
163 | end
164 | end
165 |
166 | # Return the signed integer column type
167 | #
168 | # @param [Range] range
169 | # the min/max allowed integers
170 | #
171 | # @return [String]
172 | #
173 | # @api private
174 | def signed_integer_column_type(range)
175 | min = range.first
176 | max = range.last
177 |
178 | tinyint = 2**7
179 | smallint = 2**15
180 | integer = 2**31
181 | mediumint = 2**23
182 | bigint = 2**63
183 |
184 | if min >= -tinyint && max < tinyint then 'TINYINT'
185 | elsif min >= -smallint && max < smallint then 'SMALLINT'
186 | elsif min >= -mediumint && max < mediumint then 'MEDIUMINT'
187 | elsif min >= -integer && max < integer then 'INT'
188 | elsif min >= -bigint && max < bigint then 'BIGINT'
189 | else
190 | raise ArgumentError, "min #{min} and max #{max} exceeds supported range"
191 | end
192 | end
193 |
194 | # Return the unsigned integer column type
195 | #
196 | # @param [Range] range
197 | # the min/max allowed integers
198 | #
199 | # @return [String]
200 | #
201 | # @api private
202 | def unsigned_integer_column_type(range)
203 | max = range.last
204 |
205 | if max < 2**8 then 'TINYINT'
206 | elsif max < 2**16 then 'SMALLINT'
207 | elsif max < 2**24 then 'MEDIUMINT'
208 | elsif max < 2**32 then 'INT'
209 | elsif max < 2**64 then 'BIGINT'
210 | else
211 | raise ArgumentError, "min #{range.first} and max #{max} exceeds supported range"
212 | end
213 | end
214 |
215 | # Return the integer column display size
216 | #
217 | # Adjust the display size to match the maximum number of
218 | # expected digits. This is more for documentation purposes
219 | # and does not affect what can actually be stored in a
220 | # specific column
221 | #
222 | # @param [Range] range
223 | # the min/max allowed integers
224 | #
225 | # @return [Integer]
226 | # the display size for the integer
227 | #
228 | # @api private
229 | def integer_display_size(range)
230 | [ range.first.to_s.length, range.last.to_s.length ].max
231 | end
232 |
233 | # Return the integer sign statement
234 | #
235 | # @param [Range] range
236 | # the min/max allowed integers
237 | #
238 | # @return [String, nil]
239 | # statement if unsigned, nil if signed
240 | #
241 | # @api private
242 | def integer_statement_sign(range)
243 | ' UNSIGNED' unless range.first < 0
244 | end
245 |
        # Indexes for the model, minus any MySQL cannot create
        # (indexes over TEXT columns)
        #
        # @api private
        def indexes(model)
          filter_indexes(model, super)
        end
250 |
        # Unique indexes for the model, minus any MySQL cannot create
        # (indexes over TEXT columns)
        #
        # @api private
        def unique_indexes(model)
          filter_indexes(model, super)
        end
255 |
256 | # Filter out any indexes with an unindexable column in MySQL
257 | #
258 | # @api private
259 | def filter_indexes(model, indexes)
260 | field_map = model.properties(name).field_map
261 | indexes.select do |index_name, fields|
262 | fields.all? { |field| !field_map[field].kind_of?(Property::Text) }
263 | end
264 | end
265 | end # module SQL
266 |
267 | include SQL
268 |
      module ClassMethods
        # Types for MySQL databases.
        #
        # Overrides the DataObjects defaults so that DateTime and Time
        # map to MySQL's DATETIME type instead of TIMESTAMP.
        #
        # @return [Hash] types for MySQL databases.
        #
        # @api private
        def type_map
          @type_map ||= super.merge(
            DateTime => { :primitive => 'DATETIME' },
            Time => { :primitive => 'DATETIME' }
          ).freeze
        end
      end
282 |
283 | end
284 | end
285 | end
286 |
--------------------------------------------------------------------------------
/lib/dm-migrations/migration.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/exceptions/duplicate_migration'
2 | require 'dm-migrations/sql'
3 |
4 | require 'benchmark'
5 |
6 | module DataMapper
7 | class Migration
8 | include SQL
9 |
10 | # The position or version the migration belongs to
11 | attr_reader :position
12 |
13 | # The name of the migration
14 | attr_reader :name
15 |
16 | # The repository the migration operates on
17 | attr_reader :repository
18 |
19 | #
20 | # Creates a new migration.
21 | #
22 | # @param [Symbol, String, Integer] position
23 | # The position or version the migration belongs to.
24 | #
25 | # @param [Symbol] name
26 | # The name of the migration.
27 | #
28 | # @param [Hash] options
29 | # Additional options for the migration.
30 | #
31 | # @option options [Boolean] :verbose (true)
32 | # Enables or disables verbose output.
33 | #
34 | # @option options [Symbol] :repository (:default)
35 | # The DataMapper repository the migration will operate on.
36 | #
37 | def initialize(position, name, options = {}, &block)
38 | @position = position
39 | @name = name
40 | @options = options
41 | @verbose = options.fetch(:verbose, true)
42 | @up_action = nil
43 | @down_action = nil
44 |
45 | @repository = if options.key?(:database)
46 | warn 'Using the :database option with migrations is deprecated, use :repository instead'
47 | options[:database]
48 | else
49 | options.fetch(:repository, :default)
50 | end
51 |
52 | instance_eval(&block)
53 | end
54 |
55 | #
56 | # The repository the migration will operate on.
57 | #
58 | # @return [Symbol, nil]
59 | # The name of the DataMapper repository the migration will run against.
60 | #
61 | # @deprecated Use {#repository} instead.
62 | #
63 | # @since 1.0.1.
64 | #
65 | def database
66 | warn "Using the DataMapper::Migration#database method is deprecated, use #repository instead"
67 | @repository
68 | end
69 |
70 | #
71 | # The adapter the migration will use.
72 | #
73 | # @return [DataMapper::Adapter]
74 | # The adapter the migration will operate on.
75 | #
76 | # @since 1.0.1
77 | #
78 | def adapter
79 | setup! unless setup?
80 |
81 | @adapter
82 | end
83 |
84 | # define the actions that should be performed on an up migration
85 | def up(&block)
86 | @up_action = block
87 | end
88 |
89 | # define the actions that should be performed on a down migration
90 | def down(&block)
91 | @down_action = block
92 | end
93 |
94 | # perform the migration by running the code in the #up block
95 | def perform_up
96 | result = nil
97 |
98 | if needs_up?
99 | # TODO: fix this so it only does transactions for databases that support create/drop
100 | # database.transaction.commit do
101 | if @up_action
102 | say_with_time "== Performing Up Migration ##{position}: #{name}", 0 do
103 | result = @up_action.call
104 | end
105 | end
106 |
107 | update_migration_info(:up)
108 | # end
109 | end
110 |
111 | result
112 | end
113 |
114 | # un-do the migration by running the code in the #down block
115 | def perform_down
116 | result = nil
117 |
118 | if needs_down?
119 | # TODO: fix this so it only does transactions for databases that support create/drop
120 | # database.transaction.commit do
121 | if @down_action
122 | say_with_time "== Performing Down Migration ##{position}: #{name}", 0 do
123 | result = @down_action.call
124 | end
125 | end
126 |
127 | update_migration_info(:down)
128 | # end
129 | end
130 |
131 | result
132 | end
133 |
134 | # execute raw SQL
135 | def execute(sql, *bind_values)
136 | say_with_time(sql) do
137 | adapter.execute(sql, *bind_values)
138 | end
139 | end
140 |
141 | def create_table(table_name, opts = {}, &block)
142 | execute TableCreator.new(adapter, table_name, opts, &block).to_sql
143 | end
144 |
145 | def drop_table(table_name, opts = {})
146 | execute "DROP TABLE #{adapter.send(:quote_name, table_name.to_s)}"
147 | end
148 |
149 | def modify_table(table_name, opts = {}, &block)
150 | TableModifier.new(adapter, table_name, opts, &block).statements.each do |sql|
151 | execute(sql)
152 | end
153 | end
154 |
155 | def create_index(table_name, *columns_and_options)
156 | if columns_and_options.last.is_a?(Hash)
157 | opts = columns_and_options.pop
158 | else
159 | opts = {}
160 | end
161 | columns = columns_and_options.flatten
162 |
163 | opts[:name] ||= "#{opts[:unique] ? 'unique_' : ''}index_#{table_name}_#{columns.join('_')}"
164 |
165 | execute <<-SQL.compress_lines
166 | CREATE #{opts[:unique] ? 'UNIQUE ' : '' }INDEX #{quote_column_name(opts[:name])} ON
167 | #{quote_table_name(table_name)} (#{columns.map { |c| quote_column_name(c) }.join(', ') })
168 | SQL
169 | end
170 |
171 | # Orders migrations by position, so we know what order to run them in.
172 | # First order by position, then by name, so at least the order is predictable.
173 | def <=> other
174 | if self.position == other.position
175 | self.name.to_s <=> other.name.to_s
176 | else
177 | self.position <=> other.position
178 | end
179 | end
180 |
181 | # Output some text. Optional indent level
182 | def say(message, indent = 4)
183 | write "#{" " * indent} #{message}"
184 | end
185 |
186 | # Time how long the block takes to run, and output it with the message.
187 | def say_with_time(message, indent = 2)
188 | say(message, indent)
189 | result = nil
190 | time = Benchmark.measure { result = yield }
191 | say("-> %.4fs" % time.real, indent)
192 | result
193 | end
194 |
195 | # output the given text, but only if verbose mode is on
196 | def write(text="")
197 | puts text if @verbose
198 | end
199 |
200 | # Inserts or removes a row into the `migration_info` table, so we can mark this migration as run, or un-done
201 | def update_migration_info(direction)
202 | save, @verbose = @verbose, false
203 |
204 | create_migration_info_table_if_needed
205 |
206 | if direction.to_sym == :up
207 | execute("INSERT INTO #{migration_info_table} (#{migration_name_column}) VALUES (#{quoted_name})")
208 | elsif direction.to_sym == :down
209 | execute("DELETE FROM #{migration_info_table} WHERE #{migration_name_column} = #{quoted_name}")
210 | end
211 | @verbose = save
212 | end
213 |
214 | def create_migration_info_table_if_needed
215 | save, @verbose = @verbose, false
216 | unless migration_info_table_exists?
217 | execute("CREATE TABLE #{migration_info_table} (#{migration_name_column} VARCHAR(255) UNIQUE)")
218 | end
219 | @verbose = save
220 | end
221 |
222 | # Quote the name of the migration for use in SQL
223 | def quoted_name
224 | "'#{name}'"
225 | end
226 |
227 | def migration_info_table_exists?
228 | adapter.storage_exists?('migration_info')
229 | end
230 |
231 | # Fetch the record for this migration out of the migration_info table
232 | def migration_record
233 | return [] unless migration_info_table_exists?
234 | adapter.select("SELECT #{migration_name_column} FROM #{migration_info_table} WHERE #{migration_name_column} = #{quoted_name}")
235 | end
236 |
237 | # True if the migration needs to be run
238 | def needs_up?
239 | return true unless migration_info_table_exists?
240 | migration_record.empty?
241 | end
242 |
243 | # True if the migration has already been run
244 | def needs_down?
245 | return false unless migration_info_table_exists?
246 | ! migration_record.empty?
247 | end
248 |
249 | # Quoted table name, for the adapter
250 | def migration_info_table
251 | @migration_info_table ||= quote_table_name('migration_info')
252 | end
253 |
254 | # Quoted `migration_name` column, for the adapter
255 | def migration_name_column
256 | @migration_name_column ||= quote_column_name('migration_name')
257 | end
258 |
259 | def quote_table_name(table_name)
260 | # TODO: Fix this for 1.9 - can't use this hack to access a private method
261 | adapter.send(:quote_name, table_name.to_s)
262 | end
263 |
264 | def quote_column_name(column_name)
265 | # TODO: Fix this for 1.9 - can't use this hack to access a private method
266 | adapter.send(:quote_name, column_name.to_s)
267 | end
268 |
269 | protected
270 |
271 | #
272 | # Determines whether the migration has been setup.
273 | #
274 | # @return [Boolean]
275 | # Specifies whether the migration has been setup.
276 | #
277 | # @since 1.0.1
278 | #
279 | def setup?
280 | !(@adapter.nil?)
281 | end
282 |
283 | #
284 | # Sets up the migration.
285 | #
286 | # @since 1.0.1
287 | #
288 | def setup!
289 | @adapter = DataMapper.repository(@repository).adapter
290 |
291 | case @adapter.class.name
292 | when /Sqlite/ then @adapter.extend(SQL::Sqlite)
293 | when /Mysql/ then @adapter.extend(SQL::Mysql)
294 | when /Postgres/ then @adapter.extend(SQL::Postgres)
295 | else
296 | raise(RuntimeError,"Unsupported Migration Adapter #{@adapter.class}",caller)
297 | end
298 | end
299 | end
300 | end
301 |
--------------------------------------------------------------------------------
/lib/dm-migrations/adapters/dm-do-adapter.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/auto_migration'
2 |
3 | module DataMapper
4 | module Migrations
5 |
6 | module DataObjectsAdapter
7 |
8 | # Returns whether the storage_name exists.
9 | #
10 | # @param [String] storage_name
11 | # a String defining the name of a storage, for example a table name.
12 | #
13 | # @return [Boolean]
14 | # true if the storage exists
15 | #
16 | # @api semipublic
17 | def storage_exists?(storage_name)
18 | statement = <<-SQL.compress_lines
19 | SELECT COUNT(*)
20 | FROM "information_schema"."tables"
21 | WHERE "table_type" = 'BASE TABLE'
22 | AND "table_schema" = ?
23 | AND "table_name" = ?
24 | SQL
25 |
26 | select(statement, schema_name, storage_name).first > 0
27 | end
28 |
29 | # Returns whether the field exists.
30 | #
31 | # @param [String] storage_name
32 | # a String defining the name of a storage, for example a table name.
33 | # @param [String] field
34 | # a String defining the name of a field, for example a column name.
35 | #
36 | # @return [Boolean]
37 | # true if the field exists.
38 | #
39 | # @api semipublic
40 | def field_exists?(storage_name, column_name)
41 | statement = <<-SQL.compress_lines
42 | SELECT COUNT(*)
43 | FROM "information_schema"."columns"
44 | WHERE "table_schema" = ?
45 | AND "table_name" = ?
46 | AND "column_name" = ?
47 | SQL
48 |
49 | select(statement, schema_name, storage_name, column_name).first > 0
50 | end
51 |
52 | # @api semipublic
53 | def upgrade_model_storage(model)
54 | name = self.name
55 | properties = model.properties_with_subclasses(name)
56 |
57 | if success = create_model_storage(model)
58 | return properties
59 | end
60 |
61 | table_name = model.storage_name(name)
62 |
63 | with_connection do |connection|
64 | properties.map do |property|
65 | schema_hash = property_schema_hash(property)
66 | next if field_exists?(table_name, schema_hash[:name])
67 |
68 | statement = alter_table_add_column_statement(connection, table_name, schema_hash)
69 | command = connection.create_command(statement)
70 | command.execute_non_query
71 |
72 | # For simple :index => true columns, add an appropriate index.
73 | # Upgrading doesn't know how to deal with complex indexes yet.
74 | if property.options[:index] === true
75 | statement = create_index_statement(model, property.name, [property.field])
76 | command = connection.create_command(statement)
77 | command.execute_non_query
78 | end
79 |
80 | property
81 | end.compact
82 | end
83 | end
84 |
85 | # @api semipublic
86 | def create_model_storage(model)
87 | name = self.name
88 | properties = model.properties_with_subclasses(name)
89 |
90 | return false if storage_exists?(model.storage_name(name))
91 | return false if properties.empty?
92 |
93 | with_connection do |connection|
94 | statements = [ create_table_statement(connection, model, properties) ]
95 | statements.concat(create_index_statements(model))
96 | statements.concat(create_unique_index_statements(model))
97 |
98 | statements.each do |statement|
99 | command = connection.create_command(statement)
100 | command.execute_non_query
101 | end
102 | end
103 |
104 | true
105 | end
106 |
107 | # @api semipublic
108 | def destroy_model_storage(model)
109 | return true unless supports_drop_table_if_exists? || storage_exists?(model.storage_name(name))
110 | execute(drop_table_statement(model))
111 | true
112 | end
113 |
114 | module SQL #:nodoc:
115 | # private ## This cannot be private for current migrations
116 |
117 | # Adapters that support AUTO INCREMENT fields for CREATE TABLE
118 | # statements should overwrite this to return true
119 | #
120 | # @api private
121 | def supports_serial?
122 | false
123 | end
124 |
125 | # @api private
126 | def supports_drop_table_if_exists?
127 | false
128 | end
129 |
130 | # @api private
131 | def schema_name
132 | raise NotImplementedError, "#{self.class}#schema_name not implemented"
133 | end
134 |
135 | # @api private
136 | def alter_table_add_column_statement(connection, table_name, schema_hash)
137 | "ALTER TABLE #{quote_name(table_name)} #{add_column_statement} #{property_schema_statement(connection, schema_hash)}"
138 | end
139 |
140 | # @api private
141 | def create_table_statement(connection, model, properties)
142 | statement = <<-SQL.compress_lines
143 | CREATE TABLE #{quote_name(model.storage_name(name))}
144 | (#{properties.map { |property| property_schema_statement(connection, property_schema_hash(property)) }.join(', ')},
145 | PRIMARY KEY(#{ properties.key.map { |property| quote_name(property.field) }.join(', ')}))
146 | SQL
147 |
148 | statement
149 | end
150 |
151 | # @api private
152 | def drop_table_statement(model)
153 | table_name = quote_name(model.storage_name(name))
154 | if supports_drop_table_if_exists?
155 | "DROP TABLE IF EXISTS #{table_name}"
156 | else
157 | "DROP TABLE #{table_name}"
158 | end
159 | end
160 |
161 | # @api private
162 | def create_index_statements(model)
163 | name = self.name
164 | table_name = model.storage_name(name)
165 |
166 | indexes(model).map do |index_name, fields|
167 | create_index_statement(model, index_name, fields)
168 | end
169 | end
170 |
171 | # @api private
172 | def create_index_statement(model, index_name, fields)
173 | table_name = model.storage_name(name)
174 |
175 | <<-SQL.compress_lines
176 | CREATE INDEX #{quote_name("index_#{table_name}_#{index_name}")} ON
177 | #{quote_name(table_name)} (#{fields.map { |field| quote_name(field) }.join(', ')})
178 | SQL
179 | end
180 |
181 | # @api private
182 | def create_unique_index_statements(model)
183 | name = self.name
184 | table_name = model.storage_name(name)
185 | key = model.key(name).map { |property| property.field }
186 | unique_indexes = unique_indexes(model).reject { |index_name, fields| fields == key }
187 |
188 | unique_indexes.map do |index_name, fields|
189 | <<-SQL.compress_lines
190 | CREATE UNIQUE INDEX #{quote_name("unique_#{table_name}_#{index_name}")} ON
191 | #{quote_name(table_name)} (#{fields.map { |field| quote_name(field) }.join(', ')})
192 | SQL
193 | end
194 | end
195 |
196 | # @api private
197 | def property_schema_hash(property)
198 | primitive = property.primitive
199 | type_map = self.class.type_map
200 |
201 | schema = (type_map[property.class] || type_map[primitive]).merge(:name => property.field)
202 |
203 | schema_primitive = schema[:primitive]
204 |
205 | if primitive == String && schema_primitive != 'TEXT' && schema_primitive != 'CLOB' && schema_primitive != 'NVARCHAR'
206 | schema[:length] = property.length
207 | elsif primitive == BigDecimal || primitive == Float
208 | schema[:precision] = property.precision
209 | schema[:scale] = property.scale
210 | end
211 |
212 | schema[:allow_nil] = property.allow_nil?
213 | schema[:serial] = property.serial?
214 |
215 | default = property.default
216 |
217 | if default.nil? || default.respond_to?(:call)
218 | # remove the default if the property does not allow nil
219 | schema.delete(:default) unless schema[:allow_nil]
220 | else
221 | schema[:default] = property.dump(default)
222 | end
223 |
224 | schema
225 | end
226 |
227 | # @api private
228 | def property_schema_statement(connection, schema)
229 | statement = quote_name(schema[:name])
230 | statement << " #{schema[:primitive]}"
231 |
232 | length = schema[:length]
233 |
234 | if schema[:precision] && schema[:scale]
235 | statement << "(#{[ :precision, :scale ].map { |key| connection.quote_value(schema[key]) }.join(', ')})"
236 | elsif length == 'max'
237 | statement << '(max)'
238 | elsif length
239 | statement << "(#{connection.quote_value(length)})"
240 | end
241 |
242 | statement << " DEFAULT #{connection.quote_value(schema[:default])}" if schema.key?(:default)
243 | statement << ' NOT NULL' unless schema[:allow_nil]
244 | statement
245 | end
246 |
247 | # @api private
248 | def indexes(model)
249 | model.properties(name).indexes
250 | end
251 |
252 | # @api private
253 | def unique_indexes(model)
254 | model.properties(name).unique_indexes
255 | end
256 |
257 | # @api private
258 | def add_column_statement
259 | 'ADD COLUMN'
260 | end
261 | end # module SQL
262 |
263 | include SQL
264 |
265 | module ClassMethods
266 | # Default types for all data object based adapters.
267 | #
268 | # @return [Hash] default types for data objects adapters.
269 | #
270 | # @api private
271 | def type_map
272 | length = Property::String::DEFAULT_LENGTH
273 | precision = Property::Numeric::DEFAULT_PRECISION
274 | scale = Property::Decimal::DEFAULT_SCALE
275 |
276 | @type_map ||= {
277 | Property::Binary => { :primitive => 'BLOB' },
278 | Object => { :primitive => 'TEXT' },
279 | Integer => { :primitive => 'INTEGER' },
280 | String => { :primitive => 'VARCHAR', :length => length },
281 | Class => { :primitive => 'VARCHAR', :length => length },
282 | BigDecimal => { :primitive => 'DECIMAL', :precision => precision, :scale => scale },
283 | Float => { :primitive => 'FLOAT', :precision => precision },
284 | DateTime => { :primitive => 'TIMESTAMP' },
285 | Date => { :primitive => 'DATE' },
286 | Time => { :primitive => 'TIMESTAMP' },
287 | TrueClass => { :primitive => 'BOOLEAN' },
288 | Property::Text => { :primitive => 'TEXT' },
289 | }.freeze
290 | end
291 | end
292 | end
293 |
294 | end
295 | end
296 |
--------------------------------------------------------------------------------
/lib/dm-migrations/adapters/dm-oracle-adapter.rb:
--------------------------------------------------------------------------------
1 | require 'dm-migrations/auto_migration'
2 | require 'dm-migrations/adapters/dm-do-adapter'
3 |
4 | module DataMapper
5 | module Migrations
    module OracleAdapter

      include DataObjectsAdapter

      # Hook that copies the DataObjects class-level helpers onto the
      # including adapter class.
      #
      # @api private
      def self.included(base)
        base.extend DataObjectsAdapter::ClassMethods
        base.extend ClassMethods
      end

      # Returns whether the table exists in the current schema.
      #
      # @api semipublic
      def storage_exists?(storage_name)
        statement = <<-SQL.compress_lines
          SELECT COUNT(*)
          FROM all_tables
          WHERE owner = ?
          AND table_name = ?
        SQL

        select(statement, schema_name, oracle_upcase(storage_name)).first > 0
      end

      # Returns whether the sequence exists in the current schema.
      #
      # @return [Boolean] false when sequence_name is nil
      #
      # @api semipublic
      def sequence_exists?(sequence_name)
        return false unless sequence_name
        statement = <<-SQL.compress_lines
          SELECT COUNT(*)
          FROM all_sequences
          WHERE sequence_owner = ?
          AND sequence_name = ?
        SQL

        select(statement, schema_name, oracle_upcase(sequence_name)).first > 0
      end

      # Returns whether the column exists on the given table.
      #
      # @api semipublic
      def field_exists?(storage_name, field_name)
        statement = <<-SQL.compress_lines
          SELECT COUNT(*)
          FROM all_tab_columns
          WHERE owner = ?
          AND table_name = ?
          AND column_name = ?
        SQL

        select(statement, schema_name, oracle_upcase(storage_name), oracle_upcase(field_name)).first > 0
      end

      # The column names of the given table, as stored in the data
      # dictionary (uppercased unless mixed case was used).
      #
      # @api semipublic
      def storage_fields(storage_name)
        statement = <<-SQL.compress_lines
          SELECT column_name
          FROM all_tab_columns
          WHERE owner = ?
          AND table_name = ?
        SQL

        select(statement, schema_name, oracle_upcase(storage_name))
      end

      # Oracle needs CASCADE CONSTRAINTS so dependent foreign keys do not
      # block the drop.
      def drop_table_statement(model)
        table_name = quote_name(model.storage_name(name))
        "DROP TABLE #{table_name} CASCADE CONSTRAINTS"
      end


      # Create the model's table, indexes and sequences.
      #
      # When auto_migrate_with is :truncate/:delete, a previously
      # truncated table is reused if it still has all required columns;
      # otherwise it is force-dropped and recreated.
      #
      # @api semipublic
      def create_model_storage(model)
        name       = self.name
        properties = model.properties_with_subclasses(name)
        table_name = model.storage_name(name)
        truncate_or_delete = self.class.auto_migrate_with
        table_is_truncated = truncate_or_delete && @truncated_tables && @truncated_tables[table_name]

        return false if storage_exists?(table_name) && !table_is_truncated
        return false if properties.empty?

        with_connection do |connection|
          # if table was truncated then check if all columns for properties are present
          # TODO: check all other column definition options
          if table_is_truncated && storage_has_all_fields?(table_name, properties)
            @truncated_tables[table_name] = nil
          else
            # forced drop of table if properties are different
            if truncate_or_delete
              destroy_model_storage(model, true)
            end

            statements = [ create_table_statement(connection, model, properties) ]
            statements.concat(create_index_statements(model))
            statements.concat(create_unique_index_statements(model))
            statements.concat(create_sequence_statements(model))

            statements.each do |statement|
              command = connection.create_command(statement)
              command.execute_non_query
            end
          end

        end

        true
      end

      # Drop (or, when auto_migrate_with is set and +forced+ is false,
      # truncate/delete) the model's table, and reset or drop its sequence.
      #
      # @param [Boolean] forced
      #   when true, always DROP even if truncation is configured
      #
      # @api semipublic
      def destroy_model_storage(model, forced = false)
        table_name = model.storage_name(name)
        klass = self.class
        truncate_or_delete = klass.auto_migrate_with
        if storage_exists?(table_name)
          if truncate_or_delete && !forced
            case truncate_or_delete
            when :truncate
              execute(truncate_table_statement(model))
            when :delete
              execute(delete_table_statement(model))
            else
              raise ArgumentError, "Unsupported auto_migrate_with option"
            end
            # remember the truncation so create_model_storage can reuse the table
            @truncated_tables ||= {}
            @truncated_tables[table_name] = true
          else
            execute(drop_table_statement(model))
            @truncated_tables[table_name] = nil if @truncated_tables
          end
        end
        # added destroy of sequences
        reset_sequences = klass.auto_migrate_reset_sequences
        table_is_truncated = @truncated_tables && @truncated_tables[table_name]
        unless truncate_or_delete && !reset_sequences && !forced
          if sequence_exists?(model_sequence_name(model))
            statement = if table_is_truncated && !forced
              reset_sequence_statement(model)
            else
              drop_sequence_statement(model)
            end
            execute(statement) if statement
          end
        end
        true
      end

      private

      # true when the table already has a column for every property
      def storage_has_all_fields?(table_name, properties)
        properties.map { |property| oracle_upcase(property.field) }.sort == storage_fields(table_name).sort
      end

      # If table or column name contains just lowercase characters then do uppercase
      # as uppercase version will be used in Oracle data dictionary tables
      def oracle_upcase(name)
        name =~ /[A-Z]/ ? name : name.upcase
      end

      module SQL #:nodoc:
        # private ## This cannot be private for current migrations

        # The current Oracle schema, memoized.
        #
        # @api private
        def schema_name
          @schema_name ||= select("SELECT SYS_CONTEXT('userenv','current_schema') FROM dual").first.freeze
        end

        # Statements creating the serial property's sequence, plus a
        # BEFORE INSERT trigger that fills the key from the sequence
        # (only when no custom sequence name was configured).
        #
        # @api private
        def create_sequence_statements(model)
          name       = self.name
          table_name = model.storage_name(name)
          serial     = model.serial(name)

          statements = []
          if sequence_name = model_sequence_name(model)
            sequence_name = quote_name(sequence_name)
            column_name   = quote_name(serial.field)

            statements << <<-SQL.compress_lines
              CREATE SEQUENCE #{sequence_name} NOCACHE
            SQL

            # create trigger only if custom sequence name was not specified
            unless serial.options[:sequence]
              statements << <<-SQL.compress_lines
                CREATE OR REPLACE TRIGGER #{quote_name(default_trigger_name(table_name))}
                BEFORE INSERT ON #{quote_name(table_name)} FOR EACH ROW
                BEGIN
                  IF inserting THEN
                    IF :new.#{column_name} IS NULL THEN
                      SELECT #{sequence_name}.NEXTVAL INTO :new.#{column_name} FROM dual;
                    END IF;
                  END IF;
                END;
              SQL
            end
          end

          statements
        end

        # @api private
        def drop_sequence_statement(model)
          if sequence_name = model_sequence_name(model)
            "DROP SEQUENCE #{quote_name(sequence_name)}"
          else
            nil
          end
        end

        # PL/SQL block that winds the sequence back to 0 by applying a
        # negative increment, so a truncated table starts keys from 1 again.
        #
        # @api private
        def reset_sequence_statement(model)
          if sequence_name = model_sequence_name(model)
            sequence_name = quote_name(sequence_name)
            <<-SQL.compress_lines
              DECLARE
                cval   INTEGER;
              BEGIN
                SELECT #{sequence_name}.NEXTVAL INTO cval FROM dual;
                EXECUTE IMMEDIATE 'ALTER SEQUENCE #{sequence_name} INCREMENT BY -' || cval || ' MINVALUE 0';
                SELECT #{sequence_name}.NEXTVAL INTO cval FROM dual;
                EXECUTE IMMEDIATE 'ALTER SEQUENCE #{sequence_name} INCREMENT BY 1';
              END;
            SQL
          else
            nil
          end

        end

        # @api private
        def truncate_table_statement(model)
          "TRUNCATE TABLE #{quote_name(model.storage_name(name))}"
        end

        # @api private
        def delete_table_statement(model)
          "DELETE FROM #{quote_name(model.storage_name(name))}"
        end

        private

        # The model's sequence name: the serial property's :sequence
        # option, or a default derived from the table name; nil when the
        # model has no serial property.
        def model_sequence_name(model)
          name       = self.name
          table_name = model.storage_name(name)
          serial     = model.serial(name)

          if serial
            serial.options[:sequence] || default_sequence_name(table_name)
          else
            nil
          end
        end

        def default_sequence_name(table_name)
          # truncate table name if necessary to fit in max length of identifier
          "#{table_name[0,self.class::IDENTIFIER_MAX_LENGTH-4]}_seq"
        end

        def default_trigger_name(table_name)
          # truncate table name if necessary to fit in max length of identifier
          "#{table_name[0,self.class::IDENTIFIER_MAX_LENGTH-4]}_pkt"
        end

        # Oracle's ALTER TABLE uses "ADD", not "ADD COLUMN"
        #
        # @api private
        def add_column_statement
          'ADD'
        end

      end # module SQL

      include SQL

      module ClassMethods
        # Types for Oracle databases.
        #
        # @return [Hash] types for Oracle databases.
        #
        # @api private
        def type_map
          length    = Property::String::DEFAULT_LENGTH
          precision = Property::Numeric::DEFAULT_PRECISION
          scale     = Property::Decimal::DEFAULT_SCALE

          @type_map ||= {
            Integer        => { :primitive => 'NUMBER',   :precision => precision, :scale => 0 },
            String         => { :primitive => 'VARCHAR2', :length => length },
            Class          => { :primitive => 'VARCHAR2', :length => length },
            BigDecimal     => { :primitive => 'NUMBER',   :precision => precision, :scale => nil },
            Float          => { :primitive => 'BINARY_FLOAT', },
            DateTime       => { :primitive => 'DATE' },
            Date           => { :primitive => 'DATE' },
            Time           => { :primitive => 'DATE' },
            TrueClass      => { :primitive => 'NUMBER',  :precision => 1, :scale => 0 },
            Property::Text => { :primitive => 'CLOB' },
          }.freeze
        end

        # Use table truncate or delete for auto_migrate! to speed up test execution
        #
        # @param [Symbol] value
        #   :truncate, :delete or :drop_and_create (or nil);
        #   do not specify the parameter to return the current value
        #
        # @return [Symbol] current value of auto_migrate_with option (nil returned for :drop_and_create)
        #
        # @api semipublic
        def auto_migrate_with(value = :not_specified)
          return @auto_migrate_with if value == :not_specified
          value = nil if value == :drop_and_create
          raise ArgumentError unless [nil, :truncate, :delete].include?(value)
          @auto_migrate_with = value
        end

        # Set if sequences will or will not be reset during auto_migrate!
        #
        # @param [TrueClass, FalseClass] value
        #   whether to reset sequences;
        #   do not specify the parameter to return the current value
        #
        # @return [Symbol] current value of auto_migrate_reset_sequences option (default value is true)
        #
        # @api semipublic
        def auto_migrate_reset_sequences(value = :not_specified)
          return @auto_migrate_reset_sequences.nil? ? true : @auto_migrate_reset_sequences if value == :not_specified
          raise ArgumentError unless [true, false].include?(value)
          @auto_migrate_reset_sequences = value
        end

      end

    end
331 | end
332 | end
333 |
--------------------------------------------------------------------------------
/spec/unit/migration_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | describe 'Migration' do
4 | supported_by :postgres, :mysql, :sqlite do
5 | before do
6 | @adapter = mock('adapter', :class => DataMapper::Spec.adapter.class)
7 | @repo = mock('DataMapper.repository', :adapter => @adapter)
8 | DataMapper.stub!(:repository).and_return(@repo)
9 | @m = DataMapper::Migration.new(1, :do_nothing, {}) {}
10 | @m.stub!(:write) # silence any output
11 | end
12 |
13 | [:position, :name, :database, :adapter].each do |meth|
14 | it "should respond to ##{meth}" do
15 | @m.should respond_to(meth)
16 | end
17 | end
18 |
19 | describe 'initialization' do
20 | it 'should set @position from the given position' do
21 | @m.instance_variable_get(:@position).should == 1
22 | end
23 |
24 | it 'should set @name from the given name' do
25 | @m.instance_variable_get(:@name).should == :do_nothing
26 | end
27 |
28 | it 'should set @options from the options hash' do
29 | @m.instance_variable_get(:@options).should == {}
30 | end
31 |
32 | it 'should set @repository from the default repository if no :repository option is given' do
33 | m = DataMapper::Migration.new(1, :do_nothing, {}) {}
34 |
35 | m.instance_variable_get(:@repository).should == :default
36 | end
37 |
38 | it 'should set @repository to the specified :repository option' do
39 | m = DataMapper::Migration.new(1, :do_nothing, :repository => :foobar) {}
40 |
41 | m.instance_variable_get(:@repository).should == :foobar
42 | end
43 |
44 | it 'should set @verbose from the options hash' do
45 | m = DataMapper::Migration.new(1, :do_nothing, :verbose => false) {}
46 | m.instance_variable_get(:@verbose).should be(false)
47 | end
48 |
49 | it 'should set @verbose to true by default' do
50 | @m.instance_variable_get(:@verbose).should be(true)
51 | end
52 |
53 | it 'should set the @up_action to nil' do
54 | @m.instance_variable_get(:@up_action).should be_nil
55 | end
56 |
57 | it 'should set the @down_action to nil' do
58 | @m.instance_variable_get(:@down_action).should be_nil
59 | end
60 |
61 | it 'should evaluate the given block'
62 |
63 | end
64 |
65 | it 'should set the @up_action when #up is called with a block' do
66 | action = lambda {}
67 | @m.up(&action)
68 | @m.instance_variable_get(:@up_action).should == action
69 | end
70 |
71 | it 'should set the @up_action when #up is called with a block' do
72 | action = lambda {}
73 | @m.down(&action)
74 | @m.instance_variable_get(:@down_action).should == action
75 | end
76 |
77 | describe 'adapter' do
78 | before(:each) do
79 | @m.instance_variable_set(:@adapter, nil)
80 | end
81 |
82 | it 'should determine the class of the adapter to be extended' do
83 | @adapter.should_receive(:class).and_return(DataMapper::Spec.adapter.class)
84 |
85 | @m.adapter
86 | end
87 |
88 | it 'should extend the adapter with the right module' do
89 | @adapter.should_receive(:extend).with(SQL.const_get(DataMapper::Spec.adapter_name.capitalize))
90 |
91 | @m.adapter
92 | end
93 |
94 | it 'should raise "Unsupported adapter" on an unknown adapter' do
95 | @adapter.should_receive(:class).any_number_of_times.and_return("InvalidAdapter")
96 |
97 | lambda { @m.adapter }.should raise_error
98 | end
99 | end
100 |
101 | describe 'perform_up' do
102 | before do
103 | @up_action = mock('proc', :call => true)
104 | @m.instance_variable_set(:@up_action, @up_action)
105 | @m.stub!(:needs_up?).and_return(true)
106 | @m.stub!(:update_migration_info)
107 | end
108 |
109 | it 'should call the action assigned to @up_action and return the result' do
110 | @up_action.should_receive(:call).and_return(:result)
111 | @m.perform_up.should == :result
112 | end
113 |
114 | it 'should output a status message with the position and name of the migration' do
115 | @m.should_receive(:write).with(/Performing Up Migration #1: do_nothing/)
116 | @m.perform_up
117 | end
118 |
119 | it 'should not run if it doesnt need to be' do
120 | @m.should_receive(:needs_up?).and_return(false)
121 | @up_action.should_not_receive(:call)
122 | @m.perform_up
123 | end
124 |
125 | it 'should update the migration info table' do
126 | @m.should_receive(:update_migration_info).with(:up)
127 | @m.perform_up
128 | end
129 |
130 | it 'should not update the migration info table if the migration does not need run' do
131 | @m.should_receive(:needs_up?).and_return(false)
132 | @m.should_not_receive(:update_migration_info)
133 | @m.perform_up
134 | end
135 |
136 | end
137 |
138 | describe 'perform_down' do
139 | before do
140 | @down_action = mock('proc', :call => true)
141 | @m.instance_variable_set(:@down_action, @down_action)
142 | @m.stub!(:needs_down?).and_return(true)
143 | @m.stub!(:update_migration_info)
144 | end
145 |
146 | it 'should call the action assigned to @down_action and return the result' do
147 | @down_action.should_receive(:call).and_return(:result)
148 | @m.perform_down.should == :result
149 | end
150 |
151 | it 'should output a status message with the position and name of the migration' do
152 | @m.should_receive(:write).with(/Performing Down Migration #1: do_nothing/)
153 | @m.perform_down
154 | end
155 |
156 | it 'should not run if it doesnt need to be' do
157 | @m.should_receive(:needs_down?).and_return(false)
158 | @down_action.should_not_receive(:call)
159 | @m.perform_down
160 | end
161 |
162 | it 'should update the migration info table' do
163 | @m.should_receive(:update_migration_info).with(:down)
164 | @m.perform_down
165 | end
166 |
167 | it 'should not update the migration info table if the migration does not need run' do
168 | @m.should_receive(:needs_down?).and_return(false)
169 | @m.should_not_receive(:update_migration_info)
170 | @m.perform_down
171 | end
172 |
173 | end
174 |
175 | describe 'methods used in the action blocks' do
176 |
177 | describe '#execute' do
178 | before do
179 | @adapter.stub!(:execute)
180 | end
181 |
182 | it 'should send the SQL it its executing to the adapter execute method' do
183 | @adapter.should_receive(:execute).with('SELECT SOME SQL')
184 | @m.execute('SELECT SOME SQL')
185 | end
186 |
187 | it 'should output the SQL it is executing' do
188 | @m.should_receive(:write).with(/SELECT SOME SQL/)
189 | @m.execute('SELECT SOME SQL')
190 | end
191 | end
192 |
193 | describe 'helpers' do
194 | before do
195 | @m.stub!(:execute) # don't actually run anything
196 | end
197 |
198 | describe '#create_table' do
199 | before do
200 | @tc = mock('TableCreator', :to_sql => 'CREATE TABLE')
201 | SQL::TableCreator.stub!(:new).and_return(@tc)
202 | end
203 |
204 | it 'should create a new TableCreator object' do
205 | SQL::TableCreator.should_receive(:new).with(@adapter, :users, {}).and_return(@tc)
206 | @m.create_table(:users) { }
207 | end
208 |
209 | it 'should convert the TableCreator object to an sql statement' do
210 | @tc.should_receive(:to_sql).and_return('CREATE TABLE')
211 | @m.create_table(:users) { }
212 | end
213 |
214 | it 'should execute the create table sql' do
215 | @m.should_receive(:execute).with('CREATE TABLE')
216 | @m.create_table(:users) { }
217 | end
218 |
219 | end
220 |
221 | describe '#drop_table' do
222 | it 'should quote the table name' do
223 | @adapter.should_receive(:quote_name).with('users')
224 | @m.drop_table :users
225 | end
226 |
227 | it 'should execute the DROP TABLE sql for the table' do
228 | @adapter.stub!(:quote_name).and_return("'users'")
229 | @m.should_receive(:execute).with(%{DROP TABLE 'users'})
230 | @m.drop_table :users
231 | end
232 |
233 | end
234 |
235 | describe '#modify_table' do
236 | before do
237 | @tm = mock('TableModifier', :statements => [])
238 | SQL::TableModifier.stub!(:new).and_return(@tm)
239 | end
240 |
241 | it 'should create a new TableModifier object' do
242 | SQL::TableModifier.should_receive(:new).with(@adapter, :users, {}).and_return(@tm)
243 | @m.modify_table(:users){ }
244 | end
245 |
246 | it 'should get the statements from the TableModifier object' do
247 | @tm.should_receive(:statements).and_return([])
248 | @m.modify_table(:users){ }
249 | end
250 |
251 | it 'should iterate over the statements and execute each one' do
252 | @tm.should_receive(:statements).and_return(['SELECT 1', 'SELECT 2'])
253 | @m.should_receive(:execute).with('SELECT 1')
254 | @m.should_receive(:execute).with('SELECT 2')
255 | @m.modify_table(:users){ }
256 | end
257 |
258 | end
259 |
260 | describe 'sorting' do
261 | it 'should order things by position' do
262 | m1 = DataMapper::Migration.new(1, :do_nothing){}
263 | m2 = DataMapper::Migration.new(2, :do_nothing_else){}
264 |
265 | (m1 <=> m2).should == -1
266 | end
267 |
268 | it 'should order things by name when they have the same position' do
269 | m1 = DataMapper::Migration.new(1, :do_nothing_a){}
270 | m2 = DataMapper::Migration.new(1, :do_nothing_b){}
271 |
272 | (m1 <=> m2).should == -1
273 | end
274 |
275 | end
276 |
277 | describe 'formatting output' do
278 | describe '#say' do
279 | it 'should output the message' do
280 | @m.should_receive(:write).with(/Paul/)
281 | @m.say("Paul")
282 | end
283 |
284 | it 'should indent the message with 4 spaces by default' do
285 | @m.should_receive(:write).with(/^\s{4}/)
286 | @m.say("Paul")
287 | end
288 |
289 | it 'should indext the message with a given number of spaces' do
290 | @m.should_receive(:write).with(/^\s{3}/)
291 | @m.say("Paul", 3)
292 | end
293 | end
294 |
295 | describe '#say_with_time' do
296 | before do
297 | @m.stub!(:say)
298 | end
299 |
300 | it 'should say the message with an indent of 2' do
301 | @m.should_receive(:say).with("Paul", 2)
302 | @m.say_with_time("Paul"){}
303 | end
304 |
305 | it 'should output the time it took' do
306 | @m.should_receive(:say).with(/\d+/, 2)
307 | @m.say_with_time("Paul"){}
308 | end
309 | end
310 |
311 | describe '#write' do
312 | before do
313 | # need a new migration object, because the main one had #write stubbed to silence output
314 | @m = DataMapper::Migration.new(1, :do_nothing) {}
315 | end
316 |
317 | it 'should puts the message' do
318 | @m.should_receive(:puts).with("Paul")
319 | @m.write("Paul")
320 | end
321 |
322 | it 'should not puts the message if @verbose is false' do
323 | @m.instance_variable_set(:@verbose, false)
324 | @m.should_not_receive(:puts)
325 | @m.write("Paul")
326 | end
327 |
328 | end
329 |
330 | end
331 |
332 | describe 'working with the migration_info table' do
333 |       before do
334 |         @adapter.stub!(:storage_exists?).and_return(true)
335 |         # NOTE(review): quote_name is faked to wrap names in single quotes so SQL strings are predictable — confirm against real adapter quoting before removing these stubs
336 |         @adapter.stub!(:quote_name).and_return { |name| "'#{name}'" }
337 |       end
338 |
339 | describe '#update_migration_info' do
340 | it 'should add a record of the migration' do
341 | @m.should_receive(:execute).with(
342 | %Q{INSERT INTO 'migration_info' ('migration_name') VALUES ('do_nothing')}
343 | )
344 | @m.update_migration_info(:up)
345 | end
346 |
347 | it 'should remove the record of the migration' do
348 | @m.should_receive(:execute).with(
349 | %Q{DELETE FROM 'migration_info' WHERE 'migration_name' = 'do_nothing'}
350 | )
351 | @m.update_migration_info(:down)
352 | end
353 |
354 | it 'should try to create the migration_info table' do
355 | @m.should_receive(:create_migration_info_table_if_needed)
356 | @m.update_migration_info(:up)
357 | end
358 | end
359 |
360 | describe '#create_migration_info_table_if_needed' do
361 | it 'should create the migration info table' do
362 | @m.should_receive(:migration_info_table_exists?).and_return(false)
363 | @m.should_receive(:execute).with(
364 | %Q{CREATE TABLE 'migration_info' ('migration_name' VARCHAR(255) UNIQUE)}
365 | )
366 | @m.create_migration_info_table_if_needed
367 | end
368 |
369 | it 'should not try to create the migration info table if it already exists' do
370 | @m.should_receive(:migration_info_table_exists?).and_return(true)
371 | @m.should_not_receive(:execute)
372 | @m.create_migration_info_table_if_needed
373 | end
374 | end
375 |
376 | it 'should quote the name of the migration for use in sql' do
377 | @m.quoted_name.should == %{'do_nothing'}
378 | end
379 |
380 | it 'should query the adapter to see if the migration_info table exists' do
381 | @adapter.should_receive(:storage_exists?).with('migration_info').and_return(true)
382 | @m.migration_info_table_exists?.should == true
383 | end
384 |
385 | describe '#migration_record' do
386 | it 'should query for the migration' do
387 | @adapter.should_receive(:select).with(
388 | %Q{SELECT 'migration_name' FROM 'migration_info' WHERE 'migration_name' = 'do_nothing'}
389 | )
390 | @m.migration_record
391 | end
392 |
393 | it 'should not try to query if the table does not exist' do
394 | @m.stub!(:migration_info_table_exists?).and_return(false)
395 | @adapter.should_not_receive(:select)
396 | @m.migration_record
397 | end
398 |
399 | end
400 |
401 | describe '#needs_up?' do
402 | it 'should be true if there is no record' do
403 | @m.should_receive(:migration_record).and_return([])
404 | @m.needs_up?.should == true
405 | end
406 |
407 | it 'should be false if the record exists' do
408 | @m.should_receive(:migration_record).and_return([:not_empty])
409 | @m.needs_up?.should == false
410 | end
411 |
412 | it 'should be true if there is no migration_info table' do
413 | @m.should_receive(:migration_info_table_exists?).and_return(false)
414 | @m.needs_up?.should == true
415 | end
416 |
417 | end
418 |
419 | describe '#needs_down?' do
420 | it 'should be false if there is no record' do
421 | @m.should_receive(:migration_record).and_return([])
422 | @m.needs_down?.should == false
423 | end
424 |
425 | it 'should be true if the record exists' do
426 | @m.should_receive(:migration_record).and_return([:not_empty])
427 | @m.needs_down?.should == true
428 | end
429 |
430 | it 'should be false if there is no migration_info table' do
431 | @m.should_receive(:migration_info_table_exists?).and_return(false)
432 | @m.needs_down?.should == false
433 | end
434 |
435 | end
436 |
437 | it 'should have the adapter quote the migration_info table' do
438 | @adapter.should_receive(:quote_name).with('migration_info').and_return("'migration_info'")
439 | @m.migration_info_table.should == "'migration_info'"
440 | end
441 |
442 | it 'should have a quoted migration_name_column' do
443 | @adapter.should_receive(:quote_name).with('migration_name').and_return("'migration_name'")
444 | @m.migration_name_column.should == "'migration_name'"
445 | end
446 |
447 | end
448 |
449 | end
450 |
451 | end
452 | end
453 | end
454 |
--------------------------------------------------------------------------------
/spec/integration/auto_migration_spec.rb:
--------------------------------------------------------------------------------
1 | require 'spec_helper'
2 |
3 | require 'dm-migrations/auto_migration'
4 |
5 | describe DataMapper::Migrations do
6 | def capture_log(mod)
7 | original, mod.logger = mod.logger, DataObjects::Logger.new(@log = StringIO.new, :debug)
8 | yield
9 | ensure
10 | @log.rewind
11 | @output = @log.readlines.map do |line|
12 | line.chomp.gsub(/\A.+?~ \(\d+\.?\d*\)\s+/, '')
13 | end
14 |
15 | mod.logger = original
16 | end
17 |
18 | before :all do
19 | class DataMapper::Property::NumericString < DataMapper::Property::String
20 | default 0
21 |
22 | def dump(value)
23 | return if value.nil?
24 | value.to_s
25 | end
26 | end
27 | end
28 |
29 | supported_by :mysql do
30 | before :all do
31 | module ::Blog
32 | class Article
33 | include DataMapper::Resource
34 | end
35 | end
36 |
37 | @model = ::Blog::Article
38 | end
39 |
40 | describe '#auto_migrate' do
41 | describe 'Integer property' do
42 | [
43 | [ 0, 1, 'TINYINT(1) UNSIGNED' ],
44 | [ 0, 9, 'TINYINT(1) UNSIGNED' ],
45 | [ 0, 10, 'TINYINT(2) UNSIGNED' ],
46 | [ 0, 99, 'TINYINT(2) UNSIGNED' ],
47 | [ 0, 100, 'TINYINT(3) UNSIGNED' ],
48 | [ 0, 255, 'TINYINT(3) UNSIGNED' ],
49 | [ 0, 256, 'SMALLINT(3) UNSIGNED' ],
50 | [ 0, 999, 'SMALLINT(3) UNSIGNED' ],
51 | [ 0, 1000, 'SMALLINT(4) UNSIGNED' ],
52 | [ 0, 9999, 'SMALLINT(4) UNSIGNED' ],
53 | [ 0, 10000, 'SMALLINT(5) UNSIGNED' ],
54 | [ 0, 65535, 'SMALLINT(5) UNSIGNED' ],
55 | [ 0, 65536, 'MEDIUMINT(5) UNSIGNED' ],
56 | [ 0, 99999, 'MEDIUMINT(5) UNSIGNED' ],
57 | [ 0, 100000, 'MEDIUMINT(6) UNSIGNED' ],
58 | [ 0, 999999, 'MEDIUMINT(6) UNSIGNED' ],
59 | [ 0, 1000000, 'MEDIUMINT(7) UNSIGNED' ],
60 | [ 0, 9999999, 'MEDIUMINT(7) UNSIGNED' ],
61 | [ 0, 10000000, 'MEDIUMINT(8) UNSIGNED' ],
62 | [ 0, 16777215, 'MEDIUMINT(8) UNSIGNED' ],
63 | [ 0, 16777216, 'INT(8) UNSIGNED' ],
64 | [ 0, 99999999, 'INT(8) UNSIGNED' ],
65 | [ 0, 100000000, 'INT(9) UNSIGNED' ],
66 | [ 0, 999999999, 'INT(9) UNSIGNED' ],
67 | [ 0, 1000000000, 'INT(10) UNSIGNED' ],
68 | [ 0, 4294967295, 'INT(10) UNSIGNED' ],
69 | [ 0, 4294967296, 'BIGINT(10) UNSIGNED' ],
70 | [ 0, 9999999999, 'BIGINT(10) UNSIGNED' ],
71 | [ 0, 10000000000, 'BIGINT(11) UNSIGNED' ],
72 | [ 0, 99999999999, 'BIGINT(11) UNSIGNED' ],
73 | [ 0, 100000000000, 'BIGINT(12) UNSIGNED' ],
74 | [ 0, 999999999999, 'BIGINT(12) UNSIGNED' ],
75 | [ 0, 1000000000000, 'BIGINT(13) UNSIGNED' ],
76 | [ 0, 9999999999999, 'BIGINT(13) UNSIGNED' ],
77 | [ 0, 10000000000000, 'BIGINT(14) UNSIGNED' ],
78 | [ 0, 99999999999999, 'BIGINT(14) UNSIGNED' ],
79 | [ 0, 100000000000000, 'BIGINT(15) UNSIGNED' ],
80 | [ 0, 999999999999999, 'BIGINT(15) UNSIGNED' ],
81 | [ 0, 1000000000000000, 'BIGINT(16) UNSIGNED' ],
82 | [ 0, 9999999999999999, 'BIGINT(16) UNSIGNED' ],
83 | [ 0, 10000000000000000, 'BIGINT(17) UNSIGNED' ],
84 | [ 0, 99999999999999999, 'BIGINT(17) UNSIGNED' ],
85 | [ 0, 100000000000000000, 'BIGINT(18) UNSIGNED' ],
86 | [ 0, 999999999999999999, 'BIGINT(18) UNSIGNED' ],
87 | [ 0, 1000000000000000000, 'BIGINT(19) UNSIGNED' ],
88 | [ 0, 9999999999999999999, 'BIGINT(19) UNSIGNED' ],
89 | [ 0, 10000000000000000000, 'BIGINT(20) UNSIGNED' ],
90 | [ 0, 18446744073709551615, 'BIGINT(20) UNSIGNED' ],
91 |
92 | [ -1, 0, 'TINYINT(2)' ],
93 | [ -1, 9, 'TINYINT(2)' ],
94 | [ -1, 10, 'TINYINT(2)' ],
95 | [ -1, 99, 'TINYINT(2)' ],
96 | [ -1, 100, 'TINYINT(3)' ],
97 | [ -1, 127, 'TINYINT(3)' ],
98 | [ -1, 128, 'SMALLINT(3)' ],
99 | [ -1, 999, 'SMALLINT(3)' ],
100 | [ -1, 1000, 'SMALLINT(4)' ],
101 | [ -1, 9999, 'SMALLINT(4)' ],
102 | [ -1, 10000, 'SMALLINT(5)' ],
103 | [ -1, 32767, 'SMALLINT(5)' ],
104 | [ -1, 32768, 'MEDIUMINT(5)' ],
105 | [ -1, 99999, 'MEDIUMINT(5)' ],
106 | [ -1, 100000, 'MEDIUMINT(6)' ],
107 | [ -1, 999999, 'MEDIUMINT(6)' ],
108 | [ -1, 1000000, 'MEDIUMINT(7)' ],
109 | [ -1, 8388607, 'MEDIUMINT(7)' ],
110 | [ -1, 8388608, 'INT(7)' ],
111 | [ -1, 9999999, 'INT(7)' ],
112 | [ -1, 10000000, 'INT(8)' ],
113 | [ -1, 99999999, 'INT(8)' ],
114 | [ -1, 100000000, 'INT(9)' ],
115 | [ -1, 999999999, 'INT(9)' ],
116 | [ -1, 1000000000, 'INT(10)' ],
117 | [ -1, 2147483647, 'INT(10)' ],
118 | [ -1, 2147483648, 'BIGINT(10)' ],
119 | [ -1, 9999999999, 'BIGINT(10)' ],
120 | [ -1, 10000000000, 'BIGINT(11)' ],
121 | [ -1, 99999999999, 'BIGINT(11)' ],
122 | [ -1, 100000000000, 'BIGINT(12)' ],
123 | [ -1, 999999999999, 'BIGINT(12)' ],
124 | [ -1, 1000000000000, 'BIGINT(13)' ],
125 | [ -1, 9999999999999, 'BIGINT(13)' ],
126 | [ -1, 10000000000000, 'BIGINT(14)' ],
127 | [ -1, 99999999999999, 'BIGINT(14)' ],
128 | [ -1, 100000000000000, 'BIGINT(15)' ],
129 | [ -1, 999999999999999, 'BIGINT(15)' ],
130 | [ -1, 1000000000000000, 'BIGINT(16)' ],
131 | [ -1, 9999999999999999, 'BIGINT(16)' ],
132 | [ -1, 10000000000000000, 'BIGINT(17)' ],
133 | [ -1, 99999999999999999, 'BIGINT(17)' ],
134 | [ -1, 100000000000000000, 'BIGINT(18)' ],
135 | [ -1, 999999999999999999, 'BIGINT(18)' ],
136 | [ -1, 1000000000000000000, 'BIGINT(19)' ],
137 | [ -1, 9223372036854775807, 'BIGINT(19)' ],
138 |
139 | [ -1, 0, 'TINYINT(2)' ],
140 | [ -9, 0, 'TINYINT(2)' ],
141 | [ -10, 0, 'TINYINT(3)' ],
142 | [ -99, 0, 'TINYINT(3)' ],
143 | [ -100, 0, 'TINYINT(4)' ],
144 | [ -128, 0, 'TINYINT(4)' ],
145 | [ -129, 0, 'SMALLINT(4)' ],
146 | [ -999, 0, 'SMALLINT(4)' ],
147 | [ -1000, 0, 'SMALLINT(5)' ],
148 | [ -9999, 0, 'SMALLINT(5)' ],
149 | [ -10000, 0, 'SMALLINT(6)' ],
150 | [ -32768, 0, 'SMALLINT(6)' ],
151 | [ -32769, 0, 'MEDIUMINT(6)' ],
152 | [ -99999, 0, 'MEDIUMINT(6)' ],
153 | [ -100000, 0, 'MEDIUMINT(7)' ],
154 | [ -999999, 0, 'MEDIUMINT(7)' ],
155 | [ -1000000, 0, 'MEDIUMINT(8)' ],
156 | [ -8388608, 0, 'MEDIUMINT(8)' ],
157 | [ -8388609, 0, 'INT(8)' ],
158 | [ -9999999, 0, 'INT(8)' ],
159 | [ -10000000, 0, 'INT(9)' ],
160 | [ -99999999, 0, 'INT(9)' ],
161 | [ -100000000, 0, 'INT(10)' ],
162 | [ -999999999, 0, 'INT(10)' ],
163 | [ -1000000000, 0, 'INT(11)' ],
164 | [ -2147483648, 0, 'INT(11)' ],
165 | [ -2147483649, 0, 'BIGINT(11)' ],
166 | [ -9999999999, 0, 'BIGINT(11)' ],
167 | [ -10000000000, 0, 'BIGINT(12)' ],
168 | [ -99999999999, 0, 'BIGINT(12)' ],
169 | [ -100000000000, 0, 'BIGINT(13)' ],
170 | [ -999999999999, 0, 'BIGINT(13)' ],
171 | [ -1000000000000, 0, 'BIGINT(14)' ],
172 | [ -9999999999999, 0, 'BIGINT(14)' ],
173 | [ -10000000000000, 0, 'BIGINT(15)' ],
174 | [ -99999999999999, 0, 'BIGINT(15)' ],
175 | [ -100000000000000, 0, 'BIGINT(16)' ],
176 | [ -999999999999999, 0, 'BIGINT(16)' ],
177 | [ -1000000000000000, 0, 'BIGINT(17)' ],
178 | [ -9999999999999999, 0, 'BIGINT(17)' ],
179 | [ -10000000000000000, 0, 'BIGINT(18)' ],
180 | [ -99999999999999999, 0, 'BIGINT(18)' ],
181 | [ -100000000000000000, 0, 'BIGINT(19)' ],
182 | [ -999999999999999999, 0, 'BIGINT(19)' ],
183 | [ -1000000000000000000, 0, 'BIGINT(20)' ],
184 | [ -9223372036854775808, 0, 'BIGINT(20)' ],
185 |
186 | [ nil, 2147483647, 'INT(10) UNSIGNED' ],
187 | [ 0, nil, 'INT(10) UNSIGNED' ],
188 | [ nil, nil, 'INTEGER' ],
189 | ].each do |min, max, statement|
190 | options = { :key => true }
191 | options[:min] = min if min
192 | options[:max] = max if max
193 |
194 | describe "with a min of #{min} and a max of #{max}" do
195 | before :all do
196 | @property = @model.property(:id, Integer, options)
197 |
198 | @response = capture_log(DataObjects::Mysql) { @model.auto_migrate! }
199 | end
200 |
201 | it 'should return true' do
202 | @response.should be(true)
203 | end
204 |
205 | it "should create a #{statement} column" do
206 | @output.last.should =~ %r{\ACREATE TABLE `blog_articles` \(`id` #{Regexp.escape(statement)} NOT NULL, PRIMARY KEY\(`id`\)\) ENGINE = InnoDB CHARACTER SET [a-z\d]+ COLLATE (?:[a-z\d](?:_?[a-z\d]+)*)\z}
207 | end
208 |
209 | options.only(:min, :max).each do |key, value|
210 | it "should allow the #{key} value #{value} to be stored" do
211 | pending_if "#{value} causes problem with JRuby 1.5.2 parser", RUBY_PLATFORM[/java/] && JRUBY_VERSION < '1.5.6' && value == -9223372036854775808 do
212 | lambda {
213 | resource = @model.create(@property => value)
214 | @model.first(@property => value).should eql(resource)
215 | }.should_not raise_error
216 | end
217 | end
218 | end
219 | end
220 | end
221 | end
222 |
223 | describe 'Text property' do
224 | before :all do
225 | @model.property(:id, DataMapper::Property::Serial)
226 | end
227 |
228 | [
229 | [ 0, 'TINYTEXT' ],
230 | [ 1, 'TINYTEXT' ],
231 | [ 255, 'TINYTEXT' ],
232 | [ 256, 'TEXT' ],
233 | [ 65535, 'TEXT' ],
234 | [ 65536, 'MEDIUMTEXT' ],
235 | [ 16777215, 'MEDIUMTEXT' ],
236 | [ 16777216, 'LONGTEXT' ],
237 | [ 4294967295, 'LONGTEXT' ],
238 |
239 | [ nil, 'TEXT' ],
240 | ].each do |length, statement|
241 | options = {}
242 | options[:length] = length if length
243 |
244 | describe "with a length of #{length}" do
245 | before :all do
246 | @property = @model.property(:body, DataMapper::Property::Text, options)
247 |
248 | @response = capture_log(DataObjects::Mysql) { @model.auto_migrate! }
249 | end
250 |
251 | it 'should return true' do
252 | @response.should be(true)
253 | end
254 |
255 | it "should create a #{statement} column" do
256 | @output.last.should =~ %r{\ACREATE TABLE `blog_articles` \(`id` INT\(10\) UNSIGNED NOT NULL AUTO_INCREMENT, `body` #{Regexp.escape(statement)}, PRIMARY KEY\(`id`\)\) ENGINE = InnoDB CHARACTER SET [a-z\d]+ COLLATE (?:[a-z\d](?:_?[a-z\d]+)*)\z}
257 | end
258 | end
259 | end
260 | end
261 |
262 | describe 'String property' do
263 | before :all do
264 | @model.property(:id, DataMapper::Property::Serial)
265 | end
266 |
267 | [
268 | [ 1, 'VARCHAR(1)' ],
269 | [ 50, 'VARCHAR(50)' ],
270 | [ 255, 'VARCHAR(255)' ],
271 | [ nil, 'VARCHAR(50)' ],
272 | ].each do |length, statement|
273 | options = {}
274 | options[:length] = length if length
275 |
276 | describe "with a length of #{length}" do
277 | before :all do
278 | @property = @model.property(:title, String, options)
279 |
280 | @response = capture_log(DataObjects::Mysql) { @model.auto_migrate! }
281 | end
282 |
283 | it 'should return true' do
284 | @response.should be(true)
285 | end
286 |
287 | it "should create a #{statement} column" do
288 | @output.last.should =~ %r{\ACREATE TABLE `blog_articles` \(`id` INT\(10\) UNSIGNED NOT NULL AUTO_INCREMENT, `title` #{Regexp.escape(statement)}, PRIMARY KEY\(`id`\)\) ENGINE = InnoDB CHARACTER SET [a-z\d]+ COLLATE (?:[a-z\d](?:_?[a-z\d]+)*)\z}
289 | end
290 | end
291 | end
292 | end
293 |
294 | describe 'NumericString property' do
295 | before :all do
296 | @model.property(:id, DataMapper::Property::Serial)
297 | @model.property(:number, DataMapper::Property::NumericString)
298 |
299 | @response = capture_log(DataObjects::Mysql) { @model.auto_migrate! }
300 | end
301 |
302 | it "should create a VARCHAR(50) column with a default of '0'" do
303 | @output.last.should =~ %r{\ACREATE TABLE `blog_articles` \(`id` INT\(10\) UNSIGNED NOT NULL AUTO_INCREMENT, `number` VARCHAR\(50\) DEFAULT '0', PRIMARY KEY\(`id`\)\) ENGINE = InnoDB CHARACTER SET [a-z\d]+ COLLATE (?:[a-z\d](?:_?[a-z\d]+)*)\z}
304 | end
305 | end
306 | end
307 | end
308 |
309 | supported_by :postgres do
310 | before :all do
311 | module ::Blog
312 | class Article
313 | include DataMapper::Resource
314 | end
315 | end
316 |
317 | @model = ::Blog::Article
318 | end
319 |
320 | describe '#auto_migrate' do
321 | describe 'Integer property' do
322 | [
323 | [ 0, 1, 'SMALLINT' ],
324 | [ 0, 32767, 'SMALLINT' ],
325 | [ 0, 32768, 'INTEGER' ],
326 | [ 0, 2147483647, 'INTEGER' ],
327 | [ 0, 2147483648, 'BIGINT' ],
328 | [ 0, 9223372036854775807, 'BIGINT' ],
329 |
330 | [ -1, 1, 'SMALLINT' ],
331 | [ -1, 32767, 'SMALLINT' ],
332 | [ -1, 32768, 'INTEGER' ],
333 | [ -1, 2147483647, 'INTEGER' ],
334 | [ -1, 2147483648, 'BIGINT' ],
335 | [ -1, 9223372036854775807, 'BIGINT' ],
336 |
337 | [ -1, 0, 'SMALLINT' ],
338 | [ -32768, 0, 'SMALLINT' ],
339 | [ -32769, 0, 'INTEGER' ],
340 | [ -2147483648, 0, 'INTEGER' ],
341 | [ -2147483649, 0, 'BIGINT' ],
342 | [ -9223372036854775808, 0, 'BIGINT' ],
343 |
344 | [ nil, 2147483647, 'INTEGER' ],
345 | [ 0, nil, 'INTEGER' ],
346 | [ nil, nil, 'INTEGER' ],
347 | ].each do |min, max, statement|
348 | options = { :key => true }
349 | options[:min] = min if min
350 | options[:max] = max if max
351 |
352 | describe "with a min of #{min} and a max of #{max}" do
353 | before :all do
354 | @property = @model.property(:id, Integer, options)
355 |
356 | @response = capture_log(DataObjects::Postgres) { @model.auto_migrate! }
357 | end
358 |
359 | it 'should return true' do
360 | @response.should be(true)
361 | end
362 |
363 | it "should create a #{statement} column" do
364 | @output[-2].should == "CREATE TABLE \"blog_articles\" (\"id\" #{statement} NOT NULL, PRIMARY KEY(\"id\"))"
365 | end
366 |
367 | options.only(:min, :max).each do |key, value|
368 | it "should allow the #{key} value #{value} to be stored" do
369 | pending_if "#{value} causes problem with JRuby 1.5.2 parser", RUBY_PLATFORM =~ /java/ && value == -9223372036854775808 do
370 | lambda {
371 | resource = @model.create(@property => value)
372 | @model.first(@property => value).should eql(resource)
373 | }.should_not raise_error
374 | end
375 | end
376 | end
377 | end
378 | end
379 | end
380 |
381 | describe 'Serial property' do
382 | [
383 | [ 1, 'SERIAL' ],
384 | [ 2147483647, 'SERIAL' ],
385 | [ 2147483648, 'BIGSERIAL' ],
386 | [ 9223372036854775807, 'BIGSERIAL' ],
387 |
388 | [ nil, 'SERIAL' ],
389 | ].each do |max, statement|
390 | options = {}
391 | options[:max] = max if max
392 |
393 | describe "with a max of #{max}" do
394 | before :all do
395 | @property = @model.property(:id, DataMapper::Property::Serial, options)
396 |
397 | @response = capture_log(DataObjects::Postgres) { @model.auto_migrate! }
398 | end
399 |
400 | it 'should return true' do
401 | @response.should be(true)
402 | end
403 |
404 | it "should create a #{statement} column" do
405 | @output[-2].should == "CREATE TABLE \"blog_articles\" (\"id\" #{statement} NOT NULL, PRIMARY KEY(\"id\"))"
406 | end
407 |
408 | options.only(:min, :max).each do |key, value|
409 | it "should allow the #{key} value #{value} to be stored" do
410 | lambda {
411 | resource = @model.create(@property => value)
412 | @model.first(@property => value).should eql(resource)
413 | }.should_not raise_error
414 | end
415 | end
416 | end
417 | end
418 | end
419 |
420 | describe 'String property' do
421 | before :all do
422 | @model.property(:id, DataMapper::Property::Serial)
423 | end
424 |
425 | [
426 | [ 1, 'VARCHAR(1)' ],
427 | [ 50, 'VARCHAR(50)' ],
428 | [ 255, 'VARCHAR(255)' ],
429 | [ nil, 'VARCHAR(50)' ],
430 | ].each do |length, statement|
431 | options = {}
432 | options[:length] = length if length
433 |
434 | describe "with a length of #{length}" do
435 | before :all do
436 | @property = @model.property(:title, String, options)
437 |
438 | @response = capture_log(DataObjects::Postgres) { @model.auto_migrate! }
439 | end
440 |
441 | it 'should return true' do
442 | @response.should be(true)
443 | end
444 |
445 | it "should create a #{statement} column" do
446 | @output[-2].should == "CREATE TABLE \"blog_articles\" (\"id\" SERIAL NOT NULL, \"title\" #{statement}, PRIMARY KEY(\"id\"))"
447 | end
448 | end
449 | end
450 | end
451 |
452 | describe 'NumericString property' do
453 | before :all do
454 | @model.property(:id, DataMapper::Property::Serial)
455 | @model.property(:number, DataMapper::Property::NumericString)
456 |
457 | @response = capture_log(DataObjects::Postgres) { @model.auto_migrate! }
458 | end
459 |
460 | it "should create a VARCHAR(50) column with a default of '0'" do
461 | @output[-2].should == "CREATE TABLE \"blog_articles\" (\"id\" SERIAL NOT NULL, \"number\" VARCHAR(50) DEFAULT '0', PRIMARY KEY(\"id\"))"
462 | end
463 | end
464 | end
465 | end
466 |
467 | supported_by :sqlserver do
468 | before :all do
469 | module ::Blog
470 | class Article
471 | include DataMapper::Resource
472 | end
473 | end
474 |
475 | @model = ::Blog::Article
476 | end
477 |
478 | describe '#auto_migrate' do
479 | describe 'Integer property' do
480 | [
481 | [ 0, 1, 'TINYINT' ],
482 | [ 0, 255, 'TINYINT' ],
483 | [ 0, 256, 'SMALLINT' ],
484 | [ 0, 32767, 'SMALLINT' ],
485 | [ 0, 32768, 'INT' ],
486 | [ 0, 2147483647, 'INT' ],
487 | [ 0, 2147483648, 'BIGINT' ],
488 | [ 0, 9223372036854775807, 'BIGINT' ],
489 |
490 | [ -1, 1, 'SMALLINT' ],
491 | [ -1, 255, 'SMALLINT' ],
492 | [ -1, 256, 'SMALLINT' ],
493 | [ -1, 32767, 'SMALLINT' ],
494 | [ -1, 32768, 'INT' ],
495 | [ -1, 2147483647, 'INT' ],
496 | [ -1, 2147483648, 'BIGINT' ],
497 | [ -1, 9223372036854775807, 'BIGINT' ],
498 |
499 | [ -1, 0, 'SMALLINT' ],
500 | [ -32768, 0, 'SMALLINT' ],
501 | [ -32769, 0, 'INT' ],
502 | [ -2147483648, 0, 'INT' ],
503 | [ -2147483649, 0, 'BIGINT' ],
504 | [ -9223372036854775808, 0, 'BIGINT' ],
505 |
506 | [ nil, 2147483647, 'INT' ],
507 | [ 0, nil, 'INT' ],
508 | [ nil, nil, 'INTEGER' ],
509 | ].each do |min, max, statement|
510 | options = { :key => true }
511 | options[:min] = min if min
512 | options[:max] = max if max
513 |
514 | describe "with a min of #{min} and a max of #{max}" do
515 | before :all do
516 | @property = @model.property(:id, Integer, options)
517 |
518 | @response = capture_log(DataObjects::Sqlserver) { @model.auto_migrate! }
519 | end
520 |
521 | it 'should return true' do
522 | @response.should be(true)
523 | end
524 |
525 | it "should create a #{statement} column" do
526 | @output.last.should == "CREATE TABLE \"blog_articles\" (\"id\" #{statement} NOT NULL, PRIMARY KEY(\"id\"))"
527 | end
528 |
529 | options.only(:min, :max).each do |key, value|
530 | it "should allow the #{key} value #{value} to be stored" do
531 | pending_if "#{value} causes problem with JRuby 1.5.2 parser", RUBY_PLATFORM =~ /java/ && value == -9223372036854775808 do
532 | lambda {
533 | resource = @model.create(@property => value)
534 | @model.first(@property => value).should eql(resource)
535 | }.should_not raise_error
536 | end
537 | end
538 | end
539 | end
540 | end
541 | end
542 |
543 | describe 'String property' do
544 | it 'needs specs'
545 | end
546 | end
547 | end
548 |
549 | end
550 |
--------------------------------------------------------------------------------