├── test ├── goldens-unit │ └── postgresql │ │ ├── complete-migration.txt │ │ ├── partial-migration.txt │ │ ├── partial-migration-avoids-shorter-path.txt │ │ ├── migration-with-shorter-path.txt │ │ └── basic-migration.txt ├── goldens-integration │ └── postgresql │ │ ├── migrate-from-empty.txt │ │ ├── migrate-with-v1-person.txt │ │ ├── migrations-are-idempotent.txt │ │ ├── migrate-with-default-colorblind.txt │ │ └── migrate-from-sex-to-gender.txt ├── unit │ ├── Main.hs │ ├── Utils │ │ └── Backends.hs │ ├── Migration.hs │ └── Property.hs ├── integration │ ├── Utils │ │ ├── RunSql.hs │ │ └── Backends.hs │ ├── Main.hs │ ├── Migration.hs │ └── Property.hs └── utils │ └── Utils │ ├── Goldens.hs │ └── QuickCheck.hs ├── .gitignore ├── stack.yaml ├── scripts ├── install-stack-deps.sh ├── hlint.sh ├── install-system-deps.sh └── stylish-haskell.sh ├── stack.yaml.lock ├── src └── Database │ └── Persist │ ├── Migration │ ├── Backend.hs │ ├── Utils │ │ ├── Plan.hs │ │ └── Sql.hs │ ├── Operation │ │ └── Types.hs │ ├── Operation.hs │ ├── Postgres.hs │ └── Core.hs │ └── Migration.hs ├── .stylish-haskell.yaml ├── DEVELOPER.md ├── LICENSE ├── CHANGELOG.md ├── package.yaml ├── .circleci └── config.yml └── README.md /test/goldens-unit/postgresql/complete-migration.txt: -------------------------------------------------------------------------------- 1 | 2 | [] 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .stack-work/ 3 | persistent-migration.cabal 4 | -------------------------------------------------------------------------------- /test/goldens-unit/postgresql/partial-migration.txt: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS "person" 2 | 3 | [] 4 | -------------------------------------------------------------------------------- /stack.yaml: -------------------------------------------------------------------------------- 1 | resolver: lts-18.11 2 | install-ghc: true 3 | 4 | packages: 5 | - . 6 | 7 | ghc-options: 8 | '$locals': -Werror 9 | -------------------------------------------------------------------------------- /test/goldens-unit/postgresql/partial-migration-avoids-shorter-path.txt: -------------------------------------------------------------------------------- 1 | ALTER TABLE "person" ADD COLUMN "gender" VARCHAR 2 | 3 | [] 4 | -------------------------------------------------------------------------------- /test/goldens-unit/postgresql/migration-with-shorter-path.txt: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS "person" ( "id" BIGSERIAL NOT NULL ,"gender" VARCHAR,PRIMARY KEY ( "id" ) ) 2 | 3 | [] 4 | -------------------------------------------------------------------------------- /scripts/install-stack-deps.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Install third-party stack dependencies. 4 | 5 | set -eo pipefail 6 | 7 | stack build -j1 --test --only-dependencies 8 | stack install -j1 hlint stylish-haskell 9 | -------------------------------------------------------------------------------- /scripts/hlint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Runs HLint and errors if any hints are found. 4 | 5 | set -eo pipefail 6 | 7 | builtin cd "$(dirname "${BASH_SOURCE[0]}")/.." 
8 | 9 | stack exec -- ~/.local/bin/hlint . 10 | -------------------------------------------------------------------------------- /test/goldens-integration/postgresql/migrate-from-empty.txt: -------------------------------------------------------------------------------- 1 | - hometown: 1 2 | gender: Female 3 | name: Alice 4 | colorblind: false 5 | - hometown: 1 6 | gender: Male 7 | name: Bob 8 | colorblind: true 9 | - hometown: 1 10 | gender: null 11 | name: Courtney 12 | colorblind: false 13 | -------------------------------------------------------------------------------- /test/goldens-integration/postgresql/migrate-with-v1-person.txt: -------------------------------------------------------------------------------- 1 | - hometown: 1 2 | gender: null 3 | name: David 4 | colorblind: false 5 | - hometown: 1 6 | gender: Female 7 | name: Alice 8 | colorblind: false 9 | - hometown: 1 10 | gender: Male 11 | name: Bob 12 | colorblind: true 13 | - hometown: 1 14 | gender: null 15 | name: Courtney 16 | colorblind: false 17 | -------------------------------------------------------------------------------- /test/goldens-integration/postgresql/migrations-are-idempotent.txt: -------------------------------------------------------------------------------- 1 | - hometown: 1 2 | gender: null 3 | name: David 4 | colorblind: true 5 | - hometown: 1 6 | gender: Female 7 | name: Alice 8 | colorblind: false 9 | - hometown: 1 10 | gender: Male 11 | name: Bob 12 | colorblind: true 13 | - hometown: 1 14 | gender: null 15 | name: Courtney 16 | colorblind: false 17 | -------------------------------------------------------------------------------- /test/goldens-integration/postgresql/migrate-with-default-colorblind.txt: -------------------------------------------------------------------------------- 1 | - hometown: 1 2 | gender: null 3 | name: David 4 | colorblind: false 5 | - hometown: 1 6 | gender: Female 7 | name: Alice 8 | colorblind: false 9 | - hometown: 1 10 | gender: Male 11 | name: Bob 12 | colorblind: true 13 | - hometown: 1 14 | gender: null 15 | name: Courtney 16 | colorblind: false 17 | -------------------------------------------------------------------------------- /test/goldens-unit/postgresql/basic-migration.txt: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS "person" ( "id" INT4,"name" VARCHAR NOT NULL,"age" INT4 NOT NULL,"alive" BOOLEAN NOT NULL,"hometown" INT8 REFERENCES "cities" ( "id" ),PRIMARY KEY ( "id" ),CONSTRAINT "unique_name" UNIQUE ( "name" ) ) 2 | ALTER TABLE "person" ADD COLUMN "gender" VARCHAR 3 | ALTER TABLE "person" DROP COLUMN "alive" 4 | DROP TABLE IF EXISTS "person" 5 | 6 | [] 7 | -------------------------------------------------------------------------------- /stack.yaml.lock: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by Stack. 2 | # You should not edit this file by hand. 
3 | # For more information, please see the documentation at: 4 | # https://docs.haskellstack.org/en/stable/lock_files 5 | 6 | packages: [] 7 | snapshots: 8 | - completed: 9 | size: 586041 10 | url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/18/11.yaml 11 | sha256: ae30d25a4691c6bb32b5766c617dff24270b2b3576ebd6047d754418e9256389 12 | original: lts-18.11 13 | -------------------------------------------------------------------------------- /test/goldens-integration/postgresql/migrate-from-sex-to-gender.txt: -------------------------------------------------------------------------------- 1 | - hometown: 1 2 | gender: null 3 | name: Foster 4 | colorblind: false 5 | - hometown: 1 6 | gender: Male 7 | name: David 8 | colorblind: false 9 | - hometown: 1 10 | gender: Female 11 | name: Elizabeth 12 | colorblind: false 13 | - hometown: 1 14 | gender: Female 15 | name: Alice 16 | colorblind: false 17 | - hometown: 1 18 | gender: Male 19 | name: Bob 20 | colorblind: true 21 | - hometown: 1 22 | gender: null 23 | name: Courtney 24 | colorblind: false 25 | -------------------------------------------------------------------------------- /scripts/install-system-deps.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Install system dependencies. 4 | 5 | set -eo pipefail 6 | 7 | function is_command() { 8 | type "$1" &> /dev/null 9 | } 10 | 11 | function install_darwin() { 12 | if is_command brew; then 13 | if ! is_command postgres; then 14 | brew install postgresql 15 | fi 16 | fi 17 | } 18 | 19 | function install_linux() { 20 | if is_command yum; then 21 | yum update -y --exclude=filesystem 22 | yum install -y zlib-devel postgresql-devel postgresql-server ncurses-devel 23 | fi 24 | } 25 | 26 | case "$(uname)" in 27 | (Darwin) install_darwin ;; 28 | (Linux) install_linux ;; 29 | esac 30 | -------------------------------------------------------------------------------- /src/Database/Persist/Migration/Backend.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Module : Database.Persist.Migration.Backend 3 | Maintainer : Brandon Chinn 4 | Stability : experimental 5 | Portability : portable 6 | 7 | Defines `MigrateBackend`, the data constructor that each SQL backend will need to implement. 8 | -} 9 | 10 | module Database.Persist.Migration.Backend (MigrateBackend(..)) where 11 | 12 | import Database.Persist.Migration.Operation (Operation) 13 | import Database.Persist.Migration.Utils.Sql (MigrateSql) 14 | import Database.Persist.Sql (SqlPersistT) 15 | 16 | -- | The backend to migrate with. 17 | newtype MigrateBackend = MigrateBackend 18 | { getMigrationSql :: Operation -> SqlPersistT IO [MigrateSql] 19 | } 20 | -------------------------------------------------------------------------------- /test/unit/Main.hs: -------------------------------------------------------------------------------- 1 | import Database.Persist.Migration (MigrateBackend) 2 | import qualified Database.Persist.Migration.Postgres as Postgres 3 | import Test.Tasty 4 | 5 | import Migration (testMigrations) 6 | import Property (testProperties) 7 | import Utils.Goldens (goldenDir) 8 | 9 | unitDir :: String -> FilePath 10 | unitDir = goldenDir "unit" 11 | 12 | main :: IO () 13 | main = defaultMain $ testGroup "persistent-migration-unit" 14 | [ testBackend "postgresql" Postgres.backend 15 | , testProperties 16 | ] 17 | 18 | -- | Build a test suite running unit tests for the given MigrateBackend. 
19 | testBackend :: String -> MigrateBackend -> TestTree 20 | testBackend label backend = testGroup label 21 | [ testMigrations (unitDir label) backend 22 | ] 23 | -------------------------------------------------------------------------------- /test/integration/Utils/RunSql.hs: -------------------------------------------------------------------------------- 1 | module Utils.RunSql 2 | ( runSql 3 | , runMigration 4 | ) where 5 | 6 | import Control.Monad.Reader (runReaderT) 7 | import Data.Pool (Pool, withResource) 8 | import Database.Persist.Migration (MigrateBackend, Migration, defaultSettings) 9 | import qualified Database.Persist.Migration.Core as Migration 10 | import Database.Persist.Sql (SqlBackend, SqlPersistT) 11 | 12 | -- | Run the given persistent query. 13 | runSql :: Pool SqlBackend -> SqlPersistT IO a -> IO a 14 | runSql pool = withResource pool . runReaderT 15 | 16 | -- | Run the given migration. 17 | runMigration :: MigrateBackend -> Pool SqlBackend -> Migration -> IO () 18 | runMigration backend pool = runSql pool . Migration.runMigration backend defaultSettings 19 | -------------------------------------------------------------------------------- /.stylish-haskell.yaml: -------------------------------------------------------------------------------- 1 | # stylish-haskell configuration file 2 | # ref: https://github.com/jaspervdj/stylish-haskell/blob/master/data/stylish-haskell.yaml 3 | 4 | steps: 5 | # Align the right hand side of some elements. This is quite conservative 6 | # and only applies to statements where each element occupies a single 7 | # line. 8 | - simple_align: 9 | cases: false 10 | top_level_patterns: false 11 | records: true 12 | 13 | - imports: 14 | align: none 15 | long_list_align: new_line_multiline 16 | list_padding: 4 17 | separate_lists: false 18 | 19 | - language_pragmas: 20 | align: false 21 | remove_redundant: true 22 | 23 | - tabs: 24 | spaces: 8 25 | 26 | - trailing_whitespace: {} 27 | 28 | newline: lf 29 | -------------------------------------------------------------------------------- /DEVELOPER.md: -------------------------------------------------------------------------------- 1 | # Developer README 2 | 3 | ## Setup 4 | 5 | * To set up your system, run `scripts/install-system-deps.sh` 6 | * If stack is not already installed, [install stack](https://docs.haskellstack.org/en/stable/install_and_upgrade/) 7 | * To install stack dependencies, run `scripts/install-stack-deps.sh` 8 | 9 | ## Build 10 | 11 | `stack build` 12 | 13 | ## Run tests 14 | 15 | * `stack test :persistent-migration-test` 16 | * Runs unit tests 17 | * Frameworks: tasty-golden, tasty-quickcheck 18 | * `stack test :persistent-migration-integration` 19 | * Runs integration tests 20 | * Frameworks: tasty-golden, tasty-quickcheck 21 | 22 | To accept goldens changes, run with `--test-arguments '--accept'` 23 | 24 | ## Linting 25 | 26 | * To run hlint: `scripts/hlint.sh` 27 | * To run stylish-haskell: 28 | * `scripts/stylish-haskell.sh` - will error if differences found 29 | * `scripts/stylish-haskell.sh --apply` - will overwrite (inline) with stylish fixes 30 | -------------------------------------------------------------------------------- /test/integration/Main.hs: -------------------------------------------------------------------------------- 1 | import Data.Pool (Pool) 2 | import Database.Persist.Migration (MigrateBackend) 3 | import qualified Database.Persist.Migration.Postgres as Postgres 4 | import Database.Persist.Sql (SqlBackend) 5 | import System.IO.Temp 
(withTempDirectory) 6 | import Test.Tasty 7 | 8 | import Migration (testMigrations) 9 | import Property (testProperties) 10 | import Utils.Backends (withPostgres) 11 | import Utils.Goldens (goldenDir) 12 | 13 | integrationDir :: String -> FilePath 14 | integrationDir = goldenDir "integration" 15 | 16 | main :: IO () 17 | main = withTempDirectory "/tmp" "persistent-migration-integration" $ \dir -> 18 | defaultMain $ testGroup "persistent-migration-integration" 19 | [ withPostgres dir $ testBackend "postgresql" Postgres.backend 20 | ] 21 | 22 | -- | Build a test suite running integration tests for the given MigrateBackend. 23 | testBackend :: String -> MigrateBackend -> IO (Pool SqlBackend) -> TestTree 24 | testBackend label backend getPool = testGroup label 25 | [ testMigrations (integrationDir label) backend getPool 26 | , testProperties backend getPool 27 | ] 28 | -------------------------------------------------------------------------------- /src/Database/Persist/Migration/Utils/Plan.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Module : Database.Persist.Migration.Utils.Plan 3 | Maintainer : Brandon Chinn 4 | Stability : experimental 5 | Portability : portable 6 | 7 | Define functions useful for compiling a plan of migration. 8 | -} 9 | {-# LANGUAGE TupleSections #-} 10 | 11 | module Database.Persist.Migration.Utils.Plan 12 | ( getPath 13 | ) where 14 | 15 | import Data.Graph.Inductive (Gr, mkGraph, sp) 16 | import Data.HashMap.Lazy ((!)) 17 | import qualified Data.HashMap.Lazy as HashMap 18 | import qualified Data.IntSet as IntSet 19 | 20 | type Node = Int 21 | type Edge = (Int, Int) 22 | 23 | -- | Given a list of edges and their data and a start/end node, return the shortest path. 24 | -- 25 | -- Errors if no path is found. 26 | getPath :: [(Edge, a)] -> Node -> Node -> Maybe [a] 27 | getPath edgeData start end = map (edgeMap !) . nodesToEdges <$> sp start end graph 28 | where 29 | graph = mkGraph' $ map fst edgeData 30 | nodesToEdges nodes = zip nodes $ tail nodes 31 | edgeMap = HashMap.fromList edgeData 32 | 33 | mkGraph' :: [Edge] -> Gr () Int 34 | mkGraph' edgeData = mkGraph nodes edges 35 | where 36 | detuple (a, b) = [a, b] 37 | nodes = map (, ()) . IntSet.toList . IntSet.fromList . concatMap detuple $ edgeData 38 | edges = map (uncurry (,,1)) edgeData 39 | -------------------------------------------------------------------------------- /scripts/stylish-haskell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Runs stylish-haskell on all the Haskell files in the project. If --apply is 4 | # passed, overwrites the files with the styled output. Otherwise, errors if 5 | # differences are detected. 6 | 7 | set -eo pipefail 8 | 9 | builtin cd "$(dirname "${BASH_SOURCE[0]}")/.." 10 | 11 | STYLISH_APPLY=0 12 | 13 | for arg in "$@"; do 14 | case "$arg" in 15 | (--apply) STYLISH_APPLY=1 ;; 16 | esac 17 | done 18 | 19 | function get_files() { 20 | find . -path ./.stack-work -prune -o -name "*.hs" -print 21 | } 22 | 23 | function diff_no_fail() { 24 | diff "$@" || true 25 | } 26 | 27 | function check_file_empty() { 28 | if [[ -n "$(cat $1)" ]]; then 29 | return 1 30 | fi 31 | } 32 | 33 | RUN_STYLISH=~/.local/bin/stylish-haskell 34 | if [[ ! 
-f "${RUN_STYLISH}" ]]; then 35 | echo "stylish-haskell not built" 36 | exit 1 37 | fi 38 | 39 | if [[ "$STYLISH_APPLY" == 1 ]]; then 40 | get_files | xargs stack exec -- stylish-haskell --inplace 41 | else 42 | trap 'rm -rf .tmp' 0 43 | get_files | while read FILE; do 44 | mkdir -p ".tmp/$(dirname "$FILE")" 45 | stack exec -- "${RUN_STYLISH}" "$FILE" | diff_no_fail --unified "$FILE" - > .tmp/"$FILE" 46 | done 47 | find .tmp -type f | xargs cat | tee .tmp/diffs.txt 48 | check_file_empty .tmp/diffs.txt 49 | fi 50 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2018 Brandon Chinn 2 | 3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 4 | 5 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 6 | 7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 8 | 9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 10 | 11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 12 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## persistent-migration 0.3.0 2 | 3 | Fixes: 4 | * Don't insert into `persistent_migration` table if no migrations are running ([#72](https://github.com/brandonchinn178/persistent-migration/pull/72)) 5 | * Fix `SERIAL` type for INT64 columns ([#79](https://github.com/brandonchinn178/persistent-migration/pull/79)) 6 | 7 | ## persistent-migration 0.2.1 8 | 9 | Fixes: 10 | * Fix build for persistent-2.12.0.0 11 | 12 | ## persistent-migration 0.2.0 13 | 14 | Breaking changes: 15 | * Moved some types out of `Database.Persist.Migration.Operation` and into `Database.Persist.Migration.Core` 16 | 17 | Fixes: 18 | * Fix for GHC 8.8 19 | 20 | Other changes: 21 | * Re-export `rawSql` in `Database.Persist.Migration` 22 | 23 | ## persistent-migration 0.1.0 24 | 25 | Breaking changes: 26 | * Remove prefixes from operations (#31) 27 | * Refactored module structure (#34) 28 | * New migration format with batched operations (#36) 29 | * `Operation` is now a sum type instead of a newtype wrapper around `Migrateable` (#58) 30 | * Interpolation now done with `MigrateSql`, e.g. 
in `RawOperation` (#62) 31 | * Fix bug in `CreateTable` having multiple `Unique` constraints (#63) 32 | 33 | Other changes: 34 | * Add new operations: RenameTable, AddConstraint, DropConstraint (#33) 35 | * Use hpack (#42) 36 | * Allow specifying defaults in columns (#52) 37 | * Add new operation: RenameColumn (#55) 38 | 39 | ## persistent-migration 0.0.2 40 | 41 | * Generalize `hasMigration` and `checkMigration` to `MonadIO` 42 | 43 | ## persistent-migration 0.0.1 44 | 45 | * Initial implementation 46 | -------------------------------------------------------------------------------- /test/utils/Utils/Goldens.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE LambdaCase #-} 2 | 3 | module Utils.Goldens 4 | ( goldenDir 5 | , goldenVsString 6 | , goldenVsText 7 | , goldenVsShow 8 | ) where 9 | 10 | import Data.ByteString.Lazy (ByteString) 11 | import Data.Char (toLower) 12 | import Data.Text (Text) 13 | import qualified Data.Text as Text 14 | import Data.Text.Lazy (fromStrict) 15 | import qualified Data.Text.Lazy.Encoding as Text 16 | import Test.Tasty (TestTree) 17 | import qualified Test.Tasty.Golden as Golden 18 | 19 | {- Golden files -} 20 | 21 | -- | Get the directory of the golden files for the given test type and label. 22 | goldenDir :: String -> String -> FilePath 23 | goldenDir testType label = "test/goldens-" ++ testType ++ "/" ++ label ++ "/" 24 | 25 | {- Golden test -} 26 | 27 | -- | Run a goldens test where the goldens file is generated from the name. 28 | goldenVsString :: FilePath -> String -> IO ByteString -> TestTree 29 | goldenVsString dir name = Golden.goldenVsString name goldenFile 30 | where 31 | goldenFile = dir ++ map slugify name ++ ".txt" 32 | slugify = \case 33 | ' ' -> '-' 34 | x -> toLower x 35 | 36 | -- | Run a goldens test against a Text. 37 | goldenVsText :: FilePath -> String -> IO Text -> TestTree 38 | goldenVsText dir name = goldenVsString dir name . fmap toByteString 39 | where 40 | toByteString = Text.encodeUtf8 . fromStrict 41 | 42 | -- | Run a goldens test against a Showable value. 43 | goldenVsShow :: Show a => FilePath -> String -> IO a -> TestTree 44 | goldenVsShow dir name = goldenVsText dir name . fmap (Text.pack . show) 45 | -------------------------------------------------------------------------------- /src/Database/Persist/Migration.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Module : Database.Persist.Migration 3 | Maintainer : Brandon Chinn 4 | Stability : experimental 5 | Portability : portable 6 | 7 | Defines a migration framework for the persistent library. 8 | -} 9 | 10 | module Database.Persist.Migration 11 | ( hasMigration 12 | , checkMigration 13 | -- * Re-exports 14 | , module Database.Persist.Migration.Backend 15 | , module Database.Persist.Migration.Core 16 | , module Database.Persist.Migration.Operation 17 | , module Database.Persist.Migration.Operation.Types 18 | , module Database.Persist.Migration.Utils.Sql 19 | , PersistValue(..) 20 | , SqlType(..) 
21 | , rawSql 22 | ) where 23 | 24 | import Control.Monad (unless) 25 | import Control.Monad.IO.Class (MonadIO) 26 | import qualified Data.Text as Text 27 | import Database.Persist.Migration.Backend 28 | import Database.Persist.Migration.Core hiding (getMigration, runMigration) 29 | import Database.Persist.Migration.Operation 30 | import Database.Persist.Migration.Operation.Types 31 | import Database.Persist.Migration.Utils.Sql 32 | import Database.Persist.Sql (PersistValue(..), SqlType(..), rawSql) 33 | import qualified Database.Persist.Sql as Persistent 34 | 35 | -- | True if the persistent library detects more migrations unaccounted for. 36 | hasMigration :: MonadIO m => Persistent.Migration -> Persistent.SqlPersistT m Bool 37 | hasMigration = fmap (not . null) . Persistent.showMigration 38 | 39 | -- | Fails if the persistent library detects more migrations unaccounted for. 40 | checkMigration :: MonadIO m => Persistent.Migration -> Persistent.SqlPersistT m () 41 | checkMigration migration = do 42 | migrationText <- Persistent.showMigration migration 43 | unless (null migrationText) $ error $ 44 | unlines $ "More migrations detected:" : bullets migrationText 45 | where 46 | bullets = map ((" * " ++ ) . Text.unpack) 47 | -------------------------------------------------------------------------------- /package.yaml: -------------------------------------------------------------------------------- 1 | name: persistent-migration 2 | version: 0.3.0 3 | license: BSD3 4 | author: Brandon Chinn 5 | maintainer: Brandon Chinn 6 | category: Database 7 | synopsis: Manual migrations for the persistent library 8 | description: Manual migrations for the persistent library. 9 | extra-doc-files: 10 | - CHANGELOG.md 11 | - README.md 12 | 13 | github: brandonchinn178/persistent-migration 14 | 15 | ghc-options: -Wall 16 | when: 17 | - condition: impl(ghc >= 8.0) 18 | ghc-options: 19 | - -Wcompat 20 | - -Wincomplete-record-updates 21 | - -Wincomplete-uni-patterns 22 | - -Wnoncanonical-monad-instances 23 | - condition: impl(ghc < 8.8) 24 | ghc-options: 25 | - -Wnoncanonical-monadfail-instances 26 | 27 | library: 28 | source-dirs: src 29 | dependencies: 30 | base: '>= 4.7 && < 5' 31 | containers: 32 | fgl: 33 | persistent: 34 | mtl: 35 | text: 36 | time: 37 | unordered-containers: 38 | 39 | _test-aliases: 40 | - &test-utils-deps 41 | base: 42 | bytestring: 43 | persistent: 44 | persistent-migration: 45 | QuickCheck: 46 | tasty: 47 | tasty-golden: 48 | tasty-quickcheck: 49 | text: 50 | time: 51 | 52 | tests: 53 | persistent-migration-test: 54 | main: Main.hs 55 | source-dirs: 56 | - test/unit 57 | - test/utils 58 | dependencies: 59 | <<: *test-utils-deps 60 | conduit: 61 | containers: 62 | mtl: 63 | 64 | persistent-migration-integration: 65 | main: Main.hs 66 | source-dirs: 67 | - test/integration 68 | - test/utils 69 | dependencies: 70 | <<: *test-utils-deps 71 | exceptions: 72 | monad-logger: 73 | mtl: 74 | process: 75 | persistent-postgresql: 76 | persistent-template: 77 | resource-pool: 78 | temporary: 79 | yaml: 80 | -------------------------------------------------------------------------------- /test/integration/Utils/Backends.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE CPP #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | 4 | module Utils.Backends 5 | ( withPostgres 6 | ) where 7 | 8 | import Control.Concurrent (threadDelay) 9 | import Control.Monad.Logger (runNoLoggingT) 10 | import qualified Data.ByteString.Char8 as ByteString 11 | #if 
!MIN_VERSION_base(4,11,0) 12 | import Data.Monoid ((<>)) 13 | #endif 14 | import Data.Pool (Pool, destroyAllResources) 15 | import qualified Data.Text as Text 16 | import qualified Data.Text.IO as Text 17 | import Database.Persist.Postgresql (createPostgresqlPool) 18 | import Database.Persist.Sql (SqlBackend) 19 | import System.Exit (ExitCode(..), exitWith) 20 | import System.IO (hPutStrLn, stderr) 21 | import System.Process (readProcessWithExitCode) 22 | import Test.Tasty (TestTree, withResource) 23 | 24 | -- | Run a function with the PostgreSQL backend. 25 | withPostgres :: FilePath -> (IO (Pool SqlBackend) -> TestTree) -> TestTree 26 | withPostgres dir = withResource startPostgres stopPostgres 27 | where 28 | dir' = dir ++ "/postgresql/" 29 | -- running postgres 30 | startPostgres = do 31 | -- initialize local postgres server 32 | callProcess' "pg_ctl" ["-D", dir', "init"] 33 | -- modify configuration 34 | let confFile = dir' ++ "postgresql.conf" 35 | conf <- Text.readFile confFile 36 | Text.writeFile confFile . Text.unlines . modifyConf . Text.lines $ conf 37 | -- start postgres server 38 | callProcess' "pg_ctl" 39 | [ "-D", dir' 40 | , "-l", dir' ++ "postgres.log" 41 | , "-o", "-h '' -k '" ++ dir' ++ "'" 42 | , "start" 43 | ] 44 | threadDelay 1000000 45 | callProcess' "createdb" ["-h", dir', "test_db"] 46 | -- create a connection Pool 47 | let connString = ByteString.pack $ "postgresql:///test_db?host=" ++ dir' 48 | runNoLoggingT $ createPostgresqlPool connString 4 49 | stopPostgres pool = do 50 | callProcess' "pg_ctl" ["-D", dir', "stop", "-m", "fast"] 51 | destroyAllResources pool 52 | -- utilities 53 | callProcess' cmd args = do 54 | (code, out, err) <- readProcessWithExitCode cmd args "" 55 | case code of 56 | ExitSuccess -> return () 57 | ExitFailure _ -> do 58 | hPutStrLn stderr out 59 | hPutStrLn stderr err 60 | exitWith code 61 | modifyConf conf = foldl modifyConf' conf 62 | [ ("client_min_messages", "warning") 63 | , ("log_min_messages", "info") 64 | , ("log_statement", "all") 65 | ] 66 | modifyConf' conf (k, v) = 67 | let setting = Text.unwords [k, "=", v] 68 | in case conf of 69 | [] -> [setting] 70 | line:conf' -> if ("#" <> k) `Text.isPrefixOf` line 71 | then setting : conf' 72 | else modifyConf' conf' (k, v) 73 | -------------------------------------------------------------------------------- /test/unit/Utils/Backends.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE CPP #-} 2 | {-# LANGUAGE LambdaCase #-} 3 | {-# LANGUAGE NamedFieldPuns #-} 4 | {-# LANGUAGE OverloadedStrings #-} 5 | 6 | module Utils.Backends 7 | ( MockDatabase(..) 8 | , defaultDatabase 9 | , setDatabase 10 | , withTestBackend 11 | ) where 12 | 13 | import Control.Monad.IO.Class (liftIO) 14 | import Data.Conduit.List (sourceList) 15 | import Data.IORef (IORef, newIORef, readIORef, writeIORef) 16 | import qualified Data.Map as Map 17 | import Data.Maybe (maybeToList) 18 | import Database.Persist.Migration (Version) 19 | import Database.Persist.Sql (PersistValue(..), Statement(..)) 20 | import Database.Persist.SqlBackend 21 | (MkSqlBackendArgs(..), SqlBackend, mkSqlBackend) 22 | import System.IO.Unsafe (unsafePerformIO) 23 | 24 | {- Mock test database -} 25 | 26 | -- | The mock database backend for testing. 27 | newtype MockDatabase = MockDatabase 28 | { version :: Maybe Version 29 | } 30 | 31 | -- | The default test database. 32 | defaultDatabase :: MockDatabase 33 | defaultDatabase = MockDatabase Nothing 34 | 35 | -- | The global test database. 
36 | mockDatabase :: IORef MockDatabase 37 | mockDatabase = unsafePerformIO $ newIORef defaultDatabase 38 | {-# NOINLINE mockDatabase #-} 39 | 40 | -- | Set the test database. 41 | setDatabase :: MockDatabase -> IO () 42 | setDatabase = writeIORef mockDatabase 43 | 44 | {- Mock SqlBackend -} 45 | 46 | -- | Initialize a mock SqlBackend for testing. 47 | withTestBackend :: (SqlBackend -> IO a) -> IO a 48 | withTestBackend action = do 49 | smap <- newIORef Map.empty 50 | action $ mkSqlBackend MkSqlBackendArgs 51 | { connPrepare = \case 52 | "SELECT version FROM persistent_migration ORDER BY timestamp DESC LIMIT 1" -> 53 | return stmt 54 | { stmtQuery = \_ -> do 55 | MockDatabase{version} <- liftIO $ readIORef mockDatabase 56 | let result = pure . PersistInt64 . fromIntegral <$> maybeToList version 57 | return $ sourceList result 58 | } 59 | _ -> return stmt 60 | , connStmtMap = smap 61 | , connInsertSql = error "connInsertSql" 62 | , connClose = error "connClose" 63 | , connMigrateSql = error "connMigrateSql" 64 | , connBegin = error "connBegin" 65 | , connCommit = error "connCommit" 66 | , connRollback = error "connRollback" 67 | , connEscapeFieldName = error "connEscapeFieldName" 68 | , connEscapeTableName = error "connEscapeTableName" 69 | , connEscapeRawName = error "connEscapeRawName" 70 | , connNoLimit = error "connNoLimit" 71 | , connRDBMS = error "connRDBMS" 72 | , connLimitOffset = error "connLimitOffset" 73 | , connLogFunc = \_ _ _ _ -> return () 74 | } 75 | where 76 | stmt = Statement 77 | { stmtFinalize = return () 78 | , stmtReset = return () 79 | , stmtExecute = \_ -> return 0 80 | , stmtQuery = error "stmtQuery" 81 | } 82 | -------------------------------------------------------------------------------- /src/Database/Persist/Migration/Utils/Sql.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Module : Database.Persist.Migration.Utils.Sql 3 | Maintainer : Brandon Chinn 4 | Stability : experimental 5 | Portability : portable 6 | 7 | Defines helper functions for writing SQL queries. 8 | -} 9 | {-# LANGUAGE CPP #-} 10 | {-# LANGUAGE OverloadedStrings #-} 11 | {-# LANGUAGE RecordWildCards #-} 12 | 13 | module Database.Persist.Migration.Utils.Sql 14 | ( commas 15 | , uncommas 16 | , uncommas' 17 | , quote 18 | , MigrateSql(..) 19 | , executeSql 20 | , pureSql 21 | , mapSql 22 | , concatSql 23 | ) where 24 | 25 | import Control.Monad.IO.Class (MonadIO(..)) 26 | #if !MIN_VERSION_base(4,11,0) 27 | import Data.Monoid ((<>)) 28 | #endif 29 | import Data.Text (Text) 30 | import qualified Data.Text as Text 31 | import Database.Persist.Sql (PersistValue(..), SqlPersistT) 32 | import qualified Database.Persist.Sql as Persist 33 | 34 | -- | Split the given line by commas, ignoring commas within parentheses. 35 | -- 36 | -- > commas "a,b,c" == ["a", "b", "c"] 37 | -- > commas "a,b,c (d,e),z" == ["a", "b", "c (d,e)", "z"] 38 | -- > commas "a,b,c (d,e,(f,g)),z" == ["a", "b", "c (d,e,(f,g))", "z"] 39 | commas :: Text -> [Text] 40 | commas t = go (Text.unpack t) "" [] (0 :: Int) 41 | where 42 | go src buffer result level = 43 | let result' = result ++ [Text.pack buffer] 44 | in case src of 45 | "" -> result' 46 | ',':xs | level == 0 -> go xs "" result' level 47 | '(':xs -> go xs (buffer ++ "(") result (level + 1) 48 | ')':xs -> go xs (buffer ++ ")") result (max 0 $ level - 1) 49 | x:xs -> go xs (buffer ++ [x]) result level 50 | 51 | -- | Join the given Text with commas separating each item. 
52 | uncommas :: [Text] -> Text 53 | uncommas = Text.intercalate "," 54 | 55 | -- | Join the given Text with commas separating each item and quoting them. 56 | uncommas' :: [Text] -> Text 57 | uncommas' = uncommas . map quote 58 | 59 | -- | Quote the given Text. 60 | quote :: Text -> Text 61 | quote t = "\"" <> t <> "\"" 62 | 63 | -- | A SQL query (with placeholders) and values to replace those placeholders. 64 | data MigrateSql = MigrateSql 65 | { sqlText :: Text 66 | , sqlVals :: [PersistValue] 67 | } deriving (Show) 68 | 69 | -- | Execute a SQL query. 70 | executeSql :: MonadIO m => MigrateSql -> SqlPersistT m () 71 | executeSql MigrateSql{..} = Persist.rawExecute sqlText sqlVals 72 | 73 | -- | Create a MigrateSql from the given Text. 74 | pureSql :: Text -> MigrateSql 75 | pureSql sql = MigrateSql sql [] 76 | 77 | -- | Map the SQL text with the given function. 78 | mapSql :: (Text -> Text) -> MigrateSql -> MigrateSql 79 | mapSql f sql = sql { sqlText = f $ sqlText sql } 80 | 81 | -- | Concatenate the given MigrateSql queries with the given combining function. 82 | concatSql :: ([Text] -> Text) -> [MigrateSql] -> MigrateSql 83 | concatSql f queries = MigrateSql 84 | { sqlText = f $ map sqlText queries 85 | , sqlVals = concatMap sqlVals queries 86 | } 87 | -------------------------------------------------------------------------------- /src/Database/Persist/Migration/Operation/Types.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Module : Database.Persist.Migration.Operation.Types 3 | Maintainer : Brandon Chinn 4 | Stability : experimental 5 | Portability : portable 6 | 7 | Defines auxiliary data types that can be used in Operations. 8 | -} 9 | {-# LANGUAGE DuplicateRecordFields #-} 10 | {-# LANGUAGE LambdaCase #-} 11 | {-# LANGUAGE OverloadedStrings #-} 12 | {-# LANGUAGE RecordWildCards #-} 13 | 14 | module Database.Persist.Migration.Operation.Types 15 | ( ColumnIdentifier 16 | , dotted 17 | , Column(..) 18 | , validateColumn 19 | , ColumnProp(..) 20 | , TableConstraint(..) 21 | , isPrimaryKey 22 | , getConstraintColumns 23 | ) where 24 | 25 | import Control.Monad (when) 26 | import Data.List (nub) 27 | import Data.Text (Text) 28 | import qualified Data.Text as Text 29 | import Database.Persist.Sql (PersistValue(..)) 30 | import Database.Persist.Types (SqlType) 31 | 32 | -- | A column identifier, table.column 33 | type ColumnIdentifier = (Text, Text) 34 | 35 | -- | Make a ColumnIdentifier displayable. 36 | dotted :: ColumnIdentifier -> Text 37 | dotted (tab, col) = Text.concat [tab, ".", col] 38 | 39 | -- | The definition for a Column in a SQL database. 40 | data Column = Column 41 | { colName :: Text 42 | , colType :: SqlType 43 | , colProps :: [ColumnProp] 44 | } deriving (Show) 45 | 46 | -- | Validate a Column. 47 | validateColumn :: Column -> Either String () 48 | validateColumn col@Column{..} = when (hasDuplicates $ map getColumnPropName colProps) $ 49 | Left $ "Duplicate column properties detected: " ++ show col 50 | where 51 | hasDuplicates l = length l /= length (nub l) 52 | 53 | getColumnPropName :: ColumnProp -> String 54 | getColumnPropName = \case 55 | NotNull{} -> "NotNull" 56 | References{} -> "References" 57 | AutoIncrement{} -> "AutoIncrement" 58 | Default{} -> "Default" 59 | 60 | -- | A property for a 'Column'. 
61 | data ColumnProp 62 | = NotNull 63 | -- ^ Makes a column non-nullable (defaults to nullable) 64 | | References ColumnIdentifier 65 | -- ^ Mark this column as a foreign key to the given column 66 | | AutoIncrement 67 | -- ^ Makes a column auto-incrementing 68 | | Default PersistValue 69 | -- ^ Sets the default value for the column. Note that this doesn't matter when inserting 70 | -- data via Haskell; this property only sets the schema in the SQL backend. 71 | -- 72 | -- See 'AddColumn' for setting the default value for existing rows in a migration. 73 | -- 74 | -- More info: https://www.yesodweb.com/book/persistent#persistent_attributes 75 | deriving (Show,Eq) 76 | 77 | -- | Table constraints in a CREATE query. 78 | data TableConstraint 79 | = PrimaryKey [Text] -- ^ PRIMARY KEY (col1, col2, ...) 80 | | Unique Text [Text] -- ^ CONSTRAINT name UNIQUE (col1, col2, ...) 81 | deriving (Show) 82 | 83 | isPrimaryKey :: TableConstraint -> Bool 84 | isPrimaryKey = \case 85 | PrimaryKey{} -> True 86 | _ -> False 87 | 88 | -- | Get the columns defined in the given TableConstraint. 89 | getConstraintColumns :: TableConstraint -> [Text] 90 | getConstraintColumns = \case 91 | PrimaryKey cols -> cols 92 | Unique _ cols -> cols 93 | -------------------------------------------------------------------------------- /test/unit/Migration.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE RecordWildCards #-} 3 | 4 | module Migration (testMigrations) where 5 | 6 | import Control.Monad.Reader (runReaderT) 7 | import qualified Data.Text as Text 8 | import Database.Persist.Migration 9 | import Database.Persist.Migration.Core (getMigration) 10 | import Test.Tasty (TestName, TestTree, testGroup) 11 | 12 | import Utils.Backends 13 | (MockDatabase(..), defaultDatabase, setDatabase, withTestBackend) 14 | import Utils.Goldens (goldenVsText) 15 | 16 | -- | Build a test suite for the given MigrateBackend. 
17 | testMigrations :: FilePath -> MigrateBackend -> TestTree 18 | testMigrations dir backend = testGroup "goldens" 19 | [ goldenMigration' "Basic migration" defaultDatabase 20 | [ 0 ~> 1 := 21 | [ CreateTable 22 | { name = "person" 23 | , schema = 24 | [ Column "id" SqlInt32 [] 25 | , Column "name" SqlString [NotNull] 26 | , Column "age" SqlInt32 [NotNull] 27 | , Column "alive" SqlBool [NotNull] 28 | , Column "hometown" SqlInt64 [References ("cities", "id")] 29 | ] 30 | , constraints = 31 | [ PrimaryKey ["id"] 32 | , Unique "unique_name" ["name"] 33 | ] 34 | } 35 | ] 36 | , 1 ~> 2 := 37 | [ AddColumn "person" (Column "gender" SqlString []) Nothing 38 | , DropColumn ("person", "alive") 39 | ] 40 | , 2 ~> 3 := 41 | [ DropTable "person" 42 | ] 43 | ] 44 | , goldenMigration' "Partial migration" (withVersion 1) 45 | [ 0 ~> 1 := [createTablePerson []] 46 | , 1 ~> 2 := [DropTable "person"] 47 | ] 48 | , goldenMigration' "Complete migration" (withVersion 2) 49 | [ 0 ~> 1 := [createTablePerson []] 50 | , 1 ~> 2 := [DropTable "person"] 51 | ] 52 | , goldenMigration' "Migration with shorter path" defaultDatabase 53 | [ 0 ~> 1 := [createTablePerson []] 54 | , 1 ~> 2 := [AddColumn "person" (Column "gender" SqlString []) Nothing] 55 | , 0 ~> 2 := [createTablePerson [Column "gender" SqlString []]] 56 | ] 57 | , goldenMigration' "Partial migration avoids shorter path" (withVersion 1) 58 | [ 0 ~> 1 := [createTablePerson []] 59 | , 1 ~> 2 := [AddColumn "person" (Column "gender" SqlString []) Nothing] 60 | , 0 ~> 2 := [createTablePerson [Column "gender" SqlString []]] 61 | ] 62 | ] 63 | where 64 | goldenMigration' = goldenMigration dir backend 65 | 66 | {- Helpers -} 67 | 68 | -- | Run a goldens test for a migration. 69 | goldenMigration 70 | :: FilePath -> MigrateBackend -> TestName -> MockDatabase -> Migration -> TestTree 71 | goldenMigration dir backend testName testBackend migration = goldenVsText dir testName $ do 72 | setDatabase testBackend 73 | MigrateSql{..} <- concatSql Text.unlines <$> getMigration' migration 74 | return $ Text.unlines [sqlText, Text.pack $ show sqlVals] 75 | where 76 | getMigration' = withTestBackend . runReaderT . getMigration backend defaultSettings 77 | 78 | -- | Set the version in the MockDatabase. 79 | withVersion :: Version -> MockDatabase 80 | withVersion v = defaultDatabase{version = Just v} 81 | 82 | -- | Create a basic 'person' table. 83 | createTablePerson :: [Column] -> Operation 84 | createTablePerson cols = CreateTable 85 | { name = "person" 86 | , schema = Column "id" SqlInt64 [NotNull, AutoIncrement] : cols 87 | , constraints = [PrimaryKey ["id"]] 88 | } 89 | -------------------------------------------------------------------------------- /src/Database/Persist/Migration/Operation.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Module : Database.Persist.Migration.Operation 3 | Maintainer : Brandon Chinn 4 | Stability : experimental 5 | Portability : portable 6 | 7 | Defines the Operation data types. 8 | -} 9 | {-# LANGUAGE FlexibleInstances #-} 10 | {-# LANGUAGE RecordWildCards #-} 11 | {-# OPTIONS_GHC -fno-warn-orphans #-} 12 | 13 | module Database.Persist.Migration.Operation 14 | ( Operation(..) 
15 | , validateOperation 16 | ) where 17 | 18 | import Control.Monad (when) 19 | import Data.List (nub) 20 | import Data.Maybe (isNothing, mapMaybe) 21 | import Data.Text (Text) 22 | import Database.Persist.Migration.Operation.Types 23 | import Database.Persist.Migration.Utils.Sql (MigrateSql) 24 | import Database.Persist.Sql (PersistValue, SqlPersistT) 25 | 26 | -- | An operation that can be migrated. 27 | data Operation 28 | = CreateTable 29 | { name :: Text 30 | , schema :: [Column] 31 | , constraints :: [TableConstraint] 32 | } 33 | | DropTable 34 | { table :: Text 35 | } 36 | | RenameTable 37 | { from :: Text 38 | , to :: Text 39 | } 40 | | AddConstraint 41 | { table :: Text 42 | , constraint :: TableConstraint 43 | } 44 | | DropConstraint 45 | { table :: Text 46 | , constraintName :: Text 47 | } 48 | | AddColumn 49 | { table :: Text 50 | , column :: Column 51 | , colDefault :: Maybe PersistValue 52 | -- ^ The default for existing rows; required if the column is non-nullable 53 | } 54 | | RenameColumn 55 | { table :: Text 56 | , from :: Text 57 | , to :: Text 58 | } 59 | | DropColumn 60 | { columnId :: ColumnIdentifier 61 | } 62 | | RawOperation 63 | { message :: Text 64 | , rawOp :: SqlPersistT IO [MigrateSql] 65 | } 66 | -- ^ A custom operation that can be defined manually. 67 | -- 68 | -- RawOperations should primarily use 'rawSql' and 'rawExecute' from the persistent library. If the 69 | -- operation depends on the backend being run, query 'connRDBMS' from the 'SqlBackend': 70 | -- 71 | -- @ 72 | -- asks connRDBMS >>= \case 73 | -- "sqlite" -> ... 74 | -- _ -> return () 75 | -- @ 76 | deriving (Show) 77 | 78 | instance Show (SqlPersistT m a) where 79 | show _ = "" 80 | 81 | -- | Validate that the given Operation is valid. 82 | validateOperation :: Operation -> Either String () 83 | validateOperation ct@CreateTable{..} = do 84 | when (null schema) $ 85 | fail' "No columns specified in the schema" 86 | 87 | mapM_ validateColumn schema 88 | 89 | case length . filter isPrimaryKey $ constraints of 90 | 0 -> fail' "No primary key specified" 91 | 1 -> return () 92 | _ -> fail' "Multiple primary keys specified" 93 | 94 | let getUniqueName (Unique n _) = Just n 95 | getUniqueName _ = Nothing 96 | uniqueNames = mapMaybe getUniqueName constraints 97 | when (length (nub uniqueNames) /= length uniqueNames) $ 98 | fail' "Multiple unique constraints with the same name detected" 99 | 100 | let constraintCols = concatMap getConstraintColumns constraints 101 | schemaCols = map colName schema 102 | when (any (`notElem` schemaCols) constraintCols) $ 103 | fail' "Table constraint references non-existent column" 104 | where 105 | fail' = Left . (++ ": " ++ show ct) 106 | 107 | validateOperation ac@AddColumn{..} = do 108 | validateColumn column 109 | when (NotNull `elem` colProps column && isNothing colDefault) $ 110 | fail' "Adding a non-nullable column requires a default" 111 | where 112 | fail' = Left . 
(++ ": " ++ show ac) 113 | 114 | validateOperation _ = return () 115 | -------------------------------------------------------------------------------- /test/unit/Property.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE NamedFieldPuns #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE RecordWildCards #-} 4 | 5 | module Property (testProperties) where 6 | 7 | import Control.Applicative (liftA2) 8 | import Data.Either (isRight) 9 | import Database.Persist.Migration 10 | import Test.QuickCheck 11 | import Test.Tasty (TestTree, testGroup) 12 | import Test.Tasty.QuickCheck (testProperty) 13 | 14 | import Utils.QuickCheck (CreateTable'(..), Identifier(..), mapSome, toOperation) 15 | 16 | -- | Run tests related to migration validaton. 17 | testProperties :: TestTree 18 | testProperties = testGroup "properties" 19 | [ testProperty "Valid migration" $ forAll arbitrary isValidOperationPaths 20 | , testProperty "Invalid decreasing migration" $ 21 | forAll arbitrary $ \(OperationPaths opPaths) -> do 22 | opPaths' <- mapSome (\(a, b) -> (b, a)) opPaths 23 | return . not . isValidOperationPaths $ OperationPaths opPaths' 24 | , testProperty "Invalid duplicate operation path" $ 25 | forAll arbitrary $ \(OperationPaths opPaths) -> do 26 | opPaths' <- mapSomeDupl opPaths 27 | return . not . isValidOperationPaths $ OperationPaths opPaths' 28 | , testProperty "Valid CreateTable" $ 29 | forAll arbitrary $ isValidOperation . toOperation 30 | , testProperty "Duplicate ColumnProps in CreateTable" $ do 31 | colsMaybeProps <- listOf arbitrary 32 | colsWithProps <- listOf1 (arbitrary `suchThat` (not . null . colProps)) 33 | let duplProps = mapM $ \col@Column{colProps} -> do 34 | colProps' <- mapSomeDupl colProps 35 | return col{colProps = colProps'} 36 | cols <- liftA2 (++) (duplProps colsMaybeProps) (duplProps colsWithProps) 37 | let ct = CreateTable "foo" cols [] 38 | return . not . isValidOperation $ ct 39 | , testProperty "Duplicate Constraints in CreateTable" $ 40 | forAll arbitrary $ \ct@CreateTable'{..} -> do 41 | constraints <- mapSomeDupl ctConstraints 42 | return . not . isValidOperation . toOperation $ ct{ctConstraints = constraints} 43 | , testProperty "Constraint references non-existent column" $ 44 | forAll arbitrary $ \ct@CreateTable'{..} -> do 45 | let existing = map (Identifier . colName) ctSchema 46 | genConstraint = do 47 | Identifier name' <- arbitrary 48 | cols <- map unIdent <$> listOf1 (arbitrary `suchThat` (`notElem` existing)) 49 | elements [PrimaryKey cols, Unique name' cols] 50 | newConstraints <- listOf1 genConstraint 51 | return . not . isValidOperation . toOperation $ 52 | ct{ctConstraints = ctConstraints ++ newConstraints} 53 | , testProperty "Duplicate ColumnProps in AddColumn" $ 54 | forAll arbitrary $ \col@Column{colProps} -> do 55 | Identifier table <- arbitrary 56 | colProps' <- mapSomeDupl colProps 57 | let column = col{colProps = colProps'} 58 | colDefault = Nothing 59 | return $ not (null colProps) ==> not (isValidOperation AddColumn{..}) 60 | , testProperty "Non-null AddColumn without default" $ 61 | forAll arbitrary $ \col@Column{colProps} -> do 62 | Identifier table <- arbitrary 63 | let colProps' = if NotNull `elem` colProps then colProps else NotNull : colProps 64 | column = col{colProps = colProps'} 65 | colDefault = Nothing 66 | return . not . 
isValidOperation $ AddColumn{..} 67 | ] 68 | 69 | newtype OperationPaths = OperationPaths { getOpPaths :: [OperationPath] } 70 | deriving (Show) 71 | 72 | instance Arbitrary OperationPaths where 73 | arbitrary = do 74 | Positive maxVersion <- arbitrary 75 | let versions = [0..maxVersion] 76 | opPaths = zip versions $ tail versions 77 | -- order should not matter 78 | OperationPaths <$> shuffle opPaths 79 | 80 | -- | Validate an Operation. 81 | isValidOperation :: Operation -> Bool 82 | isValidOperation = isRight . validateOperation 83 | 84 | -- | Validate OperationPaths in a Migration. 85 | isValidOperationPaths :: OperationPaths -> Bool 86 | isValidOperationPaths = isRight . validateMigration . map mkOperation . getOpPaths 87 | where 88 | mkOperation path = path := [] 89 | 90 | -- | Duplicate some elements in the given list. 91 | mapSomeDupl :: [a] -> Gen [a] 92 | mapSomeDupl = fmap concat . mapSome dupl . map pure 93 | where 94 | dupl [x] = [x, x] 95 | dupl _ = error "unreachable" 96 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | aliases: 4 | # build environment 5 | - &docker-linux 6 | docker: 7 | - image: centos:7 8 | working_directory: /root/src 9 | 10 | # system dependencies 11 | - &install-system-deps 12 | run: 13 | name: Install system dependencies 14 | command: scripts/install-system-deps.sh 15 | - &install-stack 16 | run: 17 | name: Install stack 18 | command: | 19 | curl -sSL https://get.haskellstack.org/ | sh 20 | stack --version 21 | 22 | # cache 23 | - &build-cache-key 24 | run: 25 | name: Build cache key 26 | command: | 27 | FILES=( 28 | stack.yaml 29 | package.yaml 30 | scripts/install-stack-deps.sh 31 | ) 32 | cat "${FILES[@]}" > cache-key.txt 33 | curl -sSL https://get.haskellstack.org/ | sed -n 's/^STACK_VERSION="\(.*\)"/\1/p' >> cache-key.txt 34 | - &cache-key 35 | v3-{{ checksum "cache-key.txt" }} 36 | - &install-stack-deps 37 | run: 38 | name: Build external dependencies 39 | command: scripts/install-stack-deps.sh 40 | 41 | # build steps 42 | - &run-build 43 | run: 44 | name: Build package 45 | command: stack build --test --no-run-tests 46 | 47 | # test steps 48 | - &run-unit-tests 49 | run: 50 | name: Run unit tests 51 | command: | 52 | DIR=$(stack path --dist-dir)/build/persistent-migration-test 53 | ${DIR}/persistent-migration-test --no-create 54 | - &run-integration-tests 55 | run: 56 | name: Run integration tests 57 | command: | 58 | # integration tests need to run as non-root 59 | useradd test 60 | DIR=$(stack path --dist-dir)/build/persistent-migration-integration 61 | su test -c "${DIR}/persistent-migration-integration" 62 | - &run-hlint 63 | run: 64 | name: Run HLint 65 | command: scripts/hlint.sh 66 | - &run-stylish-haskell 67 | run: 68 | name: Run stylish-haskell 69 | command: scripts/stylish-haskell.sh 70 | 71 | jobs: 72 | prebuild: 73 | <<: *docker-linux 74 | steps: 75 | - checkout 76 | - *build-cache-key 77 | - persist_to_workspace: 78 | root: . 79 | paths: 80 | - cache-key.txt 81 | 82 | build: 83 | <<: *docker-linux 84 | steps: 85 | - checkout 86 | - attach_workspace: 87 | at: . 88 | - *install-system-deps 89 | - *install-stack 90 | - restore_cache: 91 | key: *cache-key 92 | - *install-stack-deps 93 | - save_cache: 94 | key: *cache-key 95 | paths: 96 | - ~/.stack 97 | - ~/.local 98 | - *run-build 99 | - persist_to_workspace: 100 | root: . 
101 | paths: 102 | - .stack-work 103 | 104 | unit-tests: 105 | <<: *docker-linux 106 | steps: 107 | - checkout 108 | - attach_workspace: 109 | at: . 110 | - *install-stack 111 | - restore_cache: 112 | key: *cache-key 113 | - *run-unit-tests 114 | 115 | integration-tests: 116 | <<: *docker-linux 117 | steps: 118 | - checkout 119 | - attach_workspace: 120 | at: . 121 | - *install-system-deps 122 | - *install-stack 123 | - restore_cache: 124 | key: *cache-key 125 | - *run-integration-tests 126 | 127 | hlint: 128 | <<: *docker-linux 129 | steps: 130 | - checkout 131 | - attach_workspace: 132 | at: . 133 | - *install-stack 134 | - restore_cache: 135 | key: *cache-key 136 | - *run-hlint 137 | 138 | stylish-haskell: 139 | <<: *docker-linux 140 | steps: 141 | - checkout 142 | - attach_workspace: 143 | at: . 144 | - *install-stack 145 | - restore_cache: 146 | key: *cache-key 147 | - *run-stylish-haskell 148 | 149 | workflows: 150 | version: 2 151 | 152 | build_and_test: 153 | jobs: 154 | - prebuild 155 | - build: 156 | requires: 157 | - prebuild 158 | - unit-tests: 159 | requires: 160 | - build 161 | - integration-tests: 162 | requires: 163 | - build 164 | - hlint: 165 | requires: 166 | - build 167 | - stylish-haskell: 168 | requires: 169 | - build 170 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # persistent-migration 2 | 3 | [![CircleCI](https://circleci.com/gh/brandonchinn178/persistent-migration/tree/master.svg?style=svg)](https://circleci.com/gh/brandonchinn178/persistent-migration/tree/master) 4 | [![Hackage](https://img.shields.io/hackage/v/persistent-migration.svg)](https://hackage.haskell.org/package/persistent-migration) 5 | 6 | This is a migration library for the 7 | [persistent](http://www.stackage.org/package/persistent) package. 8 | 9 | ## Overview 10 | 11 | By default, persistent provides a way to do automatic migrations; how to 12 | quickly and conveniently update the schema to match the definitions of the 13 | models. Because of its automatic nature, it will also balk at any operations 14 | that may delete data ("unsafe" migrations). 15 | 16 | However, in a lot of production cases, you don't actually want this automatic 17 | migration. You might want to be able to run certain unsafe migrations because 18 | you know a column is safe to delete. You might want to be able to copy and 19 | transform data from one column to another and then delete the old column. You 20 | might want explicit/manual migrations for other reasons. 21 | 22 | This package exposes an `Operation` data type that will be converted into SQL 23 | by a persistent backend. To define a series of migrations, write a list of 24 | these `Operations` and call `runMigration` from the appropriate backend module. 25 | Each `Operation` represents a movement from one version of the schema to 26 | another. `runMigration` will check to see the current version of the schema and 27 | run the `Operations` necessary to get from the current version to the latest 28 | version. 
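As a hypothetical sketch (the `previewMigration` helper below is not part of the package's documented examples), the SQL that `runMigration` would execute can be previewed with `getMigration` from the backend module, given a `Migration` value like the one defined under Usage below:

```haskell
import Control.Monad.IO.Class (liftIO)
import qualified Data.Text.IO as Text
import Database.Persist.Migration (MigrateSql(..), Migration, defaultSettings)
import Database.Persist.Migration.Postgres (getMigration)
import Database.Persist.Sql (SqlPersistT)

-- Hypothetical helper: print the SQL statements that would be run for the
-- database's current schema version, without executing them.
previewMigration :: Migration -> SqlPersistT IO ()
previewMigration migration = do
  sqls <- getMigration defaultSettings migration
  mapM_ (liftIO . Text.putStrLn . sqlText) sqls
```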
29 | 30 | ## Usage 31 | 32 | ```haskell 33 | import Database.Persist.Migration 34 | 35 | createPerson :: CreateTable 36 | createPerson = CreateTable 37 | { name = "person" 38 | , schema = 39 | [ Column "id" SqlInt32 [NotNull, AutoIncrement] 40 | , Column "name" SqlString [NotNull] 41 | , Column "age" SqlInt32 [NotNull] 42 | , Column "alive" SqlBool [NotNull] 43 | , Column "hometown" SqlInt64 [References ("cities", "id")] 44 | ] 45 | , constraints = 46 | [ PrimaryKey ["id"] 47 | , Unique "person_identifier" ["name", "age", "hometown"] 48 | ] 49 | } 50 | 51 | migrateHeight :: RawOperation 52 | migrateHeight = RawOperation "Separate height into height_feet, height_inches" $ 53 | map migrateHeight' <$> rawSql "SELECT id, height FROM person" [] 54 | where 55 | migrateHeight' (Single id', Single height) = 56 | let (feet, inches) = quotRem height 12 57 | in MigrateSql "UPDATE person SET height_feet = ?, height_inches = ? WHERE id = ?" 58 | [ PersistInt64 feet 59 | , PersistInt64 inches 60 | , PersistInt64 id' 61 | ] 62 | 63 | migration :: Migration 64 | migration = 65 | -- first migration path should create all the tables 66 | [ 0 ~> 1 := [createPerson] 67 | 68 | -- can define shorter migration paths for equivalent operations; version 2, in this case, should result 69 | -- in the same schema, regardless of the path taken to get there. 70 | , 1 ~> 2 := [DropColumn ("person", "alive")] 71 | , 0 ~> 2 := 72 | [ createPerson{ctSchema = filter ((/= "alive") . colName) $ ctSchema createPerson} 73 | ] 74 | 75 | -- example for adding columns 76 | , 2 ~> 3 := 77 | [ AddColumn "person" (Column "gender" SqlString []) Nothing 78 | -- Adding a non-null column needs a default for existing rows. 79 | , AddColumn "person" (Column "height" SqlInt32 [NotNull]) (Just $ PersistInt64 0) 80 | ] 81 | 82 | -- example for more complex migrations; here, we split up the height field into feet and inches fields 83 | , 3 ~> 4 := 84 | [ AddColumn "person" (Column "height_feet" SqlInt32 []) (Just $ PersistInt64 0) 85 | , AddColumn "person" (Column "height_inches" SqlInt32 []) (Just $ PersistInt64 0) 86 | , migrateHeight 87 | , DropColumn ("person", "height") 88 | ] 89 | ] 90 | ``` 91 | 92 | ```haskell 93 | import Database.Persist.Migration (checkMigration, defaultSettings) 94 | import Database.Persist.Migration.Postgres (runMigration) 95 | 96 | -- the migration defined above 97 | import MyMigration (migration) 98 | 99 | -- the migration from persistent's mkMigrate 100 | import MyMigration.Migrate (migrationDef) 101 | 102 | main = do 103 | -- run the usual migration 104 | runMigration defaultSettings migration 105 | 106 | -- fails if persistent detects more migrations not accounted for 107 | checkMigration migrationDef 108 | ``` 109 | 110 | For more examples, see `test/integration/Migration.hs`. 111 | 112 | ## FAQs 113 | 114 | * I don't know the `SqlType` corresponding to my column's Haskell type 115 | 116 | As a general rule, complicated JSON serialization will be `SqlBlob`, but 117 | it might be `SqlString` for simpler data types. You can always choose one, 118 | and see if Persistent complains about it needing to be another type. 119 | 120 | For example, you might want to put `SqlBlob` first, and see if Persistent 121 | errors with something like: 122 | 123 | ``` 124 | More migrations detected: 125 | * ALTER TABLE table ALTER COLUMN col TYPE VARCHAR 126 | ``` 127 | 128 | If Persistent tries to change the type to `VARCHAR`, then it probably 129 | wants `SqlString` instead. 
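  As a hypothetical sketch (the `metadata` column is made up, and the imports are the same as in the Usage section above), guessing `SqlBlob` first for a JSON-serialized column might look like:

  ```haskell
  -- Hypothetical: add a nullable, JSON-serialized "metadata" column, guessing SqlBlob.
  -- If persistent then reports a migration changing this column to VARCHAR,
  -- switch the SqlType to SqlString instead.
  addMetadata :: Operation
  addMetadata = AddColumn "person" (Column "metadata" SqlBlob []) Nothing
  ```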
130 | -------------------------------------------------------------------------------- /src/Database/Persist/Migration/Postgres.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Module : Database.Persist.Migration.Postgres 3 | Maintainer : Brandon Chinn 4 | Stability : experimental 5 | Portability : portable 6 | 7 | Defines the migration backend for PostgreSQL. 8 | -} 9 | {-# LANGUAGE LambdaCase #-} 10 | {-# LANGUAGE OverloadedStrings #-} 11 | {-# LANGUAGE RecordWildCards #-} 12 | {-# LANGUAGE ViewPatterns #-} 13 | 14 | module Database.Persist.Migration.Postgres 15 | ( backend 16 | , getMigration 17 | , runMigration 18 | ) where 19 | 20 | import Data.Maybe (maybeToList) 21 | import Data.Text (Text) 22 | import qualified Data.Text as Text 23 | import Database.Persist.Migration 24 | import qualified Database.Persist.Migration.Core as Migration 25 | import Database.Persist.Sql (SqlPersistT) 26 | 27 | -- | Run a migration with the Postgres backend. 28 | runMigration :: MigrateSettings -> Migration -> SqlPersistT IO () 29 | runMigration = Migration.runMigration backend 30 | 31 | -- | Get a migration with the Postgres backend. 32 | getMigration :: MigrateSettings -> Migration -> SqlPersistT IO [MigrateSql] 33 | getMigration = Migration.getMigration backend 34 | 35 | -- | The migration backend for Postgres. 36 | backend :: MigrateBackend 37 | backend = MigrateBackend 38 | { getMigrationSql = getMigrationSql' 39 | } 40 | 41 | getMigrationSql' :: Operation -> SqlPersistT IO [MigrateSql] 42 | 43 | getMigrationSql' CreateTable{..} = fromMigrateSql $ mapSql 44 | (\sql -> Text.unwords ["CREATE TABLE IF NOT EXISTS", quote name, "(", sql, ")"]) 45 | $ concatSql uncommas tableDefs 46 | where 47 | tableDefs = map showColumn schema ++ map showTableConstraint constraints 48 | 49 | getMigrationSql' DropTable{..} = fromWords 50 | ["DROP TABLE IF EXISTS", quote table] 51 | 52 | getMigrationSql' RenameTable{..} = fromWords 53 | ["ALTER TABLE", quote from, "RENAME TO", quote to] 54 | 55 | getMigrationSql' AddConstraint{..} = fromWords 56 | ["ALTER TABLE", quote table, statement] 57 | where 58 | statement = case constraint of 59 | PrimaryKey cols -> Text.unwords ["ADD PRIMARY KEY (", uncommas' cols, ")"] 60 | Unique label cols -> Text.unwords 61 | ["ADD CONSTRAINT", quote label, "UNIQUE (", uncommas' cols, ")"] 62 | 63 | getMigrationSql' DropConstraint{..} = fromWords 64 | ["ALTER TABLE", quote table, "DROP CONSTRAINT", constraintName] 65 | 66 | getMigrationSql' AddColumn{..} = return $ createQuery : maybeToList alterQuery 67 | where 68 | Column{..} = column 69 | alterTable = Text.unwords ["ALTER TABLE", quote table] 70 | -- The CREATE query with the default specified by AddColumn{colDefault} 71 | withoutDefault = showColumn $ column { colProps = filter (not . isDefault) colProps } 72 | createDefault = case colDefault of 73 | Nothing -> MigrateSql "" [] 74 | Just def -> MigrateSql "DEFAULT ?" [def] 75 | createQuery = concatSql 76 | (\sqls -> Text.unwords $ [alterTable, "ADD COLUMN"] ++ sqls) 77 | [withoutDefault, createDefault] 78 | -- The ALTER query to drop/set the default (if colDefault was set) 79 | alterQuery = 80 | let action = case getDefault colProps of 81 | Nothing -> pureSql "DROP DEFAULT" 82 | Just v -> MigrateSql "SET DEFAULT ?" 
[v] 83 | alterQuery' = mapSql 84 | (\sql -> Text.unwords [alterTable, "ALTER COLUMN", quote colName, sql]) 85 | action 86 | in alterQuery' <$ colDefault 87 | 88 | getMigrationSql' RenameColumn{..} = fromWords 89 | ["ALTER TABLE", quote table, "RENAME COLUMN", quote from, "TO", quote to] 90 | 91 | getMigrationSql' DropColumn{..} = fromWords 92 | ["ALTER TABLE", quote tab, "DROP COLUMN", quote col] 93 | where 94 | (tab, col) = columnId 95 | 96 | getMigrationSql' RawOperation{..} = rawOp 97 | 98 | {- Helpers -} 99 | 100 | fromMigrateSql :: Monad m => MigrateSql -> m [MigrateSql] 101 | fromMigrateSql = return . pure 102 | 103 | fromWords :: Monad m => [Text] -> m [MigrateSql] 104 | fromWords = fromMigrateSql . pureSql . Text.unwords 105 | 106 | -- | True if the given ColumnProp sets a default. 107 | isDefault :: ColumnProp -> Bool 108 | isDefault (Default _) = True 109 | isDefault _ = False 110 | 111 | -- | Get the default value from the given ColumnProps. 112 | getDefault :: [ColumnProp] -> Maybe PersistValue 113 | getDefault [] = Nothing 114 | getDefault (Default v : _) = Just v 115 | getDefault (_:props) = getDefault props 116 | 117 | -- | Show a 'Column'. 118 | showColumn :: Column -> MigrateSql 119 | showColumn Column{..} = concatSql 120 | (\sqls -> Text.unwords $ [quote colName, sqlType] ++ sqls) 121 | $ map showColumnProp colProps 122 | where 123 | sqlType = case (AutoIncrement `elem` colProps, colType) of 124 | (True, SqlInt32) -> "SERIAL" 125 | (True, SqlInt64) -> "BIGSERIAL" 126 | _ -> showSqlType colType 127 | 128 | -- | Show a 'SqlType'. See `showSqlType` from `Database.Persist.Postgresql`. 129 | showSqlType :: SqlType -> Text 130 | showSqlType = \case 131 | SqlString -> "VARCHAR" 132 | SqlInt32 -> "INT4" 133 | SqlInt64 -> "INT8" 134 | SqlReal -> "DOUBLE PRECISION" 135 | SqlNumeric s prec -> Text.concat ["NUMERIC(", showT s, ",", showT prec, ")"] 136 | SqlDay -> "DATE" 137 | SqlTime -> "TIME" 138 | SqlDayTime -> "TIMESTAMP WITH TIME ZONE" 139 | SqlBlob -> "BYTEA" 140 | SqlBool -> "BOOLEAN" 141 | SqlOther (Text.toLower -> "integer") -> "INT4" 142 | SqlOther t -> t 143 | where 144 | showT = Text.pack . show 145 | 146 | -- | Show a 'ColumnProp'. 147 | showColumnProp :: ColumnProp -> MigrateSql 148 | showColumnProp = \case 149 | NotNull -> pureSql "NOT NULL" 150 | References (tab, col) -> pureSql $ Text.unwords 151 | ["REFERENCES", quote tab, "(", quote col, ")"] 152 | AutoIncrement -> pureSql "" 153 | Default v -> MigrateSql "DEFAULT ?" [v] 154 | 155 | -- | Show a `TableConstraint`. 156 | showTableConstraint :: TableConstraint -> MigrateSql 157 | showTableConstraint = pureSql . \case 158 | PrimaryKey cols -> Text.unwords ["PRIMARY KEY (", uncommas' cols, ")"] 159 | Unique name cols -> Text.unwords ["CONSTRAINT", quote name, "UNIQUE (", uncommas' cols, ")"] 160 | -------------------------------------------------------------------------------- /src/Database/Persist/Migration/Core.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Module : Database.Persist.Migration.Core 3 | Maintainer : Brandon Chinn 4 | Stability : experimental 5 | Portability : portable 6 | 7 | Defines a migration framework for the persistent library. 
8 | -} 9 | {-# LANGUAGE DuplicateRecordFields #-} 10 | {-# LANGUAGE ExistentialQuantification #-} 11 | {-# LANGUAGE FlexibleContexts #-} 12 | {-# LANGUAGE LambdaCase #-} 13 | {-# LANGUAGE OverloadedStrings #-} 14 | {-# LANGUAGE RankNTypes #-} 15 | {-# LANGUAGE RecordWildCards #-} 16 | {-# OPTIONS_GHC -fno-warn-redundant-constraints #-} 17 | 18 | module Database.Persist.Migration.Core 19 | ( Version 20 | , OperationPath 21 | , (~>) 22 | , Migration 23 | , MigrationPath(..) 24 | , opPath 25 | , MigrateSettings(..) 26 | , defaultSettings 27 | , validateMigration 28 | , runMigration 29 | , getMigration 30 | ) where 31 | 32 | import Control.Monad (unless, when) 33 | import Control.Monad.IO.Class (MonadIO(..)) 34 | import Control.Monad.Reader (mapReaderT) 35 | import Data.List (nub) 36 | import Data.Maybe (fromMaybe) 37 | import qualified Data.Text as Text 38 | import Data.Time.Clock (getCurrentTime) 39 | import Database.Persist.Migration.Backend (MigrateBackend(..)) 40 | import Database.Persist.Migration.Operation (Operation(..), validateOperation) 41 | import Database.Persist.Migration.Operation.Types 42 | (Column(..), ColumnProp(..), TableConstraint(..)) 43 | import Database.Persist.Migration.Utils.Plan (getPath) 44 | import Database.Persist.Migration.Utils.Sql (MigrateSql, executeSql) 45 | import Database.Persist.Sql 46 | (PersistValue(..), Single(..), SqlPersistT, rawExecute, rawSql) 47 | import Database.Persist.Types (SqlType(..)) 48 | 49 | -- | The version of a database. An operation migrates from the given version to another version. 50 | -- 51 | -- The version must be increasing, such that the lowest version is the first version and the highest 52 | -- version is the most up-to-date version. 53 | -- 54 | -- A version represents a version of the database schema. In other words, any set of operations 55 | -- taken to get to version X *MUST* all result in the same database schema. 56 | type Version = Int 57 | 58 | -- | The path that an operation takes. 59 | type OperationPath = (Version, Version) 60 | 61 | -- | An infix constructor for 'OperationPath'. 62 | (~>) :: Version -> Version -> OperationPath 63 | (~>) = (,) 64 | 65 | -- | A migration list that defines operations to manually migrate a database schema. 66 | type Migration = [MigrationPath] 67 | 68 | -- | A path representing the operations needed to run to get from one version of the database schema 69 | -- to the next. 70 | data MigrationPath = OperationPath := [Operation] 71 | deriving (Show) 72 | 73 | -- | Get the OperationPath in the MigrationPath. 74 | opPath :: MigrationPath -> OperationPath 75 | opPath (path := _) = path 76 | 77 | -- | Get the current version of the database, or Nothing if none exists. 
78 | getCurrVersion :: MonadIO m => MigrateBackend -> SqlPersistT m (Maybe Version) 79 | getCurrVersion backend = do 80 | -- create the persistent_migration table if it doesn't already exist 81 | mapReaderT liftIO (getMigrationSql backend migrationSchema) >>= mapM_ executeSql 82 | extractVersion <$> rawSql queryVersion [] 83 | where 84 | migrationSchema = CreateTable 85 | { name = "persistent_migration" 86 | , schema = 87 | [ Column "id" SqlInt32 [NotNull, AutoIncrement] 88 | , Column "version" SqlInt32 [NotNull] 89 | , Column "label" SqlString [] 90 | , Column "timestamp" SqlDayTime [NotNull] 91 | ] 92 | , constraints = 93 | [ PrimaryKey ["id"] 94 | ] 95 | } 96 | queryVersion = "SELECT version FROM persistent_migration ORDER BY timestamp DESC LIMIT 1" 97 | extractVersion = \case 98 | [] -> Nothing 99 | [Single v] -> Just v 100 | _ -> error "Invalid response from the database." 101 | 102 | -- | Get the list of operations to run, given the current state of the database. 103 | getOperations :: Migration -> Maybe Version -> Either (Version, Version) [Operation] 104 | getOperations migration mVersion = case getPath edges start end of 105 | Just path -> Right $ concat path 106 | Nothing -> Left (start, end) 107 | where 108 | edges = map (\(path := ops) -> (path, ops)) migration 109 | start = fromMaybe (getFirstVersion migration) mVersion 110 | end = getLatestVersion migration 111 | 112 | -- | Get the first version in the given migration. 113 | getFirstVersion :: Migration -> Version 114 | getFirstVersion = minimum . map (fst . opPath) 115 | 116 | -- | Get the most up-to-date version in the given migration. 117 | getLatestVersion :: Migration -> Version 118 | getLatestVersion = maximum . map (snd . opPath) 119 | 120 | {- Migration plan and execution -} 121 | 122 | -- | Settings to customize migration steps. 123 | newtype MigrateSettings = MigrateSettings 124 | { versionToLabel :: Version -> Maybe String 125 | -- ^ A function to optionally label certain versions 126 | } 127 | 128 | -- | Default migration settings. 129 | defaultSettings :: MigrateSettings 130 | defaultSettings = MigrateSettings 131 | { versionToLabel = const Nothing 132 | } 133 | 134 | -- | Validate the given migration. 135 | validateMigration :: Migration -> Either String () 136 | validateMigration migration = do 137 | unless (allIncreasing opVersions) $ 138 | Left "Operation versions must be monotonically increasing" 139 | when (hasDuplicates opVersions) $ 140 | Left "There may only be one operation per pair of versions" 141 | where 142 | opVersions = map opPath migration 143 | allIncreasing = all (uncurry (<)) 144 | hasDuplicates l = length (nub l) < length l 145 | 146 | -- | Run the given migration. After successful completion, saves the migration to the database. 
147 | runMigration :: MonadIO m => MigrateBackend -> MigrateSettings -> Migration -> SqlPersistT m () 148 | runMigration backend settings@MigrateSettings{..} migration = do 149 | currVersion <- getCurrVersion backend 150 | let latestVersion = getLatestVersion migration 151 | case currVersion of 152 | Just current | current >= latestVersion -> pure () 153 | _ -> do 154 | getMigration backend settings migration >>= mapM_ executeSql 155 | now <- liftIO getCurrentTime 156 | rawExecute "INSERT INTO persistent_migration(version, label, timestamp) VALUES (?, ?, ?)" 157 | [ PersistInt64 $ fromIntegral latestVersion 158 | , PersistText $ Text.pack $ fromMaybe (show latestVersion) $ versionToLabel latestVersion 159 | , PersistUTCTime now 160 | ] 161 | 162 | -- | Get the SQL queries for the given migration. 163 | getMigration :: MonadIO m 164 | => MigrateBackend 165 | -> MigrateSettings 166 | -> Migration 167 | -> SqlPersistT m [MigrateSql] 168 | getMigration backend _ migration = do 169 | either error return $ validateMigration migration 170 | currVersion <- getCurrVersion backend 171 | operations <- either badPath return $ getOperations migration currVersion 172 | either error return $ mapM_ validateOperation operations 173 | concatMapM (mapReaderT liftIO . getMigrationSql backend) operations 174 | where 175 | badPath (start, end) = error $ "Could not find path: " ++ show start ++ " ~> " ++ show end 176 | -- Utilities 177 | concatMapM f = fmap concat . mapM f 178 | -------------------------------------------------------------------------------- /test/integration/Migration.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE AllowAmbiguousTypes #-} 2 | {-# LANGUAGE CPP #-} 3 | {-# LANGUAGE DataKinds #-} 4 | {-# LANGUAGE DerivingStrategies #-} 5 | {-# LANGUAGE ExistentialQuantification #-} 6 | {-# LANGUAGE FlexibleContexts #-} 7 | {-# LANGUAGE FlexibleInstances #-} 8 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 9 | {-# LANGUAGE LambdaCase #-} 10 | {-# LANGUAGE MultiParamTypeClasses #-} 11 | {-# LANGUAGE OverloadedStrings #-} 12 | {-# LANGUAGE QuasiQuotes #-} 13 | {-# LANGUAGE RecordWildCards #-} 14 | {-# LANGUAGE ScopedTypeVariables #-} 15 | {-# LANGUAGE StandaloneDeriving #-} 16 | {-# LANGUAGE TemplateHaskell #-} 17 | {-# LANGUAGE TypeFamilies #-} 18 | {-# LANGUAGE UndecidableInstances #-} 19 | {-# OPTIONS_GHC -fno-warn-unused-top-binds #-} 20 | 21 | module Migration (testMigrations) where 22 | 23 | import Control.Exception (finally) 24 | import Control.Monad (unless, when) 25 | import Data.ByteString.Lazy (ByteString, fromStrict) 26 | import Data.Maybe (mapMaybe) 27 | #if !MIN_VERSION_base(4,11,0) 28 | import Data.Monoid ((<>)) 29 | #endif 30 | import Data.Pool (Pool) 31 | import Data.Text (Text) 32 | import Data.Yaml (array, encode, object, (.=)) 33 | import Database.Persist (Entity(..), get, insertKey, insertMany_, selectList) 34 | import Database.Persist.Migration 35 | import Database.Persist.Sql (Single(..), SqlBackend, SqlPersistT, rawExecute) 36 | import Database.Persist.TH 37 | (mkMigrate, mkPersist, persistLowerCase, share, sqlSettings) 38 | import Test.Tasty (TestTree, testGroup) 39 | 40 | import Utils.Goldens (goldenVsString) 41 | import Utils.RunSql (runMigration, runSql) 42 | 43 | {- Schema and migration -} 44 | 45 | share [mkPersist sqlSettings, mkMigrate "autoMigration"] [persistLowerCase| 46 | Person 47 | name String 48 | hometown CityId 49 | gender String Maybe 50 | colorblind Bool 51 | deriving Show 52 | City 53 | name String 54 | 
state String 55 | UniqueCity name state 56 | deriving Show 57 | |] 58 | 59 | manualMigration :: Migration 60 | manualMigration = 61 | -- create tables 62 | [ 0 ~> 1 := 63 | [ CreateTable 64 | { name = "city" 65 | , schema = 66 | [ Column "id" SqlInt64 [NotNull, AutoIncrement] 67 | , Column "name" SqlString [NotNull] 68 | , Column "state" SqlString [NotNull] 69 | ] 70 | , constraints = 71 | [ PrimaryKey ["id"] 72 | , Unique "unique_city" ["state", "name"] 73 | ] 74 | } 75 | , CreateTable 76 | { name = "person" 77 | , schema = 78 | [ Column "id" SqlInt64 [NotNull, AutoIncrement] 79 | , Column "name" SqlString [NotNull] 80 | , Column "hometown" SqlInt64 [NotNull, References ("city", "id")] 81 | ] 82 | , constraints = 83 | [ PrimaryKey ["id"] 84 | ] 85 | } 86 | ] 87 | 88 | -- add binary sex column 89 | , 1 ~> 2 := 90 | [ AddColumn "person" (Column "sex" SqlInt32 []) Nothing 91 | ] 92 | 93 | -- change binary sex to stringly gender 94 | , 2 ~> 3 := 95 | [ AddColumn "person" (Column "gender" SqlString []) Nothing 96 | , migrateGender 97 | , DropColumn ("person", "sex") 98 | ] 99 | 100 | -- shortcut for databases that hadn't already added the sex column 101 | , 1 ~> 3 := 102 | [ AddColumn "person" (Column "gender" SqlString []) Nothing 103 | ] 104 | 105 | -- add colorblind column, with everyone currently in the database being not colorblind 106 | , 3 ~> 4 := 107 | [ AddColumn "person" (Column "colorblind" SqlBool [NotNull]) 108 | (Just $ PersistBool False) 109 | ] 110 | ] 111 | where 112 | migrateGender = RawOperation "Convert binary sex column into stringly gender column" $ 113 | mapMaybe migrateGender' <$> rawSql "SELECT id, sex FROM person" [] 114 | migrateGender' = \case 115 | (_, Single Nothing) -> Nothing 116 | (Single id', Single (Just sex)) -> 117 | Just $ MigrateSql "UPDATE person SET gender = ? WHERE id = ?" 118 | [ PersistText $ sexToGender sex 119 | , PersistInt64 id' 120 | ] 121 | sexToGender :: Int -> Text 122 | sexToGender sex = if sex == 0 then "Male" else "Female" 123 | 124 | {- Test suite -} 125 | 126 | -- | A test suite for running migrations. 
127 | testMigrations :: FilePath -> MigrateBackend -> IO (Pool SqlBackend) -> TestTree 128 | testMigrations dir backend getPool = testGroup "goldens" 129 | [ testMigration' "Migrate from empty" 0 [] 130 | , testMigration' "Migrate with v1 person" 1 131 | [ insertPerson "David" [] 132 | ] 133 | , testMigration' "Migrate from sex to gender" 2 134 | [ insertPerson "David" [("sex", "0")] 135 | , insertPerson "Elizabeth" [("sex", "1")] 136 | , insertPerson "Foster" [("sex", "NULL")] 137 | ] 138 | , testMigration' "Migrate with default colorblind" 4 139 | [ insertPerson "David" [] 140 | ] 141 | , testMigration' "Migrations are idempotent" (length manualMigration) 142 | [ insertPerson "David" [("colorblind", "TRUE")] 143 | ] 144 | ] 145 | where 146 | testMigration' = testMigration dir backend getPool 147 | insertPerson name extra = 148 | let cols = ["name", "hometown"] ++ map fst extra 149 | vals = ["'" <> name <> "'", "1"] ++ map snd extra 150 | in rawExecute 151 | ("INSERT INTO person(" <> uncommas' cols <> ") VALUES (" <> uncommas vals <> ")") 152 | [] 153 | 154 | -- | Run a test where: 155 | -- * the first N migration paths have been migrated (in the `manualMigration` list, NOT the 156 | -- version number) 157 | -- * the given query is run to populate the database 158 | -- * the remaining migration paths are migrated 159 | -- * insert some data into the database 160 | -- * output "SELECT * FROM person" to goldens file 161 | -- * clean up database 162 | testMigration 163 | :: FilePath 164 | -> MigrateBackend 165 | -> IO (Pool SqlBackend) 166 | -> String 167 | -> Int 168 | -> [SqlPersistT IO ()] 169 | -> TestTree 170 | testMigration dir backend getPool name n populateDb = goldenVsString dir name $ do 171 | pool <- getPool 172 | let runMigration' = runMigration backend pool 173 | city = CityKey 1 174 | insertCity = insertKey city $ City "Berkeley" "CA" 175 | 176 | res <- (`finally` cleanup pool) $ do 177 | -- test setup 178 | unless (null setupMigration) $ do 179 | runMigration' setupMigration 180 | -- populateDb scripts can use hometown=1 181 | runSql pool insertCity 182 | needsMore <- runSql pool $ hasMigration autoMigration 183 | if n >= length manualMigration 184 | then when needsMore $ fail "More migrations are detected" 185 | else unless needsMore $ fail "No more migrations detected" 186 | mapM_ (runSql pool) populateDb 187 | 188 | -- run migrations and check inserting current models works 189 | runMigration' manualMigration 190 | runSql pool $ do 191 | checkMigration autoMigration 192 | get city >>= \case 193 | Just _ -> return () 194 | Nothing -> insertCity 195 | insertMany_ 196 | [ Person "Alice" city (Just "Female") False 197 | , Person "Bob" city (Just "Male") True 198 | , Person "Courtney" city Nothing False 199 | ] 200 | map entityVal <$> selectList [] [] 201 | 202 | return $ showPersons res 203 | where 204 | setupMigration = take n manualMigration 205 | cleanup pool = runSql pool $ do 206 | rawExecute "DROP TABLE IF EXISTS persistent_migration" [] 207 | rawExecute "DROP TABLE IF EXISTS person" [] 208 | rawExecute "DROP TABLE IF EXISTS city" [] 209 | 210 | -- | Display a Person as a YAML object. 211 | showPersons :: [Person] -> ByteString 212 | showPersons = fromStrict . encode . array . 
map showPerson 213 | where 214 | showPerson Person{..} = object 215 | [ "name" .= personName 216 | , "hometown" .= personHometown 217 | , "gender" .= personGender 218 | , "colorblind" .= personColorblind 219 | ] 220 | -------------------------------------------------------------------------------- /test/integration/Property.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE CPP #-} 2 | {-# LANGUAGE DuplicateRecordFields #-} 3 | {-# LANGUAGE FlexibleContexts #-} 4 | {-# LANGUAGE LambdaCase #-} 5 | {-# LANGUAGE NamedFieldPuns #-} 6 | {-# LANGUAGE OverloadedStrings #-} 7 | 8 | module Property (testProperties) where 9 | 10 | import Control.Monad (unless) 11 | import Control.Monad.Catch (SomeException(..), try) 12 | import Control.Monad.IO.Class (liftIO) 13 | import Data.List (nub) 14 | import Data.Maybe (mapMaybe) 15 | #if !MIN_VERSION_base(4,11,0) 16 | import Data.Monoid ((<>)) 17 | #endif 18 | import Data.Pool (Pool) 19 | import qualified Data.Text as Text 20 | import qualified Data.Text.IO as Text 21 | import Database.Persist.Migration 22 | import Database.Persist.Sql (SqlBackend, SqlPersistT) 23 | import Test.QuickCheck 24 | import Test.QuickCheck.Monadic (PropertyM, monadicIO, pick, run, stop) 25 | import Test.QuickCheck.Property (rejected) 26 | import Test.Tasty (TestTree, testGroup) 27 | import Test.Tasty.QuickCheck (testProperty) 28 | 29 | import Utils.QuickCheck 30 | ( ColumnIdentifier(..) 31 | , CreateTable'(..) 32 | , Identifier(..) 33 | , genPersistValue 34 | , toOperation 35 | ) 36 | import Utils.RunSql (runSql) 37 | 38 | -- | A test suite for testing migration properties. 39 | testProperties :: MigrateBackend -> IO (Pool SqlBackend) -> TestTree 40 | testProperties backend getPool = testGroup "properties" 41 | [ testProperty "Create and drop tables" $ withCreateTable $ 42 | const $ return () 43 | , testProperty "Rename table" $ withCreateTable $ \(table, fkTables) -> do 44 | let tableName = ctName table 45 | fkNames = map ctName fkTables 46 | Identifier newName <- pick $ arbitrary `suchThat` 47 | ((`notElem` tableName:fkNames) . 
unIdent) 48 | runSqlPool' $ do 49 | runOperation' $ RenameTable tableName newName 50 | runOperation' $ DropTable newName 51 | , testProperty "Add UNIQUE constraint" $ withCreateTable $ \(table, _) -> do 52 | let getUniqueCols = \case 53 | PrimaryKey _ -> [] 54 | Unique _ cols -> cols 55 | tableCols = map colName $ ctSchema table 56 | uniqueCols = concatMap getUniqueCols $ ctConstraints table 57 | nonUniqueCols = take 32 $ filter (`notElem` uniqueCols) tableCols 58 | if null nonUniqueCols 59 | then return False 60 | else do 61 | let uniqueName = Text.take 63 $ "unique_" <> Text.intercalate "_" nonUniqueCols 62 | runSqlPool' $ 63 | runOperation' $ AddConstraint (ctName table) $ Unique uniqueName nonUniqueCols 64 | return True 65 | , testProperty "Drop UNIQUE constraint" $ withCreateTable $ \(table, _) -> do 66 | let getUniqueName = \case 67 | PrimaryKey _ -> Nothing 68 | Unique n _ -> Just n 69 | uniqueNames = mapMaybe getUniqueName $ ctConstraints table 70 | if null uniqueNames 71 | then return False 72 | else do 73 | uniqueName <- pick $ elements uniqueNames 74 | runSqlPool' $ runOperation' $ DropConstraint (ctName table) uniqueName 75 | return True 76 | , testProperty "Add column" $ withCreateTable $ \(table, fkTables) -> do 77 | -- generate a new column 78 | col <- pick arbitrary 79 | 80 | -- pick a new name for the column 81 | let cols = map colName $ ctSchema table 82 | Identifier newName <- pick $ arbitrary `suchThat` ((`notElem` cols) . unIdent) 83 | 84 | -- if foreign key tables exist, update any foreign key references to point to one of those. 85 | -- otherwise, strip out any foreign key references. 86 | let splitProps [] = (False, []) 87 | splitProps (x:xs) = 88 | let (mRef, props) = splitProps xs 89 | in case x of 90 | References _ -> (True, props) 91 | AutoIncrement -> (mRef, props) -- don't test adding AutoIncrement columns 92 | _ -> (mRef, x:props) 93 | col' <- 94 | let (hasReference, props) = splitProps $ colProps col 95 | in if null fkTables || not hasReference 96 | then return col{colProps = props} 97 | else do 98 | fkTable <- pick $ ctName <$> elements fkTables 99 | return $ col{colProps = References (fkTable, "id") : props} 100 | 101 | -- pick a default value according to nullability and sqltype 102 | defaultVal <- pick $ genPersistValue $ colType col 103 | defaultVal' <- if NotNull `elem` colProps col 104 | then return $ Just defaultVal 105 | else pick $ elements [Nothing, Just defaultVal] 106 | 107 | runSqlPool' $ runOperation' $ AddColumn (ctName table) col'{colName = newName} defaultVal' 108 | , testProperty "Rename column" $ withCreateTable $ \(table, _) -> do 109 | let cols = map colName $ ctSchema table 110 | col <- pick $ elements cols 111 | ColumnIdentifier newName <- pick $ arbitrary `suchThat` ((`notElem` cols) . unColIdent) 112 | runSqlPool' $ runOperation' $ RenameColumn (ctName table) col newName 113 | , testProperty "Drop column" $ withCreateTable $ \(table, _) -> do 114 | let cols = map colName $ ctSchema table 115 | col <- pick $ elements cols 116 | runSqlPool' $ runOperation' $ DropColumn (ctName table, col) 117 | ] 118 | where 119 | runSqlPool' = runSqlPool getPool 120 | runOperation' = runOperation backend 121 | -- | Create a table and its foreign key dependencies, then run the given action, which should 122 | -- return False if the test case should be discarded (`()` == True). 
The tables will be dropped 123 | -- when finished 124 | withCreateTable :: PseudoBool a => ((CreateTable', [CreateTable']) -> PropertyM IO a) -> Property 125 | withCreateTable action = monadicIO $ do 126 | table <- pick arbitrary 127 | fkTables <- pick $ getForeignKeyTables table 128 | runSqlPool' $ mapM_ (runOperation' . toOperation) (fkTables ++ [table]) 129 | isSuccessful <- toBool <$> action (table, fkTables) 130 | runSqlPool' $ mapM_ dropTable' (table:fkTables) 131 | unless isSuccessful $ stop rejected 132 | dropTable' CreateTable'{ctName} = runOperation' $ DropTable ctName 133 | 134 | {- Helpers -} 135 | 136 | -- | Run the given Sql query in the given SqlBackend. 137 | runSqlPool :: IO (Pool SqlBackend) -> SqlPersistT IO () -> PropertyM IO () 138 | runSqlPool getPool f = run $ getPool >>= \pool -> runSql pool f 139 | 140 | -- | Run the given operation. 141 | runOperation :: MigrateBackend -> Operation -> SqlPersistT IO () 142 | runOperation backend operation = do 143 | case validateOperation operation of 144 | Right () -> return () 145 | Left msg -> do 146 | liftIO $ putStrLn "\n*** Failure: validateOperation failed" 147 | fail msg 148 | 149 | getMigrationSql backend operation >>= mapM_ rawExecutePrint 150 | where 151 | -- if rawExecute fails, show the sql query run 152 | rawExecutePrint sql = try (executeSql sql) >>= \case 153 | Right () -> return () 154 | Left (SomeException e) -> do 155 | liftIO $ do 156 | putStrLn "\n*** Failure:" 157 | Text.putStrLn $ sqlText sql 158 | print $ sqlVals sql 159 | fail $ show e 160 | 161 | -- | Get the CreateTable operations that are necessary for the foreign keys in the 162 | -- given CreateTable operation. 163 | getForeignKeyTables :: CreateTable' -> Gen [CreateTable'] 164 | getForeignKeyTables ct = 165 | zipWith modifyTable neededTables <$> vectorOf (length neededTables) arbitrary 166 | where 167 | neededTables = nub $ concatMap (mapMaybe getReferenceTable . colProps) $ ctSchema ct 168 | getReferenceTable = \case 169 | References (table, _) -> Just table 170 | _ -> Nothing 171 | isReference = \case 172 | References _ -> True 173 | _ -> False 174 | noFKs = filter (not . isReference) . colProps 175 | modifyTable name ct' = ct' 176 | { ctName = name 177 | , ctSchema = map (\col -> col{colProps = noFKs col}) $ ctSchema ct' 178 | } 179 | 180 | class PseudoBool a where 181 | toBool :: a -> Bool 182 | instance PseudoBool () where 183 | toBool = const True 184 | instance PseudoBool Bool where 185 | toBool = id 186 | -------------------------------------------------------------------------------- /test/utils/Utils/QuickCheck.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE CPP #-} 2 | {-# LANGUAGE LambdaCase #-} 3 | {-# LANGUAGE OverloadedStrings #-} 4 | {-# LANGUAGE RecordWildCards #-} 5 | {-# OPTIONS_GHC -fno-warn-orphans #-} 6 | 7 | module Utils.QuickCheck 8 | ( CreateTable'(..) 9 | , toOperation 10 | , ColumnIdentifier(..) 11 | , Identifier(..) 12 | , genPersistValue 13 | -- * Utilities 14 | , DistinctList(..) 
15 | , mapSome 16 | , group 17 | ) where 18 | 19 | import Control.Monad ((>=>)) 20 | import Data.ByteString (ByteString) 21 | import qualified Data.ByteString.Char8 as ByteString 22 | import Data.List (nub) 23 | #if !MIN_VERSION_base(4,11,0) 24 | import Data.Monoid ((<>)) 25 | #endif 26 | import Data.Text (Text) 27 | import qualified Data.Text as Text 28 | import qualified Data.Text.Encoding as Text 29 | import Data.Time.Calendar (Day, fromGregorian) 30 | import Data.Time.Clock (UTCTime(..), secondsToDiffTime) 31 | import Data.Time.LocalTime (TimeOfDay(..)) 32 | import Database.Persist.Migration 33 | (Column(..), ColumnProp(..), Operation(..), TableConstraint(..)) 34 | import Database.Persist.Sql (PersistValue(..), SqlType(..)) 35 | import Test.QuickCheck hiding (scale) 36 | 37 | -- | A duplicate of the CreateTable constructor for testing. 38 | data CreateTable' = CreateTable' 39 | { ctName :: Text 40 | , ctSchema :: [Column] 41 | , ctConstraints :: [TableConstraint] 42 | } deriving (Show) 43 | 44 | toOperation :: CreateTable' -> Operation 45 | toOperation CreateTable'{..} = CreateTable ctName ctSchema ctConstraints 46 | 47 | instance Arbitrary CreateTable' where 48 | arbitrary = do 49 | name <- arbitrary 50 | let Identifier ctName = name 51 | 52 | -- get names of tables this table can have foreign keys towards 53 | DistinctList colNames <- arbitrary 54 | -- max out at 100 names 55 | let colNames' = take 100 $ map unColIdent colNames 56 | 57 | -- generate schema 58 | DistinctList tableNames <- arbitrary 59 | let tableNames' = filter (/= name) tableNames 60 | cols <- vectorOf (length colNames') $ genColumn tableNames' 61 | let idCol = Column "id" SqlInt32 [NotNull, AutoIncrement] 62 | cols' = zipWith (\colName' col -> col{colName = colName'}) colNames' cols 63 | ctSchema = idCol : cols' 64 | 65 | -- all of the columns that will be unique 66 | uniqueCols <- sublistOf $ map colName cols' 67 | let mkUnique names = 68 | -- constraint name can be max 63 characters 69 | let namespace = Text.take 10 ctName 70 | constraintName = Text.take 63 $ "unique_" <> Text.intercalate "_" (namespace:names) 71 | in Unique constraintName names 72 | -- unique constraints should not have more than 32 columns 73 | max32 l = if length l > 32 74 | then take 32 l : max32 (drop 32 l) 75 | else [l] 76 | uniqueConstraints <- map mkUnique . concatMap max32 <$> group uniqueCols 77 | 78 | let ctConstraints = PrimaryKey ["id"] : uniqueConstraints 79 | 80 | return $ CreateTable'{..} 81 | 82 | -- | Generate an arbitrary Column with a possibly pre-determined name. 83 | -- 84 | -- Also given the set of table names that can be referenced by foreign keys. 85 | genColumn :: [Identifier] -> Gen Column 86 | genColumn tableNames = do 87 | colName <- fmap unColIdent arbitrary 88 | 89 | references <- if null tableNames 90 | then return [] 91 | else do 92 | Identifier table <- elements tableNames 93 | arbitrarySingleton 10 $ References (table, "id") 94 | 95 | colType <- if null references 96 | then arbitrary 97 | else return SqlInt32 98 | 99 | autoIncrement <- arbitrarySingleton 1 AutoIncrement 100 | notNull <- arbitrarySingleton 50 NotNull 101 | colDefault <- case autoIncrement of 102 | [] -> arbitrarySingleton 10 . 
Default =<< genPersistValue colType 103 | _ -> return [] 104 | 105 | let colProps = notNull ++ autoIncrement ++ references ++ colDefault 106 | 107 | return Column{..} 108 | where 109 | -- get list with x% chance of having the given element and (100 - x)% chance of 110 | -- being an empty list 111 | arbitrarySingleton x v = frequency [(x, pure [v]), (100 - x, pure [])] 112 | 113 | instance Arbitrary Column where 114 | arbitrary = listOf arbitrary >>= genColumn 115 | 116 | newtype Identifier = Identifier { unIdent :: Text } 117 | deriving (Show,Eq) 118 | 119 | instance Arbitrary Identifier where 120 | arbitrary = do 121 | first <- elements underletter 122 | rest <- listOf $ elements $ underletter ++ ['0'..'9'] 123 | return . Identifier . Text.pack . take 63 $ first : rest 124 | where 125 | underletter = '_':['a'..'z'] 126 | 127 | newtype ColumnIdentifier = ColumnIdentifier { unColIdent :: Text } 128 | deriving (Show,Eq) 129 | 130 | instance Arbitrary ColumnIdentifier where 131 | arbitrary = do 132 | Identifier ident <- arbitrary 133 | if ident `elem` invalidIdents 134 | then arbitrary 135 | else return $ ColumnIdentifier ident 136 | where 137 | invalidIdents = 138 | [ "id" 139 | -- https://www.postgresql.org/docs/9.6/static/ddl-system-columns.html 140 | , "oid", "tableoid", "xmin", "cmin", "xmax", "cmax", "ctid" 141 | ] 142 | 143 | instance Arbitrary SqlType where 144 | arbitrary = do 145 | numPrecision <- choose (1, 1000) 146 | numScale <- choose (0, numPrecision) 147 | elements 148 | [ SqlString 149 | , SqlInt32 150 | , SqlInt64 151 | , SqlReal 152 | , SqlNumeric numPrecision numScale 153 | , SqlBool 154 | , SqlDay 155 | , SqlTime 156 | , SqlDayTime 157 | , SqlBlob 158 | ] 159 | 160 | -- | Generate an arbitrary PersistValue for the given SqlType. 161 | genPersistValue :: SqlType -> Gen PersistValue 162 | genPersistValue = \case 163 | SqlString -> PersistText . Text.map cleanText <$> arbitrary 164 | SqlInt32 -> PersistInt64 <$> choose (-2147483648, 2147483647) 165 | SqlInt64 -> PersistInt64 <$> choose (-2147483648, 2147483647) 166 | SqlReal -> PersistDouble . cleanDouble <$> arbitrary 167 | SqlNumeric precision scale -> do 168 | v <- choose (0, 1) :: Gen Double 169 | let v' = truncate (v * (10 ^ precision)) :: Integer 170 | x = fromIntegral v' / (10 ^ scale) 171 | return . PersistRational . toRational . cleanDouble $ x 172 | SqlBool -> PersistBool <$> arbitrary 173 | SqlDay -> PersistDay <$> arbitrary 174 | SqlTime -> PersistTimeOfDay <$> arbitrary 175 | SqlDayTime -> PersistUTCTime <$> arbitrary 176 | SqlBlob -> PersistByteString . ByteString.map cleanText <$> arbitrary 177 | SqlOther _ -> error "SqlOther not supported" 178 | where 179 | cleanDouble :: Double -> Double 180 | cleanDouble x = if isInfinite x || isNaN x then 0 else x 181 | -- https://github.com/lpsmith/postgresql-simple/issues/169 182 | cleanText :: Char -> Char 183 | cleanText '?' = '_' 184 | cleanText c = c 185 | 186 | {- Utilities -} 187 | 188 | newtype DistinctList a = DistinctList { unDistinctList :: [a] } 189 | deriving (Show) 190 | 191 | instance (Arbitrary a, Eq a) => Arbitrary (DistinctList a) where 192 | arbitrary = DistinctList . nub <$> listOf arbitrary 193 | 194 | instance Arbitrary Text where 195 | arbitrary = Text.pack . 
getASCIIString <$> arbitrary 196 | 197 | instance Arbitrary ByteString where 198 | arbitrary = Text.encodeUtf8 <$> arbitrary 199 | 200 | instance Arbitrary Day where 201 | arbitrary = fromGregorian <$> choose (1, 294276) <*> choose (1, 12) <*> choose (1, 31) 202 | 203 | instance Arbitrary TimeOfDay where 204 | arbitrary = TimeOfDay <$> choose (0, 23) <*> choose (0, 59) <*> genSeconds 205 | where 206 | genSeconds = fromRational . toRational <$> (choose (0, 60) :: Gen Double) 207 | 208 | instance Arbitrary UTCTime where 209 | arbitrary = UTCTime <$> arbitrary <*> (secondsToDiffTime <$> choose (0, 86400)) 210 | 211 | -- | Randomly modify at least one element in the list with the given function. 212 | mapSome :: (a -> a) -> [a] -> Gen [a] 213 | mapSome _ [] = return [] 214 | mapSome f l = do 215 | i <- choose (0, length l - 1) 216 | let (half1, half2) = splitAt i l 217 | modify = mapM $ \x -> do 218 | shouldModify <- arbitrary 219 | return $ if shouldModify then f x else x 220 | half1' <- modify half1 221 | half2' <- modify $ tail half2 222 | return $ half1' ++ [f $ head half2] ++ half2' 223 | 224 | -- | Randomly group the given list. 225 | group :: [a] -> Gen [[a]] 226 | group = shuffle >=> partition [] 227 | where 228 | partition res [] = return res 229 | partition res l = do 230 | i <- choose (1, length l) 231 | let (first, rest) = splitAt i l 232 | partition (first : res) rest 233 | --------------------------------------------------------------------------------