├── .travis.yml ├── Setup.hs ├── test ├── Main.hs ├── Looper.hs ├── Unions.hs ├── Schemas │ ├── OpenApi2Spec.hs │ └── SOPSpec.hs ├── Generators.hs └── SchemasSpec.hs ├── default.nix ├── hie.yaml ├── shell.nix ├── .hlint.yaml ├── nix ├── default.nix ├── sources.json └── sources.nix ├── .gitignore ├── LICENSE ├── .github └── workflows │ └── integration.yaml ├── CHANGELOG.md ├── src ├── Schemas │ ├── Attempt.hs │ ├── SOP.hs │ ├── OpenApi2.hs │ ├── Class.hs │ ├── Untyped.hs │ └── Internal.hs └── Schemas.hs ├── example ├── Person3.hs ├── Person4.hs ├── Person.hs ├── Walkthrough.lhs └── Person2.hs ├── schemas.cabal └── README.md /.travis.yml: -------------------------------------------------------------------------------- 1 | language: nix 2 | -------------------------------------------------------------------------------- /Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /test/Main.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -F -pgmF hspec-discover #-} 2 | 3 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | { ghc ? "ghc883" }: 2 | (import ./nix { inherit ghc; }).schemas 3 | -------------------------------------------------------------------------------- /hie.yaml: -------------------------------------------------------------------------------- 1 | cradle: 2 | cabal: 3 | - path: "src" 4 | component: "lib:schemas" 5 | - path: "test" 6 | component: "schemas:test:spec" 7 | - path: "example" 8 | component: "schemas:test:spec" -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | { ghc ? "ghc883" }: 2 | let 3 | nix = import ./nix { inherit ghc; }; 4 | in 5 | 6 | nix.schemas.env.overrideAttrs (old: { 7 | buildInputs = old.buildInputs ++ [nix.haskellPackages.ghcide]; 8 | }) 9 | -------------------------------------------------------------------------------- /.hlint.yaml: -------------------------------------------------------------------------------- 1 | - ignore: 2 | name: "Use fmap" 3 | - ignore: 4 | name: "Use <$>" 5 | - ignore: 6 | name: "Use record patterns" 7 | - ignore: 8 | name: "Unused LANGUAGE pragma" 9 | within: Schemas 10 | - ignore: 11 | name: "Use camelCase" 12 | -------------------------------------------------------------------------------- /nix/default.nix: -------------------------------------------------------------------------------- 1 | let 2 | pkgs0 = (import ./sources.nix).nixpkgs; 3 | in 4 | { ghc 5 | }: 6 | rec { 7 | pkgs = import pkgs0 {}; 8 | haskellPackages = pkgs.haskell.packages.${ghc}; 9 | schemas = haskellPackages.callCabal2nix "schemas" ../. 
{}; 10 | } 11 | -------------------------------------------------------------------------------- /test/Looper.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | {-# LANGUAGE OverloadedLabels #-} 3 | {-# LANGUAGE OverloadedLists #-} 4 | {-# LANGUAGE OverloadedStrings #-} 5 | module Looper where 6 | 7 | import Data.Generics.Labels () 8 | import GHC.Generics 9 | import Schemas 10 | 11 | data Looper 12 | = Number Int 13 | | Loop Looper 14 | deriving (Eq, Generic, Show) 15 | 16 | instance HasSchema Looper where 17 | schema = named "Looper" $ oneOf 18 | [ altWith schema #_Number 19 | , altWith schema #_Loop 20 | ] 21 | 22 | looper1 = Number 1 23 | looper2 = Loop $ Number 2 24 | -------------------------------------------------------------------------------- /test/Unions.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | {-# LANGUAGE OverloadedLabels #-} 3 | {-# LANGUAGE OverloadedLists #-} 4 | module Unions where 5 | 6 | import Data.Generics.Labels () 7 | import Schemas 8 | import GHC.Generics (Generic) 9 | 10 | data Some a = Some a | None 11 | deriving (Eq, Generic, Show) 12 | 13 | schemaSomeNone, schemaNoneSome :: HasSchema a => TypedSchema (Some a) 14 | schemaSomeNone = oneOf [alt #_Some, alt #_None] 15 | schemaNoneSome = oneOf [alt #_None, alt #_Some] 16 | 17 | 18 | data Three a b = One a | Two b | Three 19 | deriving (Eq, Generic, Show) 20 | 21 | schemaThree, schemaThree' :: TypedSchema a -> TypedSchema b -> TypedSchema (Three a b) 22 | schemaThree a b = oneOf [altWith a #_One, altWith b #_Two, alt #_Three] 23 | schemaThree' a b = oneOf [alt #_Three, altWith a #_One, altWith b #_Two] 24 | -------------------------------------------------------------------------------- /test/Schemas/OpenApi2Spec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedLists #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | 4 | module Schemas.OpenApi2Spec where 5 | 6 | 7 | import qualified Data.HashMap.Strict as Map 8 | import Person 9 | import Person2 10 | import Schemas 11 | import Schemas.OpenApi2 12 | import SchemasSpec 13 | import Test.Hspec 14 | 15 | spec :: Spec 16 | spec = do 17 | let personDocument = toOpenApi2Document defaultOptions [("Person", schemaFor @Person)] 18 | describe "OpenApi2 schema" $ 19 | schemaSpec schema (definitions personDocument Map.! 
"Person") 20 | describe "toOpenApi2Document" $ do 21 | it "works for Person" $ do 22 | Map.keys (definitions personDocument) `shouldContain` ["Person"] 23 | Map.keys (failures personDocument) `shouldNotContain` ["Person"] 24 | it "works for Person2" $ do 25 | let document = toOpenApi2Document defaultOptions [("Person2", schemaFor @Person2)] 26 | Map.keys (definitions document) `shouldContain` ["Person2"] 27 | Map.keys (failures document) `shouldNotContain` ["Person2"] 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/emacs,haskell 3 | # Edit at https://www.gitignore.io/?templates=emacs,haskell 4 | 5 | ### Emacs ### 6 | # -*- mode: gitignore; -*- 7 | *~ 8 | \#*\# 9 | /.emacs.desktop 10 | /.emacs.desktop.lock 11 | *.elc 12 | auto-save-list 13 | tramp 14 | .\#* 15 | 16 | # Org-mode 17 | .org-id-locations 18 | *_archive 19 | 20 | # flymake-mode 21 | *_flymake.* 22 | 23 | # eshell files 24 | /eshell/history 25 | /eshell/lastdir 26 | 27 | # elpa packages 28 | /elpa/ 29 | 30 | # reftex files 31 | *.rel 32 | 33 | # AUCTeX auto folder 34 | /auto/ 35 | 36 | # cask packages 37 | .cask/ 38 | dist/ 39 | 40 | # Flycheck 41 | flycheck_*.el 42 | 43 | # server auth directory 44 | /server/ 45 | 46 | # projectiles files 47 | .projectile 48 | 49 | # directory configuration 50 | .dir-locals.el 51 | 52 | # network security 53 | /network-security.data 54 | 55 | 56 | ### Haskell ### 57 | dist 58 | dist-* 59 | cabal-dev 60 | *.o 61 | *.hi 62 | *.chi 63 | *.chs.h 64 | *.dyn_o 65 | *.dyn_hi 66 | .hpc 67 | .hsenv 68 | .cabal-sandbox/ 69 | cabal.sandbox.config 70 | *.prof 71 | *.aux 72 | *.hp 73 | *.eventlog 74 | .stack-work/ 75 | cabal.project.local 76 | cabal.project.local~ 77 | .HTF/ 78 | .ghc.environment.* 79 | 80 | # End of https://www.gitignore.io/api/emacs,haskell 81 | -------------------------------------------------------------------------------- /nix/sources.json: -------------------------------------------------------------------------------- 1 | { 2 | "niv": { 3 | "branch": "master", 4 | "description": "Easy dependency management for Nix projects", 5 | "homepage": "https://github.com/nmattia/niv", 6 | "owner": "nmattia", 7 | "repo": "niv", 8 | "rev": "f73bf8d584148677b01859677a63191c31911eae", 9 | "sha256": "0jlmrx633jvqrqlyhlzpvdrnim128gc81q5psz2lpp2af8p8q9qs", 10 | "type": "tarball", 11 | "url": "https://github.com/nmattia/niv/archive/f73bf8d584148677b01859677a63191c31911eae.tar.gz", 12 | "url_template": "https://github.com///archive/.tar.gz" 13 | }, 14 | "nixpkgs": { 15 | "branch": "nixpkgs-unstable", 16 | "description": "A read-only mirror of NixOS/nixpkgs tracking the released channels. Send issues and PRs to", 17 | "homepage": "https://github.com/NixOS/nixpkgs", 18 | "owner": "NixOS", 19 | "repo": "nixpkgs-channels", 20 | "rev": "7db146538e49ad4bee4b5c4fea073c38586df7e2", 21 | "sha256": "06vhwys3rpj6grxn76n1sj14wf4hn9z8bmd2k1yhcy29cqri0xhk", 22 | "type": "tarball", 23 | "url": "https://github.com/NixOS/nixpkgs-channels/archive/7db146538e49ad4bee4b5c4fea073c38586df7e2.tar.gz", 24 | "url_template": "https://github.com///archive/.tar.gz" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2019, Pepe Iborra 2 | 3 | All rights reserved. 
4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of Pepe Iborra nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 31 | -------------------------------------------------------------------------------- /.github/workflows/integration.yaml: -------------------------------------------------------------------------------- 1 | name: Haskell Integration 2 | 3 | on: 4 | push: {} 5 | 6 | pull_request: 7 | branches: 8 | - master 9 | 10 | schedule: 11 | - cron: "0 8 * * *" 12 | 13 | jobs: 14 | BuildAndTest: 15 | runs-on: ubuntu-latest 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | ghc: 20 | - "8.10.2" 21 | - "8.10.1" 22 | - "8.8.4" 23 | - "8.8.3" 24 | steps: 25 | - uses: actions/checkout@v2 26 | 27 | - uses: haskell/actions/setup@v1.1.5 28 | with: 29 | ghc-version: ${{ matrix.ghc }} 30 | 31 | - uses: actions/cache@v1 32 | with: 33 | path: ~/.cabal 34 | key: cabal-root-${{ matrix.ghc }} 35 | 36 | - name: Update Hackage snapshot 37 | run: cabal update 38 | 39 | - name: Configure library 40 | run: cabal configure --enable-documentation --enable-tests --disable-optimization 41 | 42 | - name: Build library 43 | run: cabal build 44 | 45 | - name: Run tests 46 | run: cabal test 47 | 48 | HLint: 49 | runs-on: ubuntu-latest 50 | steps: 51 | - uses: actions/checkout@v2 52 | 53 | - uses: haskell/actions/setup@v1.1.5 54 | 55 | - uses: actions/cache@v1 56 | with: 57 | path: ~/.cabal 58 | key: cabal-root-latest 59 | 60 | - name: Update Hackage snapshot 61 | run: cabal update 62 | 63 | - name: Install HLint 64 | run: cabal install --installdir=. hlint --constraint 'hlint >= 3' 65 | 66 | - name: Run HLint 67 | run: ./hlint src example test 68 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Revision history for schemas 2 | 3 | ## Unreleased 4 | * Export `FieldEncode` class members from `Schemas.SOP`. 
5 | 6 | ## 0.4.0.2 -- 2020-04-28 7 | * Fix bug in `instance HasSchema Either` 8 | 9 | ## 0.4.0.1 -- 2020-01-01 10 | * Replace breadth-first search with a greedy depth-first search 11 | 12 | ## 0.4.0 -- 2019-12-20 13 | * Handle non-termination as an effect in decoding 14 | * Union api made more consistent across tagged and non tagged 15 | * (internal) More principled TypedSchema ADT based on Sum Profunctors 16 | * Total version of 'liftPrism' 17 | * 'theSchema' renamed to 'schemaFor' 18 | * Bugfixes for decoding 19 | 20 | ## 0.3.0.2 -- 2019-10-29 21 | * Show circular schemas 22 | 23 | ## 0.3.0.1 -- 2019-10-25 24 | * Fix a bug that made OpenApi2 generation diverge. 25 | 26 | ## 0.3.0 -- 2019-10-23 27 | * Fixed a bug in isSubtypeOf for unions 28 | * Fixed exponential performance (#3) 29 | * Changed the representation of untyped schemas to remove Alternatives 30 | Alternatives are only possible on typed schemas 31 | * Added support for recursive schemas (#1) 32 | 33 | ## 0.2.0.3 -- 2019-10-13 34 | * Bug fixes and performance improvements 35 | 36 | ## 0.2.0.2 -- 2019-10-07 37 | * Change the default schema for `Either` to handle both CamelCase and lowercase 38 | 39 | ## 0.2.0.1 -- 2019-10-02 40 | * Fixed subtyping relation for arrays 41 | 42 | ## 0.2.0 -- 2019-09-29 43 | * Add OpenApi2 encoding 44 | * Change the `Semigroup` instance for typed schemas 45 | * Trimmed down dependencies slightly 46 | 47 | ## 0.1.1.0 -- 2019-09-28 48 | * Fixed several bugs in `isSubtypeOf` and `encodeWith` 49 | * Better error messages when encoding with a partial schema fails 50 | * New: 'liftPrism' and 'oneOf' 51 | 52 | ## 0.1.0.0 -- 2019-09-25 53 | 54 | * First version. Released on an unsuspecting world. 55 | -------------------------------------------------------------------------------- /test/Schemas/SOPSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts #-} 2 | {-# LANGUAGE ScopedTypeVariables #-} 3 | module Schemas.SOPSpec where 4 | 5 | import Control.Exception 6 | import Data.Either 7 | import qualified Data.List.NonEmpty as NE 8 | import Person 9 | import Person2 10 | import Person4 11 | import Schemas 12 | import Schemas.SOP 13 | import SchemasSpec 14 | import Test.Hspec 15 | 16 | spec :: Spec 17 | spec = do 18 | describe "Generics" $ do 19 | describe "Person" $ 20 | specExample pepe 21 | describe "Person2" $ 22 | specExample pepe2 23 | -- Equality across recursive schemas (Person3) is not yet supported 24 | -- describe "Person3" $ 25 | -- specExample pepe3 26 | describe "Person4" $ 27 | specExample pepe4 28 | 29 | specExample :: forall a . 
(HasGenericSchema a, HasSchema a, Eq a, Show a) => a -> Spec 30 | specExample ex = do 31 | let genSchemas = extractSchema genSchemaTyped 32 | genSchema = NE.head genSchemas 33 | genSchemaTyped = gSchema defOptions 34 | 35 | it "generic schemas are unitary" $ 36 | length genSchemas `shouldBe` 1 37 | -- it "generic schema is included in handcrafted one" $ 38 | -- NE.toList (extractSchema (schema @a)) `shouldContain` NE.toList genSchemas 39 | it "can encode to generic schema" $ do 40 | let encoder = encodeTo genSchema 41 | encoded = encodeWith genSchemaTyped ex 42 | encodedTyped = attemptSuccessOrError encoder ex 43 | shouldNotDiverge $ evaluate encoder 44 | encoder `shouldSatisfy` isRight 45 | shouldNotDiverge $ evaluate encoded 46 | shouldNotDiverge $ evaluate encodedTyped 47 | encodedTyped `shouldBe` encoded 48 | it "can decode from generic schema" $ do 49 | let decoder = decodeFrom genSchema 50 | encoded = encode ex 51 | decoded = getSuccessOrError decoder encoded 52 | decodedG = decodeWith genSchemaTyped encoded 53 | shouldNotDiverge $ evaluate decoder 54 | shouldNotDiverge $ evaluate encoded 55 | shouldNotDiverge $ evaluate decoded 56 | shouldNotDiverge $ evaluate decodedG 57 | decoder `shouldSatisfy` isSuccess 58 | decodedG `shouldBe` decoded 59 | -------------------------------------------------------------------------------- /src/Schemas/Attempt.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveTraversable #-} 2 | {-# LANGUAGE FlexibleInstances #-} 3 | {-# LANGUAGE MultiParamTypeClasses #-} 4 | 5 | module Schemas.Attempt where 6 | 7 | import Control.Applicative 8 | import Data.Functor.Classes 9 | import Control.Monad.Except 10 | import Data.Maybe 11 | 12 | -- | An applicative error type 13 | data Attempt e a 14 | = Success a 15 | | Failure e 16 | deriving (Eq, Functor, Foldable, Traversable, Show) 17 | 18 | instance Eq e => Eq1 (Attempt e) where 19 | liftEq _ (Failure e) (Failure e') = e == e' 20 | liftEq eq0 (Success a) (Success a') = eq0 a a' 21 | liftEq _ _ _ = False 22 | 23 | instance Show e => Show1 (Attempt e) where 24 | liftShowsPrec _ _ p (Failure e) = showsPrec p e 25 | liftShowsPrec shows0 _ _ (Success a) = shows "Success " . 
shows0 0 a 26 | 27 | instance Monoid e => Applicative (Attempt e) where 28 | pure = Success 29 | Success f <*> Success a = Success (f a) 30 | Failure e <*> Failure e' = Failure (e <> e') 31 | Failure e <*> _ = Failure e 32 | _ <*> Failure e = Failure e 33 | 34 | instance Monoid e => Alternative (Attempt e) where 35 | empty = Failure mempty 36 | Success a <|> _ = Success a 37 | _ <|> Success b = Success b 38 | Failure e <|> Failure e' = Failure (e <> e') 39 | 40 | instance Monoid e => Monad (Attempt e) where 41 | return = pure 42 | Success a >>= k = k a 43 | Failure e >>= _ = Failure e 44 | 45 | instance Monoid e => MonadPlus (Attempt e) 46 | 47 | instance Monoid e => MonadError e (Attempt e) where 48 | throwError = Failure 49 | catchError (Failure e) h = h e 50 | catchError (Success a) _ = Success a 51 | 52 | bindAttempt :: Attempt e a -> (a -> Attempt e b) -> Attempt e b 53 | bindAttempt (Success a) k = k a 54 | bindAttempt (Failure e) _ = Failure e 55 | 56 | runAttempt :: Attempt e a -> Either e a 57 | runAttempt = execAttempt 58 | 59 | execAttempt :: MonadError e f => Attempt e a -> f a 60 | execAttempt (Success x) = pure x 61 | execAttempt (Failure e) = throwError e 62 | 63 | -- | Partitions a result successes and failures 64 | partitionAttempts :: [Attempt e a] -> ([e], [a]) 65 | partitionAttempts xx = (mapMaybe attemptFailure xx, mapMaybe attemptSuccess xx) 66 | 67 | attemptFailure :: Attempt a1 a2 -> Maybe a1 68 | attemptFailure Success{} = Nothing 69 | attemptFailure (Failure e) = Just e 70 | 71 | attemptSuccess :: Attempt e a -> Maybe a 72 | attemptSuccess (Success a) = Just a 73 | attemptSuccess Failure{} = Nothing 74 | 75 | isSuccess :: Attempt e a -> Bool 76 | isSuccess Success{} = True 77 | isSuccess _ = False 78 | 79 | isFailure :: Attempt e a -> Bool 80 | isFailure Failure{} = True 81 | isFailure _ = False 82 | -------------------------------------------------------------------------------- /test/Generators.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DerivingStrategies #-} 2 | {-# LANGUAGE FlexibleInstances #-} 3 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 4 | {-# LANGUAGE ImpredicativeTypes #-} 5 | {-# LANGUAGE OverloadedLists #-} 6 | {-# LANGUAGE OverloadedStrings #-} 7 | {-# LANGUAGE TupleSections #-} 8 | {-# OPTIONS_GHC -Wno-orphans #-} 9 | 10 | module Generators where 11 | 12 | import Control.Lens (review) 13 | import Control.Monad 14 | import Data.Text (Text) 15 | import GHC.Exts (IsList (..)) 16 | import Numeric.Natural 17 | import Schemas 18 | import Test.QuickCheck 19 | 20 | hasOneOf :: Schema -> Bool 21 | hasOneOf (Array sc) = hasOneOf sc 22 | hasOneOf (StringMap sc) = hasOneOf sc 23 | hasOneOf (Record ff) = any (hasOneOf . fieldSchema) ff 24 | hasOneOf (OneOf _) = True 25 | hasOneOf _ = False 26 | 27 | instance Arbitrary Schema where 28 | arbitrary = sized genSchema 29 | shrink (Record [(_, Field sc True)]) = [sc] 30 | shrink (Record fields) = 31 | [ Record [(n,Field sc' req)] 32 | | (n,Field sc req) <- toList fields, sc' <- shrink sc] 33 | shrink (OneOf [sc]) = [sc] 34 | shrink (OneOf scc) = concat 35 | [[OneOf [sc'], sc'] | sc <- toList scc, sc' <- shrink sc] 36 | shrink (Array sc) = [sc] 37 | shrink (StringMap sc) = [sc] 38 | shrink (Enum xx) = Enum . 
pure <$> toList xx 39 | shrink _ = [] 40 | 41 | newtype SmallNatural = SmallNatural Natural 42 | deriving (Eq, Ord, Num) 43 | deriving newtype Show 44 | 45 | instance Arbitrary SmallNatural where 46 | arbitrary = fromIntegral <$> choose (0::Int, 10) 47 | shrink 0 = [] 48 | shrink n = [n-1] 49 | 50 | fieldNames :: [Text] 51 | fieldNames = ["field1", "field2", "field3"] 52 | 53 | constructorNames :: [Text] 54 | constructorNames = ["constructor1", "constructor2"] 55 | 56 | genSchema :: Int -> Gen Schema 57 | genSchema 0 = elements [Unit, Prim "A", Prim "B"] 58 | genSchema n = frequency 59 | [ (10,) $ Record <$> do 60 | nfields <- choose (1,2) 61 | fieldArgs <- replicateM nfields (scale (`div` succ nfields) arbitrary) 62 | return $ fromList (zipWith (\n (sc,a) -> (n, Field sc a)) fieldNames fieldArgs) 63 | , (10,) $ Array <$> scale(`div` 4) arbitrary 64 | , (10,) $ Enum <$> do 65 | n <- choose (1,2) 66 | return $ fromList $ take n ["Enum1", "Enum2"] 67 | , (1,) $ OneOf . fromList <$> listOf1 (genSchema (n`div`10)) 68 | , (5,) $ review _Union <$> do 69 | nconstructors <- choose (1,2) 70 | args <- replicateM nconstructors (genSchema (n`div` succ nconstructors)) 71 | return $ fromList $ zip constructorNames args 72 | , (50,) $ genSchema 0 73 | ] 74 | -------------------------------------------------------------------------------- /src/Schemas.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE PatternSynonyms #-} 2 | 3 | -- | As a simple example of a schema, let's consider a simple record type: 4 | -- 5 | -- @ 6 | -- import Schemas 7 | -- import Schemas.SOP 8 | -- 9 | -- data Person = Person 10 | -- { name :: String 11 | -- , age :: Int 12 | -- , addresses :: [String] 13 | -- } 14 | -- 15 | -- personSchema :: TypedSchema Person 16 | -- personSchema = record $ Person 17 | -- \<$\> fieldWith string "name" name 18 | -- \<*\> fieldWith int "age" age 19 | -- \<*\> fieldWith (list string) "addresses" addresses 20 | -- @ 21 | -- 22 | -- Or, by relying on the @HasSchema@ type class: 23 | -- 24 | -- @ 25 | -- personSchema :: TypedSchema Person 26 | -- personSchema = record $ Person 27 | -- \<$\> field "name" name 28 | -- \<*\> field "age" age 29 | -- \<*\> field "addresses" addresses 30 | -- @ 31 | -- 32 | -- Or, if the type is SOP generic: 33 | -- 34 | -- @ 35 | -- personSchema = gSchema defOptions 36 | -- @ 37 | 38 | module Schemas 39 | ( 40 | -- * Typed schemas 41 | TypedSchemaFlex 42 | , TypedSchema 43 | , HasSchema(..) 44 | -- ** Construction 45 | , emptySchema 46 | , pureSchema 47 | , enum 48 | , readShow 49 | , list 50 | , string 51 | , vector 52 | , Key(..) 
53 | , stringMap 54 | , viaJSON 55 | , viaIso 56 | , named 57 | -- *** Applicative record definition 58 | , record 59 | , RecordFields 60 | , RecordField 61 | , field 62 | , fieldWith 63 | , fieldWith' 64 | , optField 65 | , optFieldWith 66 | , optFieldEither 67 | , optFieldEitherWith 68 | , optFieldGeneral 69 | , fieldName 70 | , fieldNameL 71 | , overFieldNames 72 | , extractFields 73 | -- *** Partial schemas 74 | , liftJust 75 | , liftRight 76 | -- *** Discriminated Unions 77 | , union 78 | , alt 79 | , altWith 80 | , UnionAlt 81 | -- *** Undiscriminated unions 82 | , Typed.oneOf 83 | , eitherSchema 84 | , liftPrism 85 | -- ** Encoding 86 | , encode 87 | , decode 88 | , encodeTo 89 | , decodeFrom 90 | , encodeWith 91 | , decodeWith 92 | , encodeToWith 93 | , decodeFromWith 94 | 95 | -- ** Results 96 | , Result 97 | , runResult 98 | 99 | -- * Untyped schemas 100 | , Schema(.., Unit, Union) 101 | , Field(..) 102 | , _Unit 103 | , _Union 104 | -- ** Extraction 105 | , extractSchema 106 | , schemaFor 107 | -- ** Functions 108 | , Mismatch(..) 109 | , Trace 110 | , TracedMismatches 111 | , isSubtypeOf 112 | , coerce 113 | -- * Validation 114 | , validate 115 | , extractValidators 116 | , validatorsFor 117 | -- * Reexports 118 | , Profunctor(..) 119 | ) 120 | where 121 | 122 | import Data.Profunctor 123 | import Schemas.Class 124 | import Schemas.Internal as Typed 125 | import Schemas.Untyped 126 | -------------------------------------------------------------------------------- /example/Person3.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass #-} 2 | {-# LANGUAGE DeriveGeneric #-} 3 | {-# LANGUAGE DerivingStrategies #-} 4 | {-# LANGUAGE DuplicateRecordFields #-} 5 | {-# LANGUAGE FlexibleInstances #-} 6 | {-# LANGUAGE OverloadedLists #-} 7 | {-# LANGUAGE OverloadedStrings #-} 8 | 9 | module Person3 where 10 | 11 | import Control.Applicative 12 | import Data.Generics.Labels () 13 | import GHC.Generics 14 | import qualified Generics.SOP as SOP 15 | import Person 16 | import Person2 17 | import Schemas 18 | 19 | -- | v3 adds recursive field 'spouse', which leads to cycles 20 | data Person3 = Person3 21 | { name :: String 22 | , age :: Int 23 | , addresses :: [String] 24 | , spouse :: Maybe Person3 -- new recursive field 25 | , religion :: Maybe Religion 26 | , education :: [Education] 27 | } 28 | deriving (Generic, Eq, Show) 29 | deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo) 30 | 31 | instance HasSchema Person3 where 32 | schema = named "Person3" 33 | $ record 34 | $ Person3 <$> field "name" Person3.name 35 | <*> field "age" Person3.age 36 | <*> field "addresses" Person3.addresses 37 | <*> optField "spouse" Person3.spouse 38 | <*> optField "religion" Person3.religion 39 | <*> (field "studies" Person3.education <|> field "education" Person3.education) 40 | 41 | laura3, pepe3 :: Person3 42 | 43 | -- pepe3 has a cycle with laura3 44 | pepe3 = Person3 45 | "Pepe" 46 | 38 47 | ["2 Edward Square", "La Mar 10"] 48 | (Just laura3) 49 | Nothing 50 | [PhD "Computer Science", Degree "Engineering"] 51 | 52 | -- laura3 has a cycle with pepe3 53 | laura3 = pepe3 { name = "Laura" 54 | , spouse = Just pepe3 55 | , education = [Degree "English"] 56 | , addresses = ["2 Edward Square"] 57 | , religion = Just Catholic 58 | } 59 | 60 | martin :: Person3 61 | martin = Person3 "Martin" 10 [] Nothing Nothing [] 62 | 63 | 64 | -- >>> import qualified Data.ByteString.Lazy.Char8 as B 65 | -- >>> import Data.Aeson.Encode.Pretty 66 | -- >>> B.putStrLn 
$ encodePretty $ finiteEncode 4 laura3 67 | -- { 68 | -- "L": { 69 | -- "spouse": { 70 | -- "L": {} 71 | -- }, 72 | -- "religion": "Catholic", 73 | -- "addresses": [ 74 | -- "2 Edward Square" 75 | -- ], 76 | -- "age": 38, 77 | -- "studies": { 78 | -- "Degree": "English" 79 | -- }, 80 | -- "name": "Laura" 81 | -- } 82 | -- } 83 | 84 | -- Unpacking infinite data is not supported currently 85 | -- >>> decode @Person3 (finiteEncode 4 pepe3) 86 | -- Left (MissingRecordField {name = "name", context = ["spouse"]}) 87 | -------------------------------------------------------------------------------- /example/Person4.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass #-} 2 | {-# LANGUAGE DeriveGeneric #-} 3 | {-# LANGUAGE DerivingStrategies #-} 4 | {-# LANGUAGE DuplicateRecordFields #-} 5 | {-# LANGUAGE FlexibleInstances #-} 6 | {-# LANGUAGE OverloadedLists #-} 7 | {-# LANGUAGE OverloadedStrings #-} 8 | 9 | module Person4 where 10 | 11 | import Control.Applicative 12 | import Data.Generics.Labels () 13 | import GHC.Generics 14 | import qualified Generics.SOP as SOP 15 | import Person 16 | import Person2 17 | import Schemas 18 | 19 | -- | v4 adds new fields 20 | data Person4 = Person4 21 | { name :: String 22 | , age :: Int 23 | , addresses :: [String] 24 | , religion :: Maybe Religion 25 | , education :: [Education] 26 | , a1,a2,a3,a4,a5 ,a6,a7,a8,a9,a10 27 | , b1,b2,b3,b4,b5,b6,b7,b8,b9,b10 28 | , c1,c2,c3,c4,c5,c6,c7,c8,c9,c10 29 | , d1,d2,d3,d4,d5,d6,d7,d8,d9,d10 30 | :: Bool 31 | } 32 | deriving (Generic, Eq, Show) 33 | deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo) 34 | 35 | instance HasSchema Person4 where 36 | schema = 37 | record 38 | $ Person4 39 | <$> field "name" Person4.name 40 | <*> field "age" Person4.age 41 | <*> field "addresses" Person4.addresses 42 | <*> optField "religion" Person4.religion 43 | <*> ( field "education" Person4.education 44 | <|> field "studies" Person4.education 45 | ) 46 | <*> grab "a1" a1 47 | <*> grab "a2" a2 48 | <*> grab "a3" a3 49 | <*> grab "a4" a4 50 | <*> grab "a5" a5 51 | <*> grab "a6" a6 52 | <*> grab "a7" a7 53 | <*> grab "a8" a8 54 | <*> grab "a9" a9 55 | <*> grab "a10" a10 56 | <*> grab "b1" b1 57 | <*> grab "b2" b2 58 | <*> grab "b3" b3 59 | <*> grab "b4" b4 60 | <*> grab "b5" b5 61 | <*> grab "b6" b6 62 | <*> grab "b7" b7 63 | <*> grab "b8" b8 64 | <*> grab "b9" b9 65 | <*> grab "b10" b10 66 | <*> grab "c1" c1 67 | <*> grab "c2" c2 68 | <*> grab "c3" c3 69 | <*> grab "c4" c4 70 | <*> grab "c5" c5 71 | <*> grab "c6" c6 72 | <*> grab "c7" c7 73 | <*> grab "c8" c8 74 | <*> grab "c9" c9 75 | <*> grab "c10" c10 76 | <*> grab "d1" d1 77 | <*> grab "d2" d2 78 | <*> grab "d3" d3 79 | <*> grab "d4" d4 80 | <*> grab "d5" d5 81 | <*> grab "d6" d6 82 | <*> grab "d7" d7 83 | <*> grab "d8" d8 84 | <*> grab "d9" d9 85 | <*> grab "d10" d10 86 | where grab n get = field n get <|> pure False 87 | 88 | pepe4 :: Person4 89 | pepe4 = Person4 90 | "Pepe" 91 | 38 92 | ["2 Edward Square", "La Mar 10"] 93 | Nothing 94 | [PhD "Computer Science", Degree "Engineering"] 95 | False 96 | False 97 | False 98 | False 99 | False 100 | False 101 | False 102 | False 103 | False 104 | False 105 | False 106 | False 107 | False 108 | False 109 | False 110 | False 111 | False 112 | False 113 | False 114 | False 115 | False 116 | False 117 | False 118 | False 119 | False 120 | False 121 | False 122 | False 123 | False 124 | False 125 | False 126 | False 127 | False 128 | False 129 | False 130 | False 131 | False 132 | 
False 133 | False 134 | False 135 | -------------------------------------------------------------------------------- /example/Person.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass #-} 2 | {-# LANGUAGE DeriveGeneric #-} 3 | {-# LANGUAGE DerivingStrategies #-} 4 | {-# LANGUAGE DuplicateRecordFields #-} 5 | {-# LANGUAGE FlexibleInstances #-} 6 | {-# LANGUAGE OverloadedLabels #-} 7 | {-# LANGUAGE OverloadedLists #-} 8 | {-# LANGUAGE OverloadedStrings #-} 9 | 10 | module Person where 11 | 12 | import Data.Generics.Labels () 13 | import GHC.Generics 14 | import qualified Generics.SOP as SOP 15 | import Schemas 16 | import Schemas.SOP 17 | 18 | data Education = NoEducation | Degree {unDegree :: String} | PhD {unPhD :: String} 19 | deriving (Generic, Eq, Show) 20 | 21 | instance HasSchema Education where 22 | schema = union 23 | [("NoEducation", alt #_NoEducation) 24 | ,("PhD", alt #_PhD) 25 | ,("Degree", alt #_Degree) 26 | ] 27 | 28 | data Person = Person 29 | { name :: String 30 | , age :: Int 31 | , addresses :: [String] 32 | , studies :: Education 33 | } 34 | deriving (Generic, Eq, Show) 35 | deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo) 36 | 37 | instance HasSchema Person where 38 | schema = gSchema defOptions 39 | 40 | pepe :: Person 41 | pepe = Person 42 | "Pepe" 43 | 38 44 | ["2 Edward Square", "La Mar 10"] 45 | (PhD "Computer Science") 46 | 47 | -- >>> import Data.Aeson.Encode.Pretty 48 | -- >>> import qualified Data.ByteString.Lazy.Char8 as B 49 | -- >>> B.putStrLn $ encodePretty $ encode pepe 50 | -- { 51 | -- "addresses": [ 52 | -- "2 Edward Square", 53 | -- "La Mar 10" 54 | -- ], 55 | -- "age": 38, 56 | -- "studies": { 57 | -- "PhD": "Computer Science" 58 | -- }, 59 | -- "name": "Pepe" 60 | -- } 61 | 62 | -- >>> B.putStrLn $ encodePretty $ encode (schemaFor @Person) 63 | -- { 64 | -- "Record": { 65 | -- "addresses": { 66 | -- "schema": { 67 | -- "Array": { 68 | -- "Prim": "String" 69 | -- } 70 | -- } 71 | -- }, 72 | -- "age": { 73 | -- "schema": { 74 | -- "Prim": "Integer" 75 | -- } 76 | -- }, 77 | -- "studies": { 78 | -- "schema": { 79 | -- "Union": [ 80 | -- { 81 | -- "schema": { 82 | -- "Prim": "String" 83 | -- }, 84 | -- "constructor": "Degree" 85 | -- }, 86 | -- { 87 | -- "schema": { 88 | -- "Prim": "String" 89 | -- }, 90 | -- "constructor": "PhD" 91 | -- }, 92 | -- { 93 | -- "schema": { 94 | -- "Record": {} 95 | -- }, 96 | -- "constructor": "NoEducation" 97 | -- } 98 | -- ] 99 | -- } 100 | -- }, 101 | -- "name": { 102 | -- "schema": { 103 | -- "Prim": "String" 104 | -- } 105 | -- } 106 | -- } 107 | -- } 108 | 109 | -- >>> import Text.Pretty.Simple 110 | -- >>> pPrintNoColor $ decode @Person $ encode pepe 111 | -- Right 112 | -- ( Person 113 | -- { name = "Pepe" 114 | -- , age = 38 115 | -- , addresses = 116 | -- [ "2 Edward Square" 117 | -- , "La Mar 10" 118 | -- ] 119 | -- , studies = 120 | -- ( PhD { unPhD = "Computer Science" } ) 121 | -- } 122 | -- ) 123 | -------------------------------------------------------------------------------- /schemas.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 2.0 2 | name: schemas 3 | version: 0.4.0.2 4 | synopsis: schema guided serialization 5 | description: 6 | Schemas is a Haskell library for serializing and deserializing data in JSON. 
7 | With schemas one does not define parsing and encoding functions, instead one 8 | defines a schema that explains the "shape" of the data, and the library provides 9 | the encode and decode functions. Shape descriptions are statically typed. 10 | . 11 | Schemas are related by a subtyping relation, which can be used to implement 12 | a simple form of schema versioning. As long as one knows the source and target 13 | schemas, and the source is a subtype of the target, source values can be encoded 14 | in the target schema. 15 | . 16 | The library also supports @oneOf@ schemas, which extend the 17 | range of versioning changes that can be supported automatically without 18 | resorting to explicit versions and conversion functions. 19 | . 20 | A type class @HasSchema@ is provided for convenience, but none of the 21 | core functions in the library rely on type classes. 22 | . 23 | Schemas can be derived generically using @generics-sop@, although most of the 24 | time it makes more sense to define the schemas explicitly to ensure well-behaved 25 | versioning. 26 | 27 | -- bug-reports: 28 | license: BSD3 29 | license-file: LICENSE 30 | author: Pepe Iborra 31 | maintainer: pepeiborra@gmail.com 32 | -- copyright: 33 | category: Data 34 | build-type: Simple 35 | extra-source-files: CHANGELOG.md, README.md 36 | homepage: https://github.com/pepeiborra/schemas 37 | bug-reports: https://github.com/pepeiborra/schemas/issues 38 | 39 | source-repository head 40 | type: git 41 | location: https://github.com/pepeiborra/schemas.git 42 | 43 | library 44 | exposed-modules: 45 | Schemas 46 | Schemas.Class 47 | Schemas.Internal 48 | Schemas.OpenApi2 49 | Schemas.Attempt 50 | Schemas.SOP 51 | Schemas.Untyped 52 | -- other-modules: 53 | -- other-extensions: 54 | -- upper bounds on base are mandatory 55 | build-depends: base >= 4.12 && < 100 56 | , aeson 57 | , bifunctors 58 | , bytestring 59 | , free 60 | , generics-sop >= 0.5.0.0 61 | , hashable 62 | , lens 63 | , lens-aeson 64 | , mtl 65 | , profunctors 66 | , scientific 67 | , text 68 | , transformers 69 | , unordered-containers 70 | , vector 71 | hs-source-dirs: src 72 | default-language: Haskell2010 73 | default-extensions: TypeApplications, OverloadedStrings, LambdaCase 74 | 75 | test-suite spec 76 | default-language: Haskell2010 77 | default-extensions: TypeApplications 78 | type: exitcode-stdio-1.0 79 | hs-source-dirs: example, test 80 | main-is: Main.hs 81 | ghc-options: -threaded -with-rtsopts=-M1G 82 | other-modules: Person 83 | , Person2 84 | , Person3 85 | , Person4 86 | , Walkthrough 87 | , Looper 88 | , Unions 89 | , SchemasSpec 90 | , Schemas.OpenApi2Spec 91 | , Schemas.SOPSpec 92 | , Generators 93 | build-depends: aeson 94 | , aeson-pretty 95 | , base 96 | , bytestring 97 | , generic-lens 98 | , generics-sop 99 | , hspec 100 | , lens 101 | , mtl 102 | , pretty-simple 103 | , QuickCheck 104 | , schemas 105 | , syb 106 | , text 107 | , transformers 108 | , unordered-containers 109 | build-tool-depends: hspec-discover:hspec-discover 110 | -------------------------------------------------------------------------------- /src/Schemas/SOP.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ConstraintKinds #-} 2 | {-# LANGUAGE DataKinds #-} 3 | {-# LANGUAGE FlexibleContexts #-} 4 | {-# LANGUAGE FlexibleInstances #-} 5 | {-# LANGUAGE PolyKinds #-} 6 | {-# LANGUAGE ScopedTypeVariables #-} 7 | {-# LANGUAGE TypeFamilies #-} 8 | {-# LANGUAGE TypeOperators #-} 9 | {-# LANGUAGE UndecidableInstances #-} 10 | 
module Schemas.SOP 11 | ( gSchema 12 | , HasGenericSchema 13 | , gRecordFields 14 | , Options(..) 15 | , defOptions 16 | , FieldEncode(..) 17 | ) 18 | where 19 | 20 | import Control.Lens (prism') 21 | import qualified Data.List.NonEmpty as NE 22 | import Data.Profunctor 23 | import Data.Text (Text, pack) 24 | import Generics.SOP as SOP 25 | import Schemas.Class 26 | import Schemas.Internal 27 | 28 | type HasGenericSchema a = (HasDatatypeInfo a, All2 FieldEncode (Code a)) 29 | 30 | data Options = Options 31 | { fieldLabelModifier :: String -> String 32 | , constructorTagModifier :: String -> String 33 | } 34 | 35 | defOptions :: Options 36 | defOptions = Options id id 37 | 38 | fieldSchemaC :: Proxy FieldEncode 39 | fieldSchemaC = Proxy 40 | 41 | gSchema :: forall a. HasGenericSchema a => Options -> TypedSchema a 42 | gSchema opts = case datatypeInfo (Proxy @a) of 43 | (Newtype _ _ ci ) -> dimap (unZ . unSOP . from) (to . SOP . Z) $ gSchemaNP opts ci 44 | (ADT _ _ (ci :* Nil) _) -> dimap (unZ . unSOP . from) (to . SOP . Z) $ gSchemaNP opts ci 45 | (ADT _ _ cis _) -> dimap (unSOP . from) (to . SOP) $ gSchemaNS opts cis 46 | 47 | gRecordFields :: forall a xs. (HasDatatypeInfo a, All FieldEncode xs, Code a ~ '[xs]) => Options -> RecordFields a a 48 | gRecordFields opts = case datatypeInfo (Proxy @a) of 49 | (Newtype _ _ ci ) -> dimap (unZ . unSOP . from) (to . SOP . Z) $ gRecordFields' opts ci 50 | (ADT _ _ (ci :* Nil) _) -> dimap (unZ . unSOP . from) (to . SOP . Z) $ gRecordFields' opts ci 51 | 52 | 53 | gSchemaNS :: forall xss . All2 FieldEncode xss => Options -> NP ConstructorInfo xss -> TypedSchema (NS (NP I) xss) 54 | gSchemaNS opts ci = 55 | case mkAlts ci of 56 | [] -> error "empty union" 57 | other -> union $ NE.fromList other 58 | where 59 | mkAlts = hcollapse . hczipWith3 (Proxy :: Proxy (All FieldEncode)) mk (injections @_ @(NP I)) (ejections @_ @(NP I)) 60 | mk 61 | :: forall (xs :: [*]) 62 | . All FieldEncode xs 63 | => Injection (NP I) xss xs 64 | -> Ejection (NP I) xss xs 65 | -> ConstructorInfo xs 66 | -> K (Text, UnionAlt (NS (NP I) xss)) xs 67 | mk (Fn inject) (Fn eject) ci = K (cons, altWith sc (prism' (unK . inject) (unComp . eject . K))) where 68 | -- sc = dimap (unComp . eject . K) (unK . inject . fromJust) gSchemaNP opts ci) 69 | sc = gSchemaNP opts ci 70 | cons = pack (constructorTagModifier opts (constructorName ci)) 71 | 72 | gSchemaNP 73 | :: forall (xs :: [*]) 74 | . (All FieldEncode xs) 75 | => Options 76 | -> ConstructorInfo xs 77 | -> TypedSchema (NP I xs) 78 | gSchemaNP opts = record . gRecordFields' opts 79 | 80 | gRecordFields' 81 | :: forall (xs :: [*]) 82 | . (All FieldEncode xs) 83 | => Options 84 | -> ConstructorInfo xs 85 | -> RecordFields (NP I xs) (NP I xs) 86 | gRecordFields' opts ci = 87 | hsequence $ 88 | hczipWith fieldSchemaC mk fieldNames projections 89 | where 90 | mk :: (FieldEncode x) => K String x -> Projection I xs x -> RecordFields (NP I xs) x 91 | mk (K theFieldName) (Fn proj) = 92 | fieldEncoder (pack $ fieldLabelModifier opts theFieldName) (dimap K unI proj) 93 | 94 | fieldNames :: NP (K String) xs 95 | fieldNames = case ci of 96 | SOP.Record _ theFieldNames -> hmap (K . SOP.fieldName) theFieldNames 97 | SOP.Infix{} -> hmap (K . ("$" ++) . show . unK) (numbers 1) 98 | SOP.Constructor{} -> hmap (K . ("$" ++) . show . unK) (numbers 1) 99 | 100 | numbers :: forall k (fields :: [k]) . 
SListI fields => Int -> NP (K Int) fields 101 | numbers no = case sList :: SList fields of 102 | SNil -> Nil 103 | SCons -> K no :* numbers (no + 1) 104 | 105 | class FieldEncode a where fieldEncoder :: Text -> (from -> a) -> RecordFields from a 106 | 107 | instance {-# OVERLAPPABLE #-} HasSchema a => FieldEncode a where fieldEncoder = field 108 | instance HasSchema a => FieldEncode (Maybe a) where fieldEncoder = optField 109 | -------------------------------------------------------------------------------- /nix/sources.nix: -------------------------------------------------------------------------------- 1 | # This file has been generated by Niv. 2 | 3 | let 4 | 5 | # 6 | # The fetchers. fetch_ fetches specs of type . 7 | # 8 | 9 | fetch_file = pkgs: spec: 10 | if spec.builtin or true then 11 | builtins_fetchurl { inherit (spec) url sha256; } 12 | else 13 | pkgs.fetchurl { inherit (spec) url sha256; }; 14 | 15 | fetch_tarball = pkgs: spec: 16 | if spec.builtin or true then 17 | builtins_fetchTarball { inherit (spec) url sha256; } 18 | else 19 | pkgs.fetchzip { inherit (spec) url sha256; }; 20 | 21 | fetch_git = spec: 22 | builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; }; 23 | 24 | fetch_builtin-tarball = spec: 25 | builtins.trace 26 | '' 27 | WARNING: 28 | The niv type "builtin-tarball" will soon be deprecated. You should 29 | instead use `builtin = true`. 30 | 31 | $ niv modify -a type=tarball -a builtin=true 32 | '' 33 | builtins_fetchTarball { inherit (spec) url sha256; }; 34 | 35 | fetch_builtin-url = spec: 36 | builtins.trace 37 | '' 38 | WARNING: 39 | The niv type "builtin-url" will soon be deprecated. You should 40 | instead use `builtin = true`. 41 | 42 | $ niv modify -a type=file -a builtin=true 43 | '' 44 | (builtins_fetchurl { inherit (spec) url sha256; }); 45 | 46 | # 47 | # Various helpers 48 | # 49 | 50 | # The set of packages used when specs are fetched using non-builtins. 51 | mkPkgs = sources: 52 | let 53 | sourcesNixpkgs = 54 | import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {}; 55 | hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath; 56 | hasThisAsNixpkgsPath = == ./.; 57 | in 58 | if builtins.hasAttr "nixpkgs" sources 59 | then sourcesNixpkgs 60 | else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then 61 | import {} 62 | else 63 | abort 64 | '' 65 | Please specify either (through -I or NIX_PATH=nixpkgs=...) or 66 | add a package called "nixpkgs" to your sources.json. 67 | ''; 68 | 69 | # The actual fetching function. 70 | fetch = pkgs: name: spec: 71 | 72 | if ! 
builtins.hasAttr "type" spec then 73 | abort "ERROR: niv spec ${name} does not have a 'type' attribute" 74 | else if spec.type == "file" then fetch_file pkgs spec 75 | else if spec.type == "tarball" then fetch_tarball pkgs spec 76 | else if spec.type == "git" then fetch_git spec 77 | else if spec.type == "builtin-tarball" then fetch_builtin-tarball spec 78 | else if spec.type == "builtin-url" then fetch_builtin-url spec 79 | else 80 | abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}"; 81 | 82 | # Ports of functions for older nix versions 83 | 84 | # a Nix version of mapAttrs if the built-in doesn't exist 85 | mapAttrs = builtins.mapAttrs or ( 86 | f: set: with builtins; 87 | listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set)) 88 | ); 89 | 90 | # fetchTarball version that is compatible between all the versions of Nix 91 | builtins_fetchTarball = { url, sha256 }@attrs: 92 | let 93 | inherit (builtins) lessThan nixVersion fetchTarball; 94 | in 95 | if lessThan nixVersion "1.12" then 96 | fetchTarball { inherit url; } 97 | else 98 | fetchTarball attrs; 99 | 100 | # fetchurl version that is compatible between all the versions of Nix 101 | builtins_fetchurl = { url, sha256 }@attrs: 102 | let 103 | inherit (builtins) lessThan nixVersion fetchurl; 104 | in 105 | if lessThan nixVersion "1.12" then 106 | fetchurl { inherit url; } 107 | else 108 | fetchurl attrs; 109 | 110 | # Create the final "sources" from the config 111 | mkSources = config: 112 | mapAttrs ( 113 | name: spec: 114 | if builtins.hasAttr "outPath" spec 115 | then abort 116 | "The values in sources.json should not have an 'outPath' attribute" 117 | else 118 | spec // { outPath = fetch config.pkgs name spec; } 119 | ) config.sources; 120 | 121 | # The "config" used by the fetchers 122 | mkConfig = 123 | { sourcesFile ? ./sources.json 124 | , sources ? builtins.fromJSON (builtins.readFile sourcesFile) 125 | , pkgs ? mkPkgs sources 126 | }: rec { 127 | # The sources, i.e. the attribute set of spec name to spec 128 | inherit sources; 129 | 130 | # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers 131 | inherit pkgs; 132 | }; 133 | in 134 | mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); } 135 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![CI](https://travis-ci.com/pepeiborra/schemas.svg)](https://travis-ci.com/pepeiborra/schemas) 2 | [![Hackage](https://img.shields.io/hackage/v/schemas.svg)](https://hackage.haskell.org/package/schemas) 3 | # schemas 4 | 5 | schemas is a Haskell-centric serialization library written with versioning in mind. Since a schema is a first-class citizen, it can be serialized, reasoned about, and transmitted together with the data. Serialization and deserialization work better when the source schema is provided, and versioning is accomplished by checking that the two schemas are related by a subtyping relation. This alleviates the need to keep old versions of datatypes around. 6 | 7 | Consider a schema modification that adds a field. To support upgrading old documents to the new schema, the only requirement is that the new field is optional. Downgrading is easy too, simply omit the new field. Conversely, a schema modifcation that removes a field supports trivial upgrades but the removed field must be optional to support downgrading. 
Changing the type of a field is supported inasmuch as the old and new types are relatable. Field renaming is not supported. More importantly, all these changes are defined by a schema relation, and the library provides a predicate to check whether the relation holds. 8 | 9 | schemas can also be used in a GraphQL-like fashion, allowing clients to request a subset of the schema. This comes up especially when working with recursive schemas involving cyclic data. 10 | 11 | ## Features 12 | * schemas are first-class citizens and can be serialized, 13 | * schema construction is statically typed, 14 | * versioning is driven by a subtyping relation, no need for version numbers, 15 | * serialization to JSON only (currently) 16 | 17 | ## Why schemas 18 | 19 | A quick search in Hackage reveals a large number of libraries about schemas, including [json-schema], [hjsonschema], [aeson-schema], [aeson-schemas] and [hschema], amongst others. 20 | There's undoubtedly a large amount of overlap amongst all these libraries, so the immediate question is: why introduce another one? 21 | 22 | This library is a re-implementation of an encoding library found in the Strats codebase at Standard Chartered Bank, the origins of which go back a few years. 23 | It predates other libraries that accomplish a similar task, including most of the ones mentioned before. 24 | The approach has worked well but the codebase is showing its age and limitations, notably the lack of decoding capabilities. 25 | This library extends the original approach with decoding and alternatives, hopefully keeping the good parts, like the subtyping relation, intact. 26 | 27 | [json-schema]: http://hackage.haskell.org/packages/json-schema 28 | [hjsonschema]: http://hackage.haskell.org/packages/hjsonschema 29 | [aeson-schema]: http://hackage.haskell.org/packages/aeson-schema 30 | [aeson-schemas]: http://hackage.haskell.org/packages/aeson-schemas 31 | [hschema]: http://hackage.haskell.org/packages/hschema 32 | 33 | 34 | ## Subtyping relation 35 | 36 | schemas relies on a simple subtyping relation between schemas to perform value conversions. The basic idea is that these conversions are fully guided by the source and target schemas and involve only simple projections and injections: 37 | 1. Projecting a subset of the source record fields. 38 | 2. Turning a source field of type `A` into a target field of type `Array A`. 39 | 40 | For more concrete details on the subtyping relation, check the definition of `isSubtypeOf`. This function returns a witness, i.e. a conversion function, whenever the relation holds. 41 | 42 | Versioning makes use of this subtyping relation as follows. Downgrading a value `v_2 :: T_2` into a previous version `T_1` is accomplished via the witness of `schema(T_1) > schema(T_2)`. Similarly, upgrading a `v_1 :: T_1` message into a newer version `T_2` can be accomplished via the witness of `schema(T_1) < schema(T_2)`. Therefore, a type `T_1` can only be replaced by a type `T_2` in a downgrade-compatible way if `schema(T_1) > schema(T_2)`; if upgrades are required, then `schema(T_1) < schema(T_2)` is required too. 43 | 44 | The `<` relation is reflexive and transitive, but importantly not asymmetric or antisymmetric: it can be that both `T_1 < T_2` and `T_2 < T_1` and yet they are not the same type. For example, given an `S_2` schema that adds a required field to `S_1`, we would have that `S_1 > S_2` but not `S_1 < S_2`. However, if the new field was optional, then we would have `S_1 < S_2` too. In such a case, we say that `S_1 ~ S_2` because they only differ in optional fields.
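To make the optional-field case concrete, here is a minimal sketch of `S_1` and `S_2` written with the untyped `Schema` constructors; the record shape and field names are made up purely for illustration:

```haskell
{-# LANGUAGE OverloadedLists   #-}
{-# LANGUAGE OverloadedStrings #-}

import Schemas

-- Hypothetical v1 schema: a record with one required field
s1 :: Schema
s1 = Record [("name", Field (Prim "String") True)]

-- Hypothetical v2 schema: the same record plus an *optional* field
s2 :: Schema
s2 = Record
  [ ("name"    , Field (Prim "String") True)
  , ("nickname", Field (Prim "String") False)
  ]
```

Because the only difference is an optional field, the relation holds in both directions (`s1 ~ s2` in the notation above), so `isSubtypeOf` can produce a conversion either way.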
45 | For example, given an `S_3` schema that removes a field from `S_2`, we have: 46 | - `S_2 < S_3`, therefore we can upgrade `S_2` values to `S_3` 47 | - `S_2 ~ S_3` if the removed field is optional, in which case we can also downgrade `S_3` values to `S_2` 48 | 49 | The `~` relation is an equivalence relation, i.e. it is reflexive, symmetric and transitive. 50 | 51 | ## Alternative encodings 52 | 53 | Sometimes there is more than one way to encode a value. A field can be renamed or change its type, an optional field can become mandatory, several fields can be merged into one, etc. Alternative encodings allow for backwards-compatible schema evolution. 54 | This library supports alternative encodings via the `Monoid` instance for typed schemas and the `Alternative` instance for `RecordFields`. 55 | 56 | The schema `A|B` encodes a value in two alternative ways, `A` and `B`. A message created with this schema may use encoding A, encoding B, or both. `encode` will always create messages with all the possible encodings. While messages with multiple alternative encodings are not desirable for serialization, the desired message can be carved out using the subtyping relation. All the following hold: 57 | ``` 58 | A < A|B (the coercion A -> A|B will produce a message with an A encoding) 59 | B < A|B (the coercion B -> A|B will produce a message with a B encoding) 60 | A|B < A (the coercion A|B -> A will succeed only if the message contains an A encoding) 61 | A|B < B (the coercion A|B -> B will succeed only if the message contains a B encoding) 62 | ``` 63 | 64 | Typed schemas implement a limited form of alternative encodings via the `Alternative` instance for record fields. In the future, a similar `Alternative` instance for union constructors could be added. 65 | 66 | ## Walkthrough 67 | 68 | A fully commented example of how to define a service endpoint using *schemas*: 69 | [walkthrough](example/Walkthrough.lhs) 70 | 71 | ## Examples 72 | 73 | - [Person](example/Person.hs) 74 | - [Person2](example/Person2.hs) 75 | - [Person3](example/Person3.hs) 76 | -------------------------------------------------------------------------------- /src/Schemas/OpenApi2.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass #-} 2 | {-# LANGUAGE DeriveGeneric #-} 3 | {-# LANGUAGE DerivingStrategies #-} 4 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 5 | {-# LANGUAGE LambdaCase #-} 6 | {-# LANGUAGE OverloadedLists #-} 7 | {-# LANGUAGE OverloadedStrings #-} 8 | 9 | -- | This module defines a 'TypedSchema' for the 'Schema' datatype, 10 | -- inspired by the OpenApi 2.0 specification, which may be useful 11 | -- to render 'Schema' values in OpenApi 2.0 format 12 | module Schemas.OpenApi2 13 | ( OpenApi2Document(..) 14 | , OpenApi2Schema(..) 15 | , defOpenApi2Schema 16 | , OpenApi2Type(..) 17 | , OpenApi2Options(..) 
18 | , defaultOptions 19 | , toOpenApi2Document 20 | , encodeAsOpenApi2Document 21 | ) 22 | where 23 | 24 | import Control.Monad 25 | import Control.Monad.Trans.Class 26 | import Control.Monad.Trans.Except 27 | import Control.Monad.Trans.Writer 28 | import Data.Aeson (Value) 29 | import Data.Function 30 | import Data.Functor 31 | import Data.HashMap.Strict (HashMap) 32 | import qualified Data.HashMap.Strict as Map 33 | import qualified Data.List.NonEmpty as NE 34 | import Data.Text (Text) 35 | import qualified Data.Text as Text 36 | import qualified Generics.SOP as SOP 37 | import GHC.Generics 38 | import Schemas 39 | import Schemas.SOP 40 | 41 | -- | Given a schema free of undiscriminated unions 42 | -- @encodeAsOpenApi2Document name schema@ produces an encoding of an 43 | -- OpenApi2 document that models the given schema. 44 | -- Failures are omitted, use 'toOpenApi2Document' if you care. 45 | encodeAsOpenApi2Document :: OpenApi2Options -> Text -> Schema -> Value 46 | encodeAsOpenApi2Document opts n sc = 47 | encode & ($ toOpenApi2Document opts (Map.fromList [(n, sc)])) 48 | 49 | -- | A catalog of definitions 50 | data OpenApi2Document = OpenApi2Document 51 | { definitions :: HashMap Text OpenApi2Schema 52 | , failures :: HashMap Text Reason 53 | } 54 | deriving (Show) 55 | 56 | instance Monoid OpenApi2Document where 57 | mempty = OpenApi2Document [] [] 58 | 59 | instance Semigroup OpenApi2Document where 60 | OpenApi2Document d f <> OpenApi2Document d' f' = 61 | OpenApi2Document (d <> d') (f <> f') 62 | 63 | instance HasSchema OpenApi2Document where 64 | schema = 65 | record $ OpenApi2Document 66 | <$> field "definitions" definitions 67 | <*> field "failures" failures 68 | 69 | -- | The representation of an OpenApi 2.0 schema 70 | data OpenApi2Schema = OpenApi2Schema 71 | { _type :: OpenApi2Type 72 | , additionalProperties :: Maybe OpenApi2Schema 73 | , discriminator :: Maybe Text 74 | , enum :: Maybe [Text] 75 | , format :: Maybe Text 76 | , items :: Maybe OpenApi2Schema 77 | , properties :: Maybe (HashMap Text OpenApi2Schema) 78 | , required :: Maybe [Text] 79 | } 80 | deriving (Eq, Generic, Show) 81 | deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo) 82 | 83 | instance HasSchema OpenApi2Schema where 84 | schema = named "OpenApi2" $ gSchema defOptions { fieldLabelModifier = dropWhile (== '_') } 85 | 86 | defOpenApi2Schema :: OpenApi2Type -> OpenApi2Schema 87 | defOpenApi2Schema t = 88 | OpenApi2Schema t Nothing Nothing Nothing Nothing Nothing Nothing Nothing 89 | 90 | data OpenApi2Type 91 | = OpenApi2Object 92 | | OpenApi2Array 93 | | OpenApi2Boolean 94 | | OpenApi2Integer 95 | | OpenApi2Number 96 | | OpenApi2String 97 | deriving (Bounded, Enum, Eq, Show) 98 | 99 | instance HasSchema OpenApi2Type where 100 | schema = Schemas.enum (Text.toLower . Text.pack . drop 8 . show) 101 | [minBound .. 
maxBound] 102 | 103 | newtype OpenApi2Options = OpenApi2Options 104 | { -- | Please tell me what to do with Prims 105 | primMapping :: Text -> Maybe OpenApi2Schema 106 | } 107 | 108 | defaultOptions :: OpenApi2Options 109 | defaultOptions = OpenApi2Options { primMapping = f } 110 | where 111 | f "Boolean" = Just $ defOpenApi2Schema OpenApi2Boolean 112 | f "String" = Just $ defOpenApi2Schema OpenApi2String 113 | f "Number" = Just $ defOpenApi2Schema OpenApi2Number 114 | f "Integer" = Just $ defOpenApi2Schema OpenApi2Integer 115 | f _ = Nothing 116 | 117 | toOpenApi2Document :: OpenApi2Options -> HashMap Text Schema -> OpenApi2Document 118 | toOpenApi2Document opts schemas = 119 | foldMap wrap (Map.toList topLevelSchemas) <> internalSchemas 120 | where 121 | results = runExcept . runWriterT . toOpenApi2 (primMapping opts) <$> schemas 122 | 123 | (topLevelSchemas, internalSchemas) = runWriter $ forM results $ \case 124 | Left reason -> pure $ Left reason 125 | Right (sc, inner) -> tell inner $> Right sc 126 | 127 | wrap (n, Left reason) = OpenApi2Document [] [(n, reason)] 128 | wrap (n, Right sc ) = OpenApi2Document [(n, sc)] [] 129 | 130 | newtype Reason = Unsupported Text 131 | deriving Show 132 | deriving newtype HasSchema 133 | 134 | -- | Alternatives and undiscriminated Unions are not supported 135 | toOpenApi2 136 | :: (Text -> Maybe OpenApi2Schema) 137 | -> Schema 138 | -> WriterT OpenApi2Document (Except Reason) OpenApi2Schema 139 | toOpenApi2 _rim Empty = lift $ throwE $ Unsupported "empty" 140 | toOpenApi2 prim (Array sc) = toOpenApi2 prim sc 141 | <&> \sc2 -> (defOpenApi2Schema OpenApi2Array) { items = Just sc2 } 142 | toOpenApi2 prim (StringMap sc) = toOpenApi2 prim sc <&> \sc2 -> 143 | (defOpenApi2Schema OpenApi2Object) { additionalProperties = Just sc2 } 144 | toOpenApi2 _rim (Enum vals) = pure $ (defOpenApi2Schema OpenApi2String) 145 | { Schemas.OpenApi2.enum = Just (NE.toList vals) 146 | } 147 | toOpenApi2 prim (Record fields) = do 148 | let req = [ n | (n, Field _ True) <- Map.toList fields ] 149 | pp <- traverse (toOpenApi2 prim . 
fieldSchema) fields 150 | return (defOpenApi2Schema OpenApi2Object) { properties = Just pp 151 | , required = Just req 152 | } 153 | toOpenApi2 prim (Union alts) = do 154 | altSchemas <- traverse (toOpenApi2 prim) (Map.fromList $ NE.toList alts) 155 | tell $ OpenApi2Document altSchemas [] 156 | return $ (defOpenApi2Schema OpenApi2Object) 157 | { discriminator = Just "tag" 158 | , required = Just ["tag"] 159 | , properties = Just [("tag", defOpenApi2Schema OpenApi2String)] 160 | } 161 | toOpenApi2 prim (Prim p) | Just y <- prim p = pure y 162 | toOpenApi2 _rim (Prim p) = lift $ throwE $ Unsupported $ "Unknown prim: " <> p 163 | toOpenApi2 _rim s@OneOf{} = 164 | lift $ throwE $ Unsupported $ "undiscriminated unions (OneOf): " <> Text.pack (show s) 165 | 166 | -- TODO future work 167 | -- fromOpenApi2 :: OpenApi2 -> Schema 168 | -- fromOpenApi2 _ = undefined 169 | 170 | -------------------------------------------------------------------------------- /src/Schemas/Class.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE AllowAmbiguousTypes #-} 2 | {-# LANGUAGE FlexibleInstances #-} 3 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 4 | {-# LANGUAGE LambdaCase #-} 5 | {-# LANGUAGE OverloadedLists #-} 6 | {-# LANGUAGE OverloadedStrings #-} 7 | {-# LANGUAGE RankNTypes #-} 8 | {-# LANGUAGE ScopedTypeVariables #-} 9 | {-# LANGUAGE StandaloneDeriving #-} 10 | module Schemas.Class where 11 | 12 | import Control.Lens hiding (_Empty, Empty, enum, (<.>)) 13 | import Data.Aeson (Value) 14 | import Data.Biapplicative 15 | import Data.Hashable 16 | import Data.HashMap.Strict (HashMap) 17 | import qualified Data.HashMap.Strict as Map 18 | import Data.HashSet (HashSet) 19 | import Data.List.NonEmpty (NonEmpty (..)) 20 | import Data.Maybe 21 | import Data.Scientific 22 | import Data.Text (Text, pack, unpack) 23 | import Data.Vector (Vector) 24 | import Numeric.Natural 25 | import Schemas.Internal 26 | import Schemas.Untyped 27 | 28 | -- HasSchema class and instances 29 | -- ----------------------------------------------------------------------------------- 30 | 31 | -- | The class of types that have a default schema 32 | class HasSchema a where 33 | -- | The default schema for this type 34 | schema :: TypedSchema a 35 | 36 | instance HasSchema () where 37 | schema = lmap (\() -> undefined) $ pureSchema () 38 | 39 | instance HasSchema Bool where 40 | schema = viaJSON "Boolean" 41 | 42 | instance HasSchema Double where 43 | schema = viaJSON "Number" 44 | 45 | instance HasSchema Scientific where 46 | schema = viaJSON "Number" 47 | 48 | instance HasSchema Int where 49 | schema = viaJSON "Integer" 50 | 51 | instance HasSchema Integer where 52 | schema = viaJSON "Integer" 53 | 54 | instance HasSchema Natural where 55 | schema = viaJSON "Integer" 56 | 57 | instance {-# OVERLAPPING #-} HasSchema String where 58 | schema = string 59 | 60 | instance HasSchema Text where 61 | schema = viaJSON "String" 62 | 63 | instance {-# OVERLAPPABLE #-} HasSchema a => HasSchema [a] where 64 | schema = list schema 65 | 66 | instance (HasSchema a) => HasSchema (Vector a) where 67 | schema = vector schema 68 | 69 | instance (Eq a, Hashable a, HasSchema a) => HasSchema (HashSet a) where 70 | schema = list schema 71 | 72 | instance (HasSchema a) => HasSchema (NonEmpty a) where 73 | schema = list schema 74 | 75 | instance HasSchema a => HasSchema (Identity a) where 76 | schema = dimap runIdentity Identity schema 77 | 78 | deriving instance HasSchema SchemaName 79 | 80 | instance HasSchema 
Schema where 81 | schema = named "Schema" $ union 82 | [ ("StringMap", alt $ prism' StringMap (\case StringMap x -> Just x ; _ -> Nothing)) 83 | , ("Array", alt $ prism' Array (\case Array x -> Just x ; _ -> Nothing)) 84 | , ("Enum", alt $ prism' Enum (\case Enum x -> Just x ; _ -> Nothing)) 85 | , ("Record", alt $ prism' Record (\case Record x -> Just x ; _ -> Nothing)) 86 | , ("Prim", alt $ prism' Prim (\case Prim x -> Just x ; _ -> Nothing)) 87 | , ("Union",altWith unionSchema _Union) 88 | , ("OneOf", alt $ prism' OneOf (\case OneOf x -> Just x ; _ -> Nothing)) 89 | , ("Named", altWith namedSchema $ prism' (uncurry Named) (\case Named s sc -> Just (s,sc) ; _ -> Nothing)) 90 | , ("Empty", alt $ prism' (const Empty) (\case Empty -> Just () ; _ -> Nothing)) 91 | ] 92 | where 93 | namedSchema = record $ (,) <$> field "name" fst <*> field "schema" snd 94 | unionSchema = list (record $ (,) <$> field "constructor" fst <*> field "schema" snd) 95 | 96 | instance HasSchema Field where 97 | schema = record $ 98 | Field <$> field "schema" fieldSchema 99 | <*> fmap (fromMaybe True) (optField "isRequired" (\x -> if isRequired x then Nothing else Just False)) 100 | 101 | 102 | instance HasSchema Value where 103 | schema = viaJSON "JSON" 104 | 105 | instance (HasSchema a, HasSchema b) => HasSchema (a,b) where 106 | schema = record $ (,) <$> field "$1" fst <*> field "$2" snd 107 | 108 | instance (HasSchema a, HasSchema b, HasSchema c) => HasSchema (a,b,c) where 109 | schema = record $ (,,) <$> field "$1" (view _1) <*> field "$2" (view _2) <*> field "$3" (view _3) 110 | 111 | instance (HasSchema a, HasSchema b, HasSchema c, HasSchema d) => HasSchema (a,b,c,d) where 112 | schema = 113 | record 114 | $ (,,,) 115 | <$> field "$1" (view _1) 116 | <*> field "$2" (view _2) 117 | <*> field "$3" (view _3) 118 | <*> field "$4" (view _4) 119 | 120 | instance (HasSchema a, HasSchema b, HasSchema c, HasSchema d, HasSchema e) => HasSchema (a,b,c,d,e) where 121 | schema = 122 | record 123 | $ (,,,,) 124 | <$> field "$1" (view _1) 125 | <*> field "$2" (view _2) 126 | <*> field "$3" (view _3) 127 | <*> field "$4" (view _4) 128 | <*> field "$5" (view _5) 129 | 130 | instance (HasSchema a, HasSchema b) => HasSchema (Either a b) where 131 | schema = union [("left", alt _Left), ("right", alt _Right)] 132 | <> union [("Left", alt _Left), ("Right", alt _Right)] 133 | 134 | instance (Eq key, Hashable key, HasSchema a, Key key) => HasSchema (HashMap key a) where 135 | schema = dimap toKeyed fromKeyed $ stringMap schema 136 | where 137 | fromKeyed :: HashMap Text a -> HashMap key a 138 | fromKeyed = Map.fromList . map (first fromKey) . Map.toList 139 | toKeyed :: HashMap key a -> HashMap Text a 140 | toKeyed = Map.fromList . map (first toKey) . Map.toList 141 | 142 | class Key a where 143 | fromKey :: Text -> a 144 | toKey :: a -> Text 145 | 146 | instance Key Text where 147 | fromKey = id 148 | toKey = id 149 | 150 | instance Key String where 151 | fromKey = unpack 152 | toKey = pack 153 | 154 | -- HasSchema aware combinators 155 | -- ----------------------------------------------------------------------------------- 156 | -- | Extract the default 'Schema' for a type 157 | schemaFor :: forall a . HasSchema a => Schema 158 | schemaFor = case extractSchema (schema @a) of x :| _ -> x 159 | 160 | validatorsFor :: forall a . 
HasSchema a => Validators 161 | validatorsFor = extractValidators (schema @a) 162 | 163 | -- | encode using the default schema 164 | encode :: HasSchema a => (a -> Value) 165 | encode = encodeWith schema 166 | 167 | -- | @encodeTo target@ returns an encoder from the default schema to the @target@ schema. 168 | encodeTo :: HasSchema a => Schema -> Either TracedMismatches (a -> Value) 169 | encodeTo = encodeToWith schema 170 | 171 | -- | Decode using the default schema. 172 | decode :: HasSchema a => Value -> Result a 173 | decode = decodeWith schema 174 | 175 | -- | @decodeFrom source@ returns a decoder from the @source@ schema to the default schema 176 | decodeFrom :: HasSchema a => Schema -> Result (Value -> Result a) 177 | decodeFrom = decodeFromWith schema 178 | 179 | -- | Coerce from 'sub' to 'sup'. Returns 'Nothing' if 'sub' is not a subtype of 'sup' 180 | coerce :: forall sub sup . (HasSchema sub, HasSchema sup) => Value -> Maybe Value 181 | coerce = case isSubtypeOf (validatorsFor @sub) (schemaFor @sub) (schemaFor @sup) of 182 | Right cast -> Just . cast 183 | _ -> const Nothing 184 | 185 | -- | @field name get@ introduces a field with the default schema for the type 186 | field :: HasSchema a => Text -> (from -> a) -> RecordFields from a 187 | field = fieldWith schema 188 | 189 | -- | @optField name get@ introduces an optional field with the default schema for the type 190 | optField :: forall a from. HasSchema a => Text -> (from -> Maybe a) -> RecordFields from (Maybe a) 191 | optField n get = optFieldWith (lmap get $ liftJust (schema @a)) n 192 | 193 | -- | @optFieldEither name get@ introduces an optional field with the default schema for the type 194 | optFieldEither 195 | :: forall a from e 196 | . HasSchema a 197 | => Text 198 | -> (from -> Either e a) 199 | -> e 200 | -> RecordFields from (Either e a) 201 | optFieldEither n f = optFieldEitherWith (lmap f (liftRight schema)) n 202 | 203 | -- | @alt name prism@ introduces a discriminated union alternative with the default schema 204 | alt :: HasSchema a => Prism' from a -> UnionAlt from 205 | alt = altWith schema 206 | -------------------------------------------------------------------------------- /src/Schemas/Untyped.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable #-} 2 | {-# LANGUAGE DeriveGeneric #-} 3 | {-# LANGUAGE DerivingStrategies #-} 4 | {-# LANGUAGE FlexibleInstances #-} 5 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 6 | {-# LANGUAGE OverloadedLists #-} 7 | {-# LANGUAGE OverloadedStrings #-} 8 | {-# LANGUAGE PatternSynonyms #-} 9 | {-# LANGUAGE RankNTypes #-} 10 | {-# LANGUAGE RecordWildCards #-} 11 | {-# LANGUAGE ScopedTypeVariables #-} 12 | {-# LANGUAGE TupleSections #-} 13 | {-# LANGUAGE ViewPatterns #-} 14 | {-# OPTIONS -Wno-name-shadowing #-} 15 | 16 | module Schemas.Untyped where 17 | 18 | import Control.Exception 19 | import Control.Lens hiding (Empty, enum, (<.>)) 20 | import Control.Monad 21 | import Control.Monad.Trans.Except 22 | import Data.Aeson (Value) 23 | import qualified Data.Aeson as A 24 | import Data.Aeson.Lens 25 | import Data.Biapplicative 26 | import Data.Either 27 | import Data.Foldable (asum) 28 | import Data.HashMap.Strict (HashMap) 29 | import qualified Data.HashMap.Strict as Map 30 | import Data.HashSet (HashSet) 31 | import Data.List (find, intersperse, intercalate) 32 | import Data.List.NonEmpty (NonEmpty (..)) 33 | import qualified Data.List.NonEmpty as NE 34 | import Data.Maybe 35 | import Data.Semigroup 36 | 
import Data.Text (Text, pack, unpack) 37 | import Data.Typeable 38 | import GHC.Exts (IsList (..), IsString(..)) 39 | import GHC.Generics (Generic) 40 | import Prelude hiding (lookup) 41 | import Text.Read 42 | import Text.Show.Functions () 43 | import Data.Data (Data) 44 | 45 | -- Schemas 46 | -- -------------------------------------------------------------------------------- 47 | 48 | newtype SchemaName = SchemaName String 49 | deriving stock (Data) 50 | deriving newtype (Eq, IsString) 51 | 52 | instance Show SchemaName where show (SchemaName s) = s 53 | 54 | -- | A schema for untyped data, such as JSON or XML. 55 | -- 56 | -- * introduction forms: 'extractSchema', 'schemaFor', 'mempty' 57 | -- * operations: 'isSubtypeOf', 'versions', 'coerce', 'validate' 58 | -- * composition: '(<>)' 59 | data Schema 60 | = Array Schema 61 | | StringMap Schema 62 | | Enum (NonEmpty Text) 63 | | Record (HashMap Text Field) 64 | | OneOf (NonEmpty Schema) -- ^ Decoding works for all alternatives, encoding only for one 65 | | Prim Text -- ^ Carries the name of primitive type 66 | | Named SchemaName Schema 67 | | Empty -- ^ The void schema 68 | deriving (Eq, Data, Generic) 69 | 70 | instance Monoid Schema where mempty = Empty 71 | instance Semigroup Schema where 72 | Empty <> x = x 73 | x <> Empty = x 74 | OneOf aa <> b = OneOf (aa <> [b]) 75 | b <> OneOf aa = OneOf ([b] <> aa) 76 | a <> b = OneOf [a,b] 77 | 78 | instance Show Schema where 79 | showsPrec = go [] where 80 | go _een p Empty = showParen (p>0) ("Empty " ++) 81 | go seen _ (Array sc) = ('[' :) . go seen 5 sc . (']' :) 82 | go seen p (StringMap sc) = showParen (p > 5) (("Map " ++) . go seen 5 sc) 83 | go _een p (Enum opts) = 84 | showParen (p > 5) (intercalate "|" (NE.toList $ fmap unpack opts) ++) 85 | go seen p (OneOf scc) = showParen (p > 5) $ foldr (.) id $ NE.intersperse 86 | (" | " ++) 87 | (fmap (go seen 6) scc) 88 | go seen _ (Record fields) = 89 | ('{' :) 90 | . foldr 91 | (.) 92 | id 93 | (intersperse 94 | (", " ++) 95 | (fmap 96 | (\(r, Field {..}) -> 97 | (unpack r ++) . ((if isRequired then " :: " else " ?? ") ++) . go seen 0 fieldSchema 98 | ) 99 | (Map.toList fields) 100 | ) 101 | ) 102 | . ('}' :) 103 | go _een _ (Prim t ) = (unpack t ++) 104 | go seen p (Named n sc) = 105 | if n `elem` seen then 106 | ("let " ++) 107 | . (show n ++) 108 | . (" = " ++) 109 | . self 110 | . (" in " ++) 111 | . (show n ++) 112 | else 113 | (show n ++) 114 | where 115 | self = go (n : seen) p sc 116 | 117 | data Field = Field 118 | { fieldSchema :: Schema 119 | , isRequired :: Bool -- ^ defaults to True 120 | } 121 | deriving (Data, Eq, Generic) 122 | 123 | instance Show Field where 124 | showsPrec p (Field sc True) = showsPrec p sc 125 | showsPrec p (Field sc False) = ("?" ++) . 
showsPrec p sc 126 | 127 | fieldSchemaL :: Applicative f => (Schema -> f Schema) -> Field -> f Field 128 | fieldSchemaL f Field{..} = Field <$> f fieldSchema <*> pure isRequired 129 | 130 | pattern Unit :: Schema 131 | pattern Unit <- Record [] where Unit = Record [] 132 | 133 | pattern Union :: NonEmpty (Text, Schema) -> Schema 134 | pattern Union alts <- (preview _Union -> Just alts) where 135 | Union alts = review _Union alts 136 | 137 | _Unit :: Prism' Schema () 138 | _Unit = prism' build match 139 | where 140 | build () = Record [] 141 | 142 | match (Record []) = Just () 143 | match _ = Nothing 144 | 145 | _Union :: Prism' Schema (NonEmpty (Text, Schema)) 146 | _Union = prism' build match 147 | where 148 | build = foldMap (\(n,sc) -> Record [(n, Field sc True)]) 149 | 150 | match (OneOf scc) = traverse viewAlt scc 151 | match x = (:| []) <$> viewAlt x 152 | 153 | viewAlt :: Schema -> Maybe (Text, Schema) 154 | viewAlt (Record [(n,Field sc True)]) = Just (n, sc) 155 | viewAlt _ = Nothing 156 | 157 | -- ------------------------------------------------------------------------------------------------------ 158 | -- Validation 159 | 160 | type Trace = [Text] 161 | 162 | data Mismatch 163 | = MissingRecordField { name :: Text } 164 | | MissingEnumChoices { choices :: NonEmpty Text } 165 | | OptionalRecordField { name :: Text } 166 | | InvalidRecordField { name :: Text} 167 | | InvalidEnumValue { given :: Text, options :: NonEmpty Text} 168 | | InvalidRecordValue { value :: Value } 169 | | InvalidConstructor { name :: Text} 170 | | InvalidUnionValue { contents :: Value} 171 | | SchemaMismatch {a, b :: Schema} 172 | | ValueMismatch {expected :: Schema, got :: Value} 173 | | EmptySchema 174 | | PrimValidatorMissing { name :: Text } 175 | | PrimError {name, primError :: Text} 176 | | PrimMismatch {have, want :: Text} 177 | | InvalidChoice{choiceNumber :: Int} 178 | | UnusedFields (HashSet Text) 179 | | AllAlternativesFailed { mismatches :: [(Trace,Mismatch)]} 180 | | UnexpectedAllOf 181 | | NoMatches 182 | deriving (Eq, Show, Typeable) 183 | 184 | instance Exception Mismatch 185 | 186 | type Validators = HashMap Text ValidatePrim 187 | type ValidatePrim = Value -> Maybe Text 188 | 189 | -- | Structural validation of a JSON value against a schema. 
190 | -- Ignores extraneous fields in records 191 | validate :: Validators -> Schema -> Value -> [(Trace, Mismatch)] 192 | validate validators sc v = either (fmap (first reverse)) (\() -> []) $ runExcept (go [] sc v) where 193 | failWith :: Trace -> Mismatch -> Except [(Trace, Mismatch)] () 194 | failWith ctx e = throwE [(ctx, e)] 195 | 196 | go :: Trace -> Schema -> Value -> Except [(Trace, Mismatch)] () 197 | go ctx (Prim n) x = case Map.lookup n validators of 198 | Nothing -> failWith ctx (PrimValidatorMissing n) 199 | Just v -> case v x of 200 | Nothing -> pure () 201 | Just err -> failWith ctx (PrimError n err) 202 | go ctx (StringMap sc) (A.Object xx) = ifor_ xx $ \i -> go (i : ctx) sc 203 | go ctx (Array sc) (A.Array xx) = 204 | ifor_ xx $ \i -> go (pack ("[" <> show i <> "]") : ctx) sc 205 | go ctx (Enum opts) (A.String s) = 206 | if s `elem` opts then pure () else failWith ctx (InvalidEnumValue s opts) 207 | go ctx (Record ff) (A.Object xx) = ifor_ ff $ \n (Field sc opt) -> 208 | case (opt, Map.lookup n xx) of 209 | (_ , Just y ) -> go (n : ctx) sc y 210 | (True, Nothing) -> pure () 211 | _ -> failWith ctx (MissingRecordField n) 212 | go ctx (Union constructors) v@(A.Object xx) = case toList xx of 213 | [(n, v)] | Just sc <- lookup n constructors -> go (n : ctx) sc v 214 | | otherwise -> failWith ctx (InvalidConstructor n) 215 | _ -> throwE [(ctx, InvalidUnionValue v)] 216 | go ctx (OneOf scc) v = case decodeAlternatives v of 217 | [(v, 0)] -> msum $ fmap (\sc -> go ctx sc v) scc 218 | alts -> msum $ fmap 219 | (\(v, n) -> 220 | fromMaybe (failWith ctx (InvalidChoice n)) $ selectPath n $ fmap 221 | (\sc -> go (pack (show n) : ctx) sc v) 222 | (toList scc) 223 | ) 224 | alts 225 | go ctx a b = failWith ctx (ValueMismatch a b) 226 | 227 | -- ------------------------------------------------------------------------------------------------------ 228 | -- Subtype relation 229 | 230 | -- | @sub `isSubtypeOf` sup@ returns a witness that @sub@ is a subtype of @sup@, i.e. a cast function @sub -> sup@ 231 | -- 232 | -- > Array Bool `isSubtypeOf` Bool 233 | -- Just 234 | -- > Record [("a", Bool)] `isSubtypeOf` Record [("a", Number)] 235 | -- Nothing 236 | isSubtypeOf :: Validators -> Schema -> Schema -> Either [(Trace, Mismatch)] (Value -> Value) 237 | isSubtypeOf validators sub sup = runExcept $ go [] [] sup sub 238 | where 239 | failWith :: Trace -> Mismatch -> Except [(Trace, Mismatch)] b 240 | failWith ctx m = throwE [(reverse ctx, m)] 241 | 242 | -- TODO go: fix confusing order of arguments 243 | go 244 | :: [(SchemaName, Except [(Trace, Mismatch)] (Value -> Value))] 245 | -> Trace 246 | -> Schema 247 | -> Schema 248 | -> Except [(Trace, Mismatch)] (Value -> Value) 249 | -- go _ _ sup sub | pTraceShow ("isSubtypeOf", sub, sup) False = undefined 250 | go env ctx (Named a sa) (Named b sb) | a == b = 251 | case lookup a env of 252 | Just sol -> sol 253 | Nothing -> 254 | let sol = go ((a,sol) : env) ctx sa sb 255 | in sol 256 | go _ _ Empty Empty = pure id 257 | go _nv _tx Unit _ = pure $ const emptyValue 258 | go _nv _tx (Array _) Unit = pure $ const (A.Array []) 259 | go _nv _tx (Record _) Unit = pure $ const emptyValue 260 | go _nv _tx (StringMap _) Unit = pure $ const emptyValue 261 | go _nv _tx OneOf{} Unit = pure $ const emptyValue 262 | go _nv ctx (Prim a) (Prim b ) = do 263 | unless (a == b) $ failWith ctx (PrimMismatch b a) 264 | pure id 265 | go env ctx (Array a) (Array b) = do 266 | f <- go env ("[]" : ctx) a b 267 | pure $ over (_Array . 
traverse) f 268 | go env ctx (StringMap a) (StringMap b) = do 269 | f <- go env ("Map" : ctx) a b 270 | pure $ over (_Object . traverse) f 271 | go _nv ctx (Enum opts) (Enum opts') = 272 | case NE.nonEmpty $ NE.filter (`notElem` opts) opts' of 273 | Nothing -> pure id 274 | Just xx -> failWith ctx $ MissingEnumChoices xx 275 | go env ctx (Union opts) (Union opts') = do 276 | ff <- forM opts' $ \(n, sc) -> do 277 | sc' :: Schema <- maybe (failWith ctx $ InvalidConstructor n) return $ lookup n (toList opts) 278 | f <- go env (n : ctx) sc' sc 279 | return $ over (_Object . ix n) f 280 | return (foldr (.) id ff) 281 | go env ctx (Record opts) (Record opts') = do 282 | forM_ (Map.toList opts) $ \(n, f) -> 283 | unless (not (isRequired f) || Map.member n opts') $ 284 | failWith ctx $ MissingRecordField n 285 | ff <- forM (Map.toList opts') $ \(n', f'@(Field sc' _)) -> do 286 | case Map.lookup n' opts of 287 | Nothing -> do 288 | pure $ over _Object (Map.delete n') 289 | Just f@(Field sc _) -> do 290 | unless (not (isRequired f) || isRequired f') $ 291 | failWith ctx $ OptionalRecordField n' 292 | witness <- go env (n' : ctx) sc sc' 293 | pure $ over (_Object . ix n') witness 294 | return (foldr (.) id ff) 295 | go env ctx sup (OneOf [sub]) = go env ctx sup sub 296 | go env ctx sup (OneOf sub ) = do 297 | alts <- traverse (\sc -> (sc, ) <$> go env ctx sup sc) sub 298 | return $ \v -> head $ mapMaybe 299 | (\(sc, f) -> if null (validate validators sc v) then Just (f v) else Nothing) 300 | (toList alts) 301 | go env ctx (OneOf sup) sub = asum $ fmap (\x -> go env ctx x sub) sup 302 | go env ctx (Array a) b = do 303 | f <- go env ctx a b 304 | pure (A.Array . fromList . (: []) . f) 305 | -- go _tx a b | a == b = pure id 306 | go _nv ctx a b = failWith ctx (SchemaMismatch a b) 307 | 308 | -- ---------------------------------------------- 309 | -- Utils 310 | 311 | type Path = Int 312 | 313 | selectPath :: Path -> [a] -> Maybe a 314 | selectPath 0 (x : _) = Just x 315 | selectPath n (_ : xx) = selectPath (pred n) xx 316 | selectPath _ _ = Nothing 317 | 318 | tag :: Int -> Text 319 | tag i = "#" <> pack (show i) 320 | 321 | decodeAlternatives :: Value -> [(Value, Path)] 322 | decodeAlternatives obj@(A.Object x) = 323 | case 324 | [ (v, n) | (unpack -> '#' : (readMaybe -> Just n), v) <- Map.toList x ] 325 | of 326 | [] -> [(obj, 0)] 327 | other -> other 328 | decodeAlternatives x = [(x,0)] 329 | -- | Generalized lookup for Foldables 330 | lookup :: (Eq a, Foldable f) => a -> f (a,b) -> Maybe b 331 | lookup a = fmap snd . find ((== a) . fst) 332 | 333 | -- Is there more than one choice here ? 
Maybe this should be configuration 334 | emptyValue :: Value 335 | emptyValue = A.object [] 336 | -------------------------------------------------------------------------------- /example/Walkthrough.lhs: -------------------------------------------------------------------------------- 1 | Walkthrough for users and maintainers 2 | ========================================== 3 | 4 | > {-# LANGUAGE DeriveAnyClass #-} 5 | > {-# LANGUAGE DeriveGeneric #-} 6 | > {-# LANGUAGE DerivingStrategies #-} 7 | > {-# LANGUAGE OverloadedLabels #-} 8 | > {-# LANGUAGE OverloadedLists #-} 9 | > {-# LANGUAGE OverloadedStrings #-} 10 | > {-# LANGUAGE StandaloneDeriving #-} 11 | 12 | > module Walkthrough where 13 | > import Data.Aeson (Value) 14 | > import Data.Aeson.Encode.Pretty (encodePretty) 15 | > import Data.ByteString.Lazy.Char8 (putStrLn) 16 | > import Data.Either 17 | > import Data.Maybe (fromMaybe) 18 | > import Data.Generics.Labels 19 | > import qualified Generics.SOP as SOP 20 | > import GHC.Generics (Generic) 21 | > import Schemas 22 | > import Schemas.Internal 23 | > import Schemas.SOP 24 | > import Text.Pretty.Simple 25 | 26 | Motivation 27 | ---------- 28 | 29 | Consider a service exposing a JSON endpoint with the Response type below, 30 | which includes additional debug data not intended for sharing with the callers of this service. 31 | 32 | How would one use *schemas* to encode this response while leaving out the debug data? 33 | 34 | > data Mode = ModeI Int | ModeB Bool deriving Show 35 | 36 | > data Response = Response { 37 | > responseId :: Int, 38 | > mode :: Mode, 39 | > responseDetails :: Maybe String, 40 | > responseDebugData :: String 41 | > } deriving Show 42 | 43 | > exampleResponse :: Response 44 | > exampleResponse = Response 1 (ModeB True) (Just "details") "debug" 45 | 46 | The first thing to do is to define a typed schema, which can be automated using generics: 47 | 48 | > deriving instance Generic Mode 49 | > deriving anyclass instance SOP.Generic Mode 50 | > deriving anyclass instance SOP.HasDatatypeInfo Mode 51 | > instance HasSchema Mode where schema = gSchema defOptions 52 | 53 | > deriving instance Generic Response 54 | > deriving anyclass instance SOP.Generic Response 55 | > deriving anyclass instance SOP.HasDatatypeInfo Response 56 | > instance HasSchema Response where schema = gSchema defOptions 57 | 58 | Defining the schemas manually is also possible if more control is desired. 59 | 60 | > modeSchema :: TypedSchema Mode 61 | > modeSchema = union 62 | > [ ("I", alt #_ModeI) -- The funny hash indicates a label which 63 | > , ("B", alt #_ModeB) -- desugars to a prism via Data.Generics.Labels 64 | > ] 65 | 66 | > responseSchema :: TypedSchema Response 67 | > responseSchema = record $ Response 68 | > <$> field "id" responseId 69 | > <*> fieldWith modeSchema "mode" mode 70 | > <*> optField "details" responseDetails 71 | > <*> field "debugInfo" responseDebugData 72 | 73 | 'TypedSchema' is more than a schema: it is a formula that can be used to derive 74 | encoders and decoders, as well as a concrete schema (or many, if the typed schema 75 | makes use of alternatives). 76 | 77 | The next step is to define the concrete schema that the client will use when 78 | making a request to the server. The client schema defines the type of the 79 | response that the client expects to receive from us. The schema generated by the 80 | `extractSchema` interpretation must be a subtype of it, in the sense of `isSubtypeOf`.
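The GHCi session below extracts that schema. Given a candidate client schema, the requirement can also be checked programmatically. The following is a minimal sketch, not part of the compiled walkthrough sources: it assumes the `TypeApplications` extension, and `clientSchema` stands for the client's hypothetical concrete `Schema`.

```
-- Sketch only: check that the extracted server schema casts into a
-- candidate client schema, using the library's subtype witness.
acceptsClientSchema :: Schema -> Bool
acceptsClientSchema clientSchema =
  isRight (isSubtypeOf (validatorsFor @Response) (schemaFor @Response) clientSchema)
```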
81 | 82 | ``` 83 | *Main Text.Pretty.Simple NE> s = NE.head $ extractSchema responseSchema 84 | *Main Text.Pretty.Simple NE> :t s 85 | s :: Schema 86 | *Main Text.Pretty.Simple NE> pPrint s 87 | { debugInfo :: String 88 | , mode :: { B :: Boolean } | { I :: Integer } 89 | , details ?? String 90 | , id :: Integer 91 | } 92 | ``` 93 | 94 | In our case, before using this schema we want to remove the `debugInfo` field, 95 | so that it will not be included in the response. We can make this edit directly 96 | on the JSON representation of the untyped schema above, which can be produced 97 | by using `Schemas.encode`: 98 | 99 | ``` 100 | *Main Text.Pretty.Simple NE B> B.putStrLn $ Data.Aeson.Encode.Pretty.encodePretty $ encode s 101 | { 102 | "Record": { 103 | "debugInfo": { 104 | "schema": { 105 | "Prim": "String" 106 | } 107 | }, 108 | "mode": { 109 | "schema": { 110 | "Union": [ 111 | { 112 | "schema": { 113 | "Prim": "Boolean" 114 | }, 115 | "constructor": "B" 116 | }, 117 | { 118 | "schema": { 119 | "Prim": "Integer" 120 | }, 121 | "constructor": "I" 122 | } 123 | ] 124 | } 125 | }, 126 | "details": { 127 | "schema": { 128 | "Prim": "String" 129 | }, 130 | "isRequired": false 131 | }, 132 | "id": { 133 | "schema": { 134 | "Prim": "Integer" 135 | } 136 | } 137 | } 138 | } 139 | ``` 140 | 141 | After deleting the `debugInfo` field, the final schema will be: 142 | 143 | ``` 144 | { 145 | "Record": { 146 | "mode": { 147 | "schema": { 148 | "Union": [ 149 | { 150 | "schema": { 151 | "Prim": "Boolean" 152 | }, 153 | "constructor": "B" 154 | }, 155 | { 156 | "schema": { 157 | "Prim": "Integer" 158 | }, 159 | "constructor": "I" 160 | } 161 | ] 162 | } 163 | }, 164 | "details": { 165 | "schema": { 166 | "Prim": "String" 167 | }, 168 | "isRequired": false 169 | }, 170 | "id": { 171 | "schema": { 172 | "Prim": "Integer" 173 | } 174 | } 175 | } 176 | } 177 | ``` 178 | 179 | We can verify that the modified schema is a valid target using the predicate: 180 | 181 | > isValidTargetResponseSchema :: Schema -> Bool 182 | > isValidTargetResponseSchema = isRight . encodeToWith responseSchema 183 | 184 | To implement the endpoint, we need to obtain and decode the requested schema, 185 | then use it to drive the encoding of a `Response` value, as shown by the code 186 | below: 187 | 188 | > endpoint :: Value -> Response -> Either String Value 189 | > endpoint targetSchema response = 190 | > case encodeToWith responseSchema decodedTargetSchema of 191 | > Left mismatch -> 192 | > -- the source and target schemas are incompatible 193 | > Left (show mismatch) 194 | > Right encoder -> 195 | > Right (encoder response) 196 | > where 197 | > -- run with up to 1000 recursive steps 198 | > decodedTargetSchema = 199 | > fromMaybe (error "ran out of gas") $ 200 | > either (error.show) id $ 201 | > runResult 1000 $ decode targetSchema 202 | 203 | To decode the produced JSON on the client side, one can use *schemas* itself or 204 | a hand-rolled JSON parser. Currently *schemas* offers some support for deriving 205 | OpenApi 2.0 specifications in the `Schemas.OpenApi2` module.
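For instance, an OpenApi 2.0 rendering of the response schema can be derived from the extracted `Schema`. The session below is only a sketch (output elided): `encodeAsOpenApi2Document` and `defaultOptions` come from `Schemas.OpenApi2`, and the result is a JSON object with `definitions` (including an entry for `Response`) and `failures` keys.

```
*Main> import Schemas.OpenApi2
*Main Schemas.OpenApi2> s = NE.head $ extractSchema responseSchema
*Main Schemas.OpenApi2> B.putStrLn $ encodePretty $ encodeAsOpenApi2Document defaultOptions "Response" s
```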
206 | 207 | TypedSchema interpretations 208 | ------------------------------- 209 | 210 | A `TypedSchemaFlex from to` is a formula which can be given the following interpretations: 211 | 212 | - `encodeToWith` (target :: Schema) :: from -> Aeson.Value -- an encoder for a target schema 213 | - `decodeFromWith` (source :: Schema) :: Aeson.Value -> Maybe to -- a decoder from a source schema 214 | - `extractSchema` :: [Schema] -- a set of concrete schemas 215 | - `runSchema` :: from -> Either Mismatch to -- a projection 216 | 217 | The `runSchema` projection is total iff the `TypedSchemaFlex from to` formula is total, 218 | and in the case where `from ~ to` it should be the identity function. 219 | 220 | > id_response :: Response -> Response 221 | > id_response = either (error . show) id . runSchema responseSchema 222 | 223 | Backwards compatible changes 224 | -------------------------------- 225 | 226 | By decoupling the source and target schemas, we can make modifications to either 227 | of them in a backwards compatible way. For instance, the following modifications 228 | are backwards compatible for encoding: 229 | 230 | - remove fields from target schema 231 | - remove optional fields from source schema 232 | - add fields to source schema 233 | - add optional fields to response schema 234 | - add alternatives to enum/union in response schema 235 | - remove alternatives from enum/union in source schema 236 | 237 | In this context, changes to the source schema really mean changes to the Haskell 238 | datatype and to the corresponding `TypedSchema` value, and changes to the target 239 | schema mean edits to the `Schema` value. 240 | 241 | For example, below is the sequence of steps to add a new mandatory field: 242 | 243 | 1. Extend the Haskell type and the corresponding `TypedSchema` (the source schema) 244 | 2. Prove backwards compatibility via the predicate `isRight (encodeTo targetSchema)` 245 | 3. Redeploy the service 246 | 4. Eventually, update the clients with new target schemas that include the new field. 247 | 248 | The key point is that 3 and 4 need not happen at the same time, as the 249 | updated service is still able to serve clients using the old target schema. 250 | 251 | The addition of the `religion` field in the `Person2` example is a backwards 252 | compatible change. 253 | 254 | Non backwards compatible changes 255 | -------------------------------- 256 | 257 | Examples of non backwards compatible changes for encoding: 258 | 259 | - renaming of fields / alternatives 260 | - removal of mandatory fields from source schema 261 | - addition of mandatory fields in target schema 262 | 263 | The `Monoid TypedSchema` instance can be used to provide multiple schemas for a 264 | type, which makes it possible to implement non backwards compatible changes. Similarly, 265 | applicative record schemas have an `Alternative` instance for this. 266 | The search is greedy, so the ordering of alternatives matters. 267 | 268 | The renaming of the `education` field in the `Person2` example is a non backwards 269 | compatible change. 270 | 271 | Recursive datatypes 272 | ------------------- 273 | 274 | Schemas for recursive datatypes need to make use of the `named` combinator 275 | as seen in the [Person3](example/Person3.hs) example. This is currently 276 | not automatically inserted by `gSchema`. 277 | 278 | `Person3` contains an example of a recursive datatype. 279 | 280 | Recursive data 281 | -------------- 282 | 283 | While recursive datatypes are supported, recursive data is not.
For instance, 284 | the default schema for `Schema` is a recursive value and therefore cannot be 285 | encoded. Note that other non-recursive schemas can be encoded just fine, even 286 | if the `Schema` datatype itself is recursive. 287 | 288 | `Person3` contains an example of circular data which cannot be encoded with this library. 289 | 290 | How alternatives work 291 | ---------------------- 292 | 293 | A `TypedSchema a` is really a forest of schemas, where the `TAllOf` constructor 294 | introduces a new subtree. This makes it possible to provide multiple schemas for a datatype, 295 | allowing for backwards incompatible changes. The multiple schemas are tried in 296 | order, and the first successful one is used. 297 | 298 | A `Schema`, on the other hand, is a single tree. It follows that the functions 299 | `encodeToWith` and `decodeFromWith` perform a search to match the source and 300 | target schemas, where one of them is a forest. This search is potentially 301 | exponential if one tries to find the optimal match, so instead the search is 302 | greedy and the first match is used even if it's not optimal. 303 | 304 | The fields `education` and `age` in the `Person2` example use alternatives to 305 | encode non backwards compatible changes, and the commentary at the bottom of 306 | the module shows the forest of schemas that comes out. 307 | 308 | I now think that alternatives create more problems than they solve 309 | and should probably be removed: 310 | 311 | - The optimal search algorithm has exponential complexity, which forced me to use 312 | an `IterT` monad to treat non-termination as an effect. I could only make this 313 | work for decoding, and it introduces overhead and complexity. 314 | 315 | - Eventually I gave up on optimal search and instead switched to a greedy 316 | algorithm, making the `<>` operator non-commutative and leading to human error. 317 | The `IterT` stayed, as non-termination is still an issue for circular data and 318 | recursive schemas. 319 | 320 | - Mismatch errors are poor, even for schemas that don't use alternatives. 321 | 322 | - The denotational implementation (see next section) cannot be used. 323 | 324 | How projections work 325 | --------------------- 326 | 327 | Given a source schema S and a target schema T such that S is a subtype of T, 328 | the `isSubtypeOf` function provides a coercion as evidence of the subtyping 329 | relation. 330 | 331 | Denotationally, `encodeToWith` is equivalent to a normal `encodeWith` (from S to S) 332 | followed by a cast using the subtyping coercion. Similarly, `decodeFromWith` 333 | applies the coercion, and then decodes (the encoding direction is sketched in code at the end of this section). 334 | 335 | This denotation used to match the implementation very closely in schemas 0.2.0; 336 | see https://github.com/pepeiborra/schemas/blob/v0.2.0.2/src/Schemas/Internal.hs 337 | This is no longer the case, and `encodeToWith`/`decodeFromWith` perform their 338 | own subtype checking, which is very unfortunate. To see why, consider the 339 | implications of alternatives: 340 | 341 | - For encoding, the denotational approach would involve creating the forest of 342 | all possible JSON values, which would be very inefficient. 343 | - For decoding, the denotational approach would similarly involve creating 344 | a forest of casts.
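As a closing illustration, here is a minimal sketch of that denotation for the encoding direction. It is not the library's actual implementation, it ignores alternatives, and it assumes the `ScopedTypeVariables` and `TypeApplications` extensions, which this walkthrough does not itself enable.

```
-- Sketch only: encode with the source schema, then cast the resulting JSON
-- value using the subtyping witness returned by isSubtypeOf.
encodeToDenotation :: forall a . HasSchema a => Schema -> Maybe (a -> Value)
encodeToDenotation target =
  case isSubtypeOf (validatorsFor @a) (schemaFor @a) target of
    Right cast -> Just (cast . encode)
    Left _     -> Nothing
```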
345 | -------------------------------------------------------------------------------- /example/Person2.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass #-} 2 | {-# LANGUAGE DeriveGeneric #-} 3 | {-# LANGUAGE DerivingStrategies #-} 4 | {-# LANGUAGE DuplicateRecordFields #-} 5 | {-# LANGUAGE FlexibleInstances #-} 6 | {-# LANGUAGE OverloadedLists #-} 7 | {-# LANGUAGE OverloadedStrings #-} 8 | 9 | module Person2 where 10 | 11 | import Control.Applicative 12 | import Data.Generics.Labels () 13 | import Data.List.NonEmpty (NonEmpty) 14 | import qualified Data.List.NonEmpty as NE 15 | import Data.Maybe 16 | import Data.String 17 | import GHC.Exts (IsList(..)) 18 | import GHC.Generics 19 | import qualified Generics.SOP as SOP 20 | import Person 21 | import Schemas 22 | 23 | -- | The v2 of the Person schema adds a new optional field 'religion', 24 | -- makes the 'age' field optional, 25 | -- and renames 'studies' to 'education' 26 | data Person2 = Person2 27 | { name :: String 28 | , age :: Maybe Int -- now optional 29 | , addresses :: [String] 30 | , religion :: Maybe Religion -- new 31 | , education :: NonEmpty Education -- renamed and turned into a list 32 | } 33 | deriving (Generic, Eq, Show) 34 | deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo) 35 | 36 | data Religion = Catholic | Anglican | Muslim | Hindu 37 | deriving (Bounded, Enum, Eq, Show) 38 | 39 | instance HasSchema Religion where 40 | schema = enum (fromString . show) (fromList enumerate) 41 | 42 | enumerate :: (Bounded a, Enum a) => [a] 43 | enumerate = [minBound ..] 44 | 45 | instance HasSchema Person2 where 46 | schema = 47 | record 48 | $ Person2 49 | <$> field "name" Person2.name 50 | <*> ( optField "age" Person2.age 51 | -- the 'age' field used to be mandatory 52 | -- so we provide an alternative mandatory field definition 53 | -- with a default value for the 'Nothing' case 54 | <|> fieldWith (dimap (fromMaybe (-1)) Just schema) "age" Person2.age 55 | ) 56 | <*> field "addresses" Person2.addresses 57 | <*> optField "religion" Person2.religion 58 | <*> ( field "education" Person2.education 59 | -- the education field used to be called studies and be a singleton 60 | <|> (NE.:| []) <$> field "studies" (NE.head . 
Person2.education) 61 | ) 62 | 63 | pepe2 :: Person2 64 | pepe2 = Person2 "Pepe" 65 | (Just 38) 66 | ["2 Edward Square", "La Mar 10"] 67 | Nothing 68 | [PhD "Computer Science", Degree "Engineering" ] 69 | 70 | -- >>> import qualified Data.ByteString.Lazy.Char8 as B 71 | -- >>> import Data.Aeson.Encode.Pretty 72 | -- >>> import Data.Either 73 | -- >>> B.putStrLn $ encodePretty $ encode pepe2 74 | -- { 75 | -- "education": [ 76 | -- { 77 | -- "PhD": "Computer Science" 78 | -- }, 79 | -- { 80 | -- "Degree": "Engineering" 81 | -- } 82 | -- ], 83 | -- "addresses": [ 84 | -- "2 Edward Square", 85 | -- "La Mar 10" 86 | -- ], 87 | -- "age": 38, 88 | -- "name": "Pepe" 89 | -- } 90 | -- >>> B.putStrLn $ encodePretty $ fromRight undefined (encodeTo (schemaFor @Person2)) pepe2 91 | -- { 92 | -- "education": [ 93 | -- { 94 | -- "PhD": "Computer Science" 95 | -- }, 96 | -- { 97 | -- "Degree": "Engineering" 98 | -- } 99 | -- ], 100 | -- "addresses": [ 101 | -- "2 Edward Square", 102 | -- "La Mar 10" 103 | -- ], 104 | -- "age": 38, 105 | -- "name": "Pepe" 106 | -- } 107 | 108 | -- Person2 is a subtype of Person therefore we can encode a Person2 as a Person 109 | -- >>> import qualified Data.ByteString.Lazy.Char8 as B 110 | -- >>> import Data.Aeson.Encode.Pretty 111 | -- >>> B.putStrLn $ encodePretty $ fromRight undefined (encodeTo (schemaFor @Person)) pepe2 112 | -- { 113 | -- "addresses": [ 114 | -- "2 Edward Square", 115 | -- "La Mar 10" 116 | -- ], 117 | -- "age": 38, 118 | -- "studies": { 119 | -- "PhD": "Computer Science" 120 | -- }, 121 | -- "name": "Pepe" 122 | -- } 123 | 124 | -- We can also upgrade a Person into a Person2, because the new field is optional 125 | -- >>> import Text.Pretty.Simple 126 | -- >>> pPrintNoColor $ fromRight undefined (decodeFrom @Person2 (schemaFor @Person)) (encode pepe) 127 | -- Right 128 | -- ( Person2 129 | -- { name = "Pepe" 130 | -- , age = Just 38 131 | -- , addresses = 132 | -- [ "2 Edward Square" 133 | -- , "La Mar 10" 134 | -- ] 135 | -- , religion = Nothing 136 | -- , education = PhD { unPhD = "Computer Science" } :| [] 137 | -- } 138 | -- ) 139 | 140 | -- >>> B.putStrLn $ encodePretty $ encode (schemaFor @Person2) 141 | -- { 142 | -- "Record": { 143 | -- "education": { 144 | -- "schema": { 145 | -- "Array": { 146 | -- "Union": [ 147 | -- { 148 | -- "schema": { 149 | -- "Prim": "String" 150 | -- }, 151 | -- "constructor": "Degree" 152 | -- }, 153 | -- { 154 | -- "schema": { 155 | -- "Prim": "String" 156 | -- }, 157 | -- "constructor": "PhD" 158 | -- }, 159 | -- { 160 | -- "schema": { 161 | -- "Record": {} 162 | -- }, 163 | -- "constructor": "NoEducation" 164 | -- } 165 | -- ] 166 | -- } 167 | -- } 168 | -- }, 169 | -- "religion": { 170 | -- "schema": { 171 | -- "Enum": [ 172 | -- "Catholic", 173 | -- "Anglican", 174 | -- "Muslim", 175 | -- "Hindu" 176 | -- ] 177 | -- }, 178 | -- "isRequired": false 179 | -- }, 180 | -- "addresses": { 181 | -- "schema": { 182 | -- "Array": { 183 | -- "Prim": "String" 184 | -- } 185 | -- } 186 | -- }, 187 | -- "age": { 188 | -- "schema": { 189 | -- "Prim": "Integer" 190 | -- }, 191 | -- "isRequired": false 192 | -- }, 193 | -- "name": { 194 | -- "schema": { 195 | -- "Prim": "String" 196 | -- } 197 | -- } 198 | -- } 199 | -- } 200 | 201 | -- >>> import Data.Aeson.Encode.Pretty 202 | -- >>> mapM_ (B.putStrLn . encodePretty . 
encode) (extractSchema $ schema @Person2) 203 | -- { 204 | -- "Record": { 205 | -- "education": { 206 | -- "schema": { 207 | -- "Array": { 208 | -- "Union": [ 209 | -- { 210 | -- "schema": { 211 | -- "Prim": "String" 212 | -- }, 213 | -- "constructor": "Degree" 214 | -- }, 215 | -- { 216 | -- "schema": { 217 | -- "Prim": "String" 218 | -- }, 219 | -- "constructor": "PhD" 220 | -- }, 221 | -- { 222 | -- "schema": { 223 | -- "Record": {} 224 | -- }, 225 | -- "constructor": "NoEducation" 226 | -- } 227 | -- ] 228 | -- } 229 | -- } 230 | -- }, 231 | -- "religion": { 232 | -- "schema": { 233 | -- "Enum": [ 234 | -- "Catholic", 235 | -- "Anglican", 236 | -- "Muslim", 237 | -- "Hindu" 238 | -- ] 239 | -- }, 240 | -- "isRequired": false 241 | -- }, 242 | -- "addresses": { 243 | -- "schema": { 244 | -- "Array": { 245 | -- "Prim": "String" 246 | -- } 247 | -- } 248 | -- }, 249 | -- "age": { 250 | -- "schema": { 251 | -- "Prim": "Integer" 252 | -- }, 253 | -- "isRequired": false 254 | -- }, 255 | -- "name": { 256 | -- "schema": { 257 | -- "Prim": "String" 258 | -- } 259 | -- } 260 | -- } 261 | -- } 262 | -- { 263 | -- "Record": { 264 | -- "religion": { 265 | -- "schema": { 266 | -- "Enum": [ 267 | -- "Catholic", 268 | -- "Anglican", 269 | -- "Muslim", 270 | -- "Hindu" 271 | -- ] 272 | -- }, 273 | -- "isRequired": false 274 | -- }, 275 | -- "addresses": { 276 | -- "schema": { 277 | -- "Array": { 278 | -- "Prim": "String" 279 | -- } 280 | -- } 281 | -- }, 282 | -- "age": { 283 | -- "schema": { 284 | -- "Prim": "Integer" 285 | -- }, 286 | -- "isRequired": false 287 | -- }, 288 | -- "studies": { 289 | -- "schema": { 290 | -- "Union": [ 291 | -- { 292 | -- "schema": { 293 | -- "Prim": "String" 294 | -- }, 295 | -- "constructor": "Degree" 296 | -- }, 297 | -- { 298 | -- "schema": { 299 | -- "Prim": "String" 300 | -- }, 301 | -- "constructor": "PhD" 302 | -- }, 303 | -- { 304 | -- "schema": { 305 | -- "Record": {} 306 | -- }, 307 | -- "constructor": "NoEducation" 308 | -- } 309 | -- ] 310 | -- } 311 | -- }, 312 | -- "name": { 313 | -- "schema": { 314 | -- "Prim": "String" 315 | -- } 316 | -- } 317 | -- } 318 | -- } 319 | -- { 320 | -- "Record": { 321 | -- "education": { 322 | -- "schema": { 323 | -- "Array": { 324 | -- "Union": [ 325 | -- { 326 | -- "schema": { 327 | -- "Prim": "String" 328 | -- }, 329 | -- "constructor": "Degree" 330 | -- }, 331 | -- { 332 | -- "schema": { 333 | -- "Prim": "String" 334 | -- }, 335 | -- "constructor": "PhD" 336 | -- }, 337 | -- { 338 | -- "schema": { 339 | -- "Record": {} 340 | -- }, 341 | -- "constructor": "NoEducation" 342 | -- } 343 | -- ] 344 | -- } 345 | -- } 346 | -- }, 347 | -- "religion": { 348 | -- "schema": { 349 | -- "Enum": [ 350 | -- "Catholic", 351 | -- "Anglican", 352 | -- "Muslim", 353 | -- "Hindu" 354 | -- ] 355 | -- }, 356 | -- "isRequired": false 357 | -- }, 358 | -- "addresses": { 359 | -- "schema": { 360 | -- "Array": { 361 | -- "Prim": "String" 362 | -- } 363 | -- } 364 | -- }, 365 | -- "age": { 366 | -- "schema": { 367 | -- "Prim": "Integer" 368 | -- } 369 | -- }, 370 | -- "name": { 371 | -- "schema": { 372 | -- "Prim": "String" 373 | -- } 374 | -- } 375 | -- } 376 | -- } 377 | -- { 378 | -- "Record": { 379 | -- "religion": { 380 | -- "schema": { 381 | -- "Enum": [ 382 | -- "Catholic", 383 | -- "Anglican", 384 | -- "Muslim", 385 | -- "Hindu" 386 | -- ] 387 | -- }, 388 | -- "isRequired": false 389 | -- }, 390 | -- "addresses": { 391 | -- "schema": { 392 | -- "Array": { 393 | -- "Prim": "String" 394 | -- } 395 | -- } 396 | -- }, 397 | -- "age": { 398 | 
-- "schema": { 399 | -- "Prim": "Integer" 400 | -- } 401 | -- }, 402 | -- "studies": { 403 | -- "schema": { 404 | -- "Union": [ 405 | -- { 406 | -- "schema": { 407 | -- "Prim": "String" 408 | -- }, 409 | -- "constructor": "Degree" 410 | -- }, 411 | -- { 412 | -- "schema": { 413 | -- "Prim": "String" 414 | -- }, 415 | -- "constructor": "PhD" 416 | -- }, 417 | -- { 418 | -- "schema": { 419 | -- "Record": {} 420 | -- }, 421 | -- "constructor": "NoEducation" 422 | -- } 423 | -- ] 424 | -- } 425 | -- }, 426 | -- "name": { 427 | -- "schema": { 428 | -- "Prim": "String" 429 | -- } 430 | -- } 431 | -- } 432 | -- } 433 | -------------------------------------------------------------------------------- /test/SchemasSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ViewPatterns #-} 2 | {-# LANGUAGE AllowAmbiguousTypes #-} 3 | {-# LANGUAGE ImpredicativeTypes #-} 4 | {-# LANGUAGE OverloadedLists #-} 5 | {-# LANGUAGE OverloadedStrings #-} 6 | {-# LANGUAGE PatternSynonyms #-} 7 | {-# LANGUAGE ScopedTypeVariables #-} 8 | {-# LANGUAGE TypeApplications #-} 9 | 10 | module SchemasSpec where 11 | 12 | import Control.Exception 13 | import Control.Lens (_Just, _Nothing, _Empty, _Cons) 14 | import Control.Monad (join) 15 | import Control.Monad.Trans.Except (Except, ExceptT(..)) 16 | import qualified Data.Aeson as A 17 | import qualified Data.Coerce 18 | import Data.Either 19 | import Data.Foldable 20 | import Data.Functor.Identity 21 | import Data.Generics 22 | import qualified Data.List.NonEmpty as NE 23 | import Data.Maybe 24 | import Generators 25 | import Looper 26 | import Person 27 | import Person2 28 | import Person3 29 | import Person4 30 | import Schemas 31 | import qualified Schemas.Attempt as Attempt 32 | import Schemas.Internal (liftAttempt) 33 | import Schemas.Untyped (Validators) 34 | import System.Timeout 35 | import Test.Hspec 36 | import Test.Hspec.Runner (configQuickCheckMaxSuccess, hspecWith, defaultConfig) 37 | import Test.QuickCheck (arbitrary, sized, forAll, suchThat) 38 | import Text.Show.Functions () 39 | import Unions 40 | 41 | main :: IO () 42 | main = hspecWith defaultConfig{configQuickCheckMaxSuccess = Just 10000} spec 43 | 44 | spec :: Spec 45 | spec = do 46 | describe "encode" encodeSpec 47 | describe "encodeTo" encodeToSpec 48 | describe "isSubtypeOf" isSubtypeOfSpec 49 | 50 | describe "extractSchema" $ do 51 | it "Named" $ 52 | shouldNotDiverge $ evaluate $ extractSchema $ schema @Schema 53 | it "Unions" $ 54 | extractSchema (union [("Just", alt (_Just @())), ("Nothing", alt _Nothing)]) 55 | `shouldBe` [Union [("Nothing", Unit) ,("Just", Unit)]] 56 | 57 | describe "HasSchema" $ do 58 | it "Left is a constructor of Either" $ do 59 | shouldBeAbleToDecode @(Either () ()) [Union [constructor' "Left" Unit]] 60 | it "left is a constructor of Either too" $ do 61 | shouldBeAbleToDecode @(Either () ()) [Union [constructor' "left" Unit]] 62 | 63 | describe "examples" examplesSpec 64 | 65 | encodeSpec :: Spec 66 | encodeSpec = do 67 | it "prims" $ do 68 | let encoder = encode 69 | shouldNotDiverge $ evaluate encoder 70 | shouldNotDiverge $ evaluate $ encoder True 71 | it "unions" $ do 72 | let encoder = encode 73 | shouldNotDiverge $ evaluate encoder 74 | shouldNotDiverge $ evaluate $ encoder (Left ()) 75 | shouldNotDiverge $ evaluate $ encoder (Right ()) 76 | 77 | it "recursive schemas" $ do 78 | let encoder = encodeWith listSchema 79 | shouldNotDiverge $ evaluate encoder 80 | shouldNotDiverge $ evaluate $ encoder [()] 81 | 82 | it "is 
the inverse of decoding for canonical schemas" $ 83 | forAll (canonical <$> arbitrary) $ \sc -> 84 | getSuccess (pure encode >>= decode . ($ sc)) == Just sc 85 | 86 | encodeToSpec :: Spec 87 | encodeToSpec = do 88 | it "is lazy" $ do 89 | evaluate (attemptSuccessOrError (encodeToWith (record $ Just <$> field "bottom" fromJust) (Record [makeField "bottom" prim True])) (Nothing :: Maybe Bool)) 90 | `shouldThrow` \(_ :: SomeException) -> True 91 | let encoded = 92 | attemptSuccessOrError 93 | (encodeToWith (record $ Just <$> field "bottom" fromJust) (Record [])) 94 | (Nothing :: Maybe Bool) 95 | encoded `shouldBe` A.Object [] 96 | 97 | it "SomeNone Some" $ do 98 | let encoded = encodeWith schemaSomeNone (Some ()) 99 | shouldNotDiverge $ evaluate encoded 100 | encoded `shouldBe` A.Object [] 101 | 102 | it "NoneSome Some" $ do 103 | let encoded = encodeWith schemaNoneSome (Some ()) 104 | shouldNotDiverge $ evaluate encoded 105 | encoded `shouldBe` A.Object [] 106 | 107 | it "SomeNone None" $ do 108 | let encoded = encodeWith schemaSomeNone (None @(Either () ())) 109 | shouldNotDiverge $ evaluate encoded 110 | encoded `shouldBe` A.Object [] 111 | 112 | it "NoneSome None" $ do 113 | let encoded = encodeWith schemaNoneSome (None @(Either () ())) 114 | shouldNotDiverge $ evaluate encoded 115 | encoded `shouldBe` A.Object [] 116 | 117 | it "Three" $ do 118 | let encoded = encodeWith (schemaThree schemaNoneSome schema) (Three @(Some ()) @()) 119 | shouldNotDiverge $ evaluate encoded 120 | encoded `shouldBe` A.Object [] 121 | 122 | it "Three" $ do 123 | let encoded = encodeWith (schemaThree' schema schemaNoneSome) (Three @() @(Some ())) 124 | shouldNotDiverge $ evaluate encoded 125 | encoded `shouldBe` A.Object [] 126 | 127 | describe "Either" $ do 128 | let -- A schema supporting both camelCase and lowercase either 129 | source :: TypedSchema (Either () ()) 130 | source = schema 131 | 132 | it "lowerCase" $ do 133 | let target = Union [("right", schemaFor @()), ("left", schemaFor @())] 134 | encoder = encodeToWith source target 135 | Right f = encoder 136 | encoder `shouldSatisfy` isRight 137 | f (Right ()) `shouldBe` A.object [("right", A.Object [])] 138 | 139 | it "camelCase" $ do 140 | let target = Union [("Right", schemaFor @()), ("Left", schemaFor @())] 141 | encoder = encodeToWith source target 142 | Right f = encoder 143 | f (Right ()) `shouldBe` A.object [("Right", A.Object [])] 144 | 145 | describe "Either (nested)" $ do 146 | let -- A schema supporting both camelCase and lowercase either 147 | source :: TypedSchema ((), Either () ()) 148 | source = schema 149 | 150 | wrap sc = Record [("$1", Field (schemaFor @()) True), ("$2", Field sc True)] 151 | 152 | wrapVal v = A.object [("$1", A.Object []), ("$2", v)] 153 | 154 | it "lowerCase" $ do 155 | let target = wrap $ Union [("right", schemaFor @()), ("left", schemaFor @())] 156 | encoder = encodeToWith source target 157 | Right f = encoder 158 | encoder `shouldSatisfy` isRight 159 | f ((),Right ()) `shouldBe` wrapVal (A.object [("right", A.Object [])]) 160 | 161 | it "camelCase" $ do 162 | let target = wrap $ Union [("Right", schemaFor @()), ("Left", schemaFor @())] 163 | encoder = encodeToWith source target 164 | Right f = encoder 165 | encoder `shouldSatisfy` isRight 166 | f ((),Right ()) `shouldBe` wrapVal (A.object [("Right", A.Object [])]) 167 | 168 | describe "canEncode" $ do 169 | 170 | it "Unions of 1 constructor" $ do 171 | union [("Just", alt (_Just @()))] `shouldBeAbleToEncodeTo` [Union [("Just", Unit)]] 172 | 173 | isSubtypeOfSpec 
:: Spec 174 | isSubtypeOfSpec = do 175 | it "is reflexive (in absence of OneOf)" $ forAll (sized genSchema `suchThat` (not . hasOneOf)) $ \sc -> 176 | sc `shouldBeSubtypeOf` sc 177 | it "subtypes can add fields" $ do 178 | Record [makeField "a" prim True, makeField "def" prim True] 179 | `shouldBeSubtypeOf` Record [makeField "def" prim True] 180 | Record [makeField "a" prim False, makeField "def" prim True] 181 | `shouldBeSubtypeOf` Record [makeField "def" prim True] 182 | it "subtypes cannot turn a Required makeField into Optional" $ do 183 | Record [makeField "a" prim False] 184 | `shouldNotBeSubtypeOf` Record [makeField "a" prim True] 185 | it "subtypes can turn an Optional makeField into Required" $ do 186 | Record [makeField "a" prim True] 187 | `shouldBeSubtypeOf` Record [makeField "a" prim False] 188 | it "subtypes can relax the type of a field" $ do 189 | Record [makeField "a" prim True] 190 | `shouldBeSubtypeOf` Record [makeField "a" (Array prim) True] 191 | it "subtypes can relax the type of a constructor field" $ do 192 | Union [constructor' "a" prim] 193 | `shouldBeSubtypeOf` Union [constructor' "a" (Array prim)] 194 | it "subtypes cannot remove Required fields" $ do 195 | Record [makeField "def" prim True] `shouldNotBeSubtypeOf` Record 196 | [makeField "def" prim True, makeField "a" prim True] 197 | it "subtypes can remove Optional fields" $ do 198 | Record [makeField "def" prim True] `shouldBeSubtypeOf` Record 199 | [makeField "def" prim True, makeField "a" prim False] 200 | it "subtypes can remove enum choices" $ do 201 | Enum ["def"] `shouldBeSubtypeOf` Enum ["A", "def"] 202 | it "subtypes cannot add enum choices" $ do 203 | Enum ["A", "def"] `shouldNotBeSubtypeOf` Enum ["def"] 204 | it "subtypes can remove constructors" $ do 205 | Union [constructor' "B" Unit] 206 | `shouldBeSubtypeOf` Union [constructor' "A" Unit, constructor' "B" Unit] 207 | it "subtypes cannot add constructors" $ do 208 | Union [constructor' "A" prim, constructor' "B" Unit] 209 | `shouldNotBeSubtypeOf` Union [constructor' "A" prim] 210 | it "subtypes can drop an array" $ do 211 | prim `shouldBeSubtypeOf` Array prim 212 | it "subtypes cannot introduce an array" $ do 213 | Array prim `shouldNotBeSubtypeOf` prim 214 | 215 | examplesSpec :: Spec 216 | examplesSpec = do 217 | describe "Schema" $ 218 | schemaSpec schema (schemaFor @Person2) 219 | let person4_v0 = schemaFor @Person4 220 | person2_v0 = schemaFor @Person2 221 | person2_v2 = extractSchema (schema @Person2) NE.!! 
2 222 | person3_v0 = schemaFor @Person3 223 | person4_vPerson3 = person3_v0 224 | encoder_p4v0 = encodeTo person4_v0 225 | encoder_p3_to_p4 = encodeTo person4_vPerson3 226 | encoder_p2v0 = encodeTo person2_v0 227 | encoder_p3v0 = encodeTo @Person3 person3_v0 228 | decoder_p2v0 = decodeFrom @Person4 person2_v0 229 | decoder_p2v2 = decodeFrom person2_v2 230 | 231 | describe "NoneSome Bool" $ 232 | schemaSpec schemaNoneSome (None :: Some Bool) 233 | 234 | describe "SomeNone Bool" $ 235 | schemaSpec schemaSomeNone (None :: Some Bool) 236 | 237 | describe "NoneSome (Either () ())" $ 238 | schemaSpec schemaNoneSome (None :: Some (Either () ())) 239 | 240 | describe "SomeNone (Either () ())" $ 241 | schemaSpec schemaSomeNone (None :: Some (Either () ())) 242 | 243 | describe "Three Bool Int" $ 244 | schemaSpec (schemaThree schema schema) (Three :: Three Bool Int) 245 | 246 | describe "Three Int Bool" $ 247 | schemaSpec (schemaThree' schema schema) (Three :: Three Int Bool) 248 | 249 | describe "Person" $ do 250 | schemaSpec schema pepe 251 | 252 | describe "Person2" $ do 253 | schemaSpec schema pepe2 254 | describe "Person2 < Person" $ do 255 | it "Encode 2->1" $ 256 | shouldBeAbleToEncode @Person2 (extractSchema @Person schema) 257 | it "Decode 1->2" $ 258 | shouldBeAbleToDecode @Person2 (extractSchema @Person schema) 259 | it "Decode 2->1" $ 260 | shouldNotBeAbleToDecode @Person (extractSchema @Person2 schema) 261 | it "pepe2 `as` Person" $ do 262 | let encoder = encodeTo (schemaFor @Person) 263 | encoded = attemptSuccessOrError encoder pepe2 264 | encoder `shouldSatisfy` isRight 265 | decode encoded `shouldBe` Success pepe 266 | it "pepe `as` Person2" $ do 267 | let decoder = decodeFrom (schemaFor @Person) 268 | decoder `shouldSatisfy` isSuccess 269 | (pure encode >>= getSuccessOrError decoder . 
($ pepe)) 270 | `shouldBe` Success pepe2{Person2.education = [Person.studies pepe]} 271 | 272 | describe "Person3" $ do 273 | -- disabled because encode diverges and does not support IterT yet 274 | -- schemaSpec schema pepe3 275 | it "can show the Person 3 (circular) schema" $ 276 | shouldNotDiverge $ evaluate $ length $ show $ schemaFor @Person3 277 | it "can compute an encoder for Person3 (circular schema)" $ 278 | shouldNotDiverge $ evaluate encoder_p3v0 279 | it "can encode a finite example" $ do 280 | 281 | shouldNotDiverge $ evaluate $ encode martin 282 | shouldNotDiverge $ evaluate $ attemptSuccessOrError encoder_p3v0 martin 283 | 284 | describe "Person4" $ do 285 | schemaSpec schema pepe4 286 | let encoded_pepe4 = attemptSuccessOrError encoder_p4v0 pepe4 287 | encoded_pepe3 = attemptSuccessOrError encoder_p3_to_p4 pepe3{Person3.spouse = Nothing} 288 | encoded_pepe2 = attemptSuccessOrError encoder_p2v0 pepe2 289 | it "can compute an encoder for Person4" $ do 290 | shouldNotDiverge $ evaluate encoder_p4v0 291 | encoder_p4v0 `shouldSatisfy` isRight 292 | it "can compute an encoder to Person3 in finite time" $ do 293 | shouldNotDiverge $ evaluate encoder_p3_to_p4 294 | it "can compute an encoder to Person2 in finite time" $ do 295 | shouldNotDiverge $ evaluate encoder_p2v0 296 | it "can encode a Person4" $ do 297 | shouldNotDiverge $ evaluate $ A.encode encoded_pepe4 298 | it "can encode a Person2 as Person4 in finite time" $ do 299 | shouldNotDiverge $ evaluate $ A.encode encoded_pepe2 300 | it "can decode a fully defined record with source schema" $ do 301 | let res = getSuccessOrError (decodeFrom person4_v0) encoded_pepe4 302 | shouldNotDiverge $ evaluate res 303 | res `shouldBe` Success pepe4 304 | it "can decode a fully defined record without source schema" $ do 305 | let res = decode encoded_pepe4 306 | shouldNotDiverge $ evaluate res 307 | res `shouldBe` Success pepe4 308 | it "cannot construct a Person2 v0 decoder" $ 309 | decoder_p2v0 `shouldSatisfy` isFailure 310 | it "can construct a Person2 v1 decoder" $ 311 | decoder_p2v2 `shouldSatisfy` isSuccess 312 | it "can decode a Person2 v1" $ do 313 | let res = getSuccessOrError decoder_p2v2 encoded_pepe2 314 | holds = res == Success pepe4 315 | shouldNotDiverge $ evaluate holds 316 | shouldNotDiverge $ evaluate $ length $ show res 317 | res `shouldBe` Success pepe4 318 | describe "Looper" $ do 319 | schemaSpec schema looper1 320 | 321 | schemaSpec :: forall a. 
(Eq a, Show a) => TypedSchema a -> a -> Spec 322 | schemaSpec sc ex = do 323 | let encoder = encodeToWith sc s 324 | decoder = decodeFromWith sc s 325 | s = NE.head $ extractSchema sc 326 | encodedExample = attemptSuccessOrError encoder ex 327 | it "Can extract untyped schema" $ 328 | shouldNotDiverge $ evaluate s 329 | it "Can encode itself" $ do 330 | shouldNotDiverge $ evaluate encoder 331 | encoder `shouldSatisfy` isRight 332 | it "Can decode itself" $ do 333 | shouldNotDiverge $ evaluate decoder 334 | decoder `shouldSatisfy` isSuccess 335 | it "Does not diverge decoding bad input" $ do 336 | let d = join $ Attempt.attemptSuccess $ runResult 1000 $ decodeFromWith sc (NE.head $ extractSchema sc) 337 | shouldNotDiverge $ evaluate d 338 | shouldNotDiverge $ evaluate $ join $ join $ Attempt.attemptSuccess $ runResult 1000 $ traverse ($ A.String "Foo") d 339 | it "Roundtrips ex" $ do 340 | let res = getSuccessOrError decoder encodedExample 341 | shouldNotDiverge $ evaluate encodedExample 342 | shouldNotDiverge $ evaluate res 343 | runResult 1000 res `shouldBe` Right (Just ex) 344 | it "Roundtrips ex (2)" $ do 345 | let res = pure (encodeWith sc) >>= decodeWith sc . ($ ex) 346 | shouldNotDiverge $ evaluate res 347 | runResult 1000 res `shouldBe` Right (Just ex) 348 | 349 | shouldBeSubtypeOf :: HasCallStack => Schema -> Schema -> Expectation 350 | shouldBeSubtypeOf a b = case isSubtypeOf primValidators a b of 351 | Right _ -> pure () 352 | _ -> expectationFailure $ show a <> " should be a subtype of " <> show b 353 | 354 | shouldNotBeSubtypeOf :: HasCallStack => Schema -> Schema -> Expectation 355 | shouldNotBeSubtypeOf a b = case isSubtypeOf primValidators a b of 356 | Right _ -> expectationFailure $ show a <> " should not be a subtype of " <> show b 357 | _ -> pure () 358 | 359 | shouldDiverge :: (HasCallStack, Show a) => IO a -> Expectation 360 | shouldDiverge act = do 361 | res <- timeout 1000000 act 362 | case res of 363 | Just{} -> expectationFailure "Did not diverge" 364 | Nothing -> return () 365 | 366 | shouldNotDiverge :: (HasCallStack, Show a) => IO a -> Expectation 367 | shouldNotDiverge act = do 368 | res <- timeout 1000000 act 369 | case res of 370 | Nothing -> error "Did not terminate after 1s" 371 | Just {} -> return () 372 | 373 | shouldBeAbleToEncode :: forall a . HasCallStack => (HasSchema a) => NE.NonEmpty Schema -> Expectation 374 | shouldBeAbleToEncode = shouldBeAbleToEncodeTo (schema @a) 375 | 376 | shouldBeAbleToEncodeTo :: forall a . HasCallStack => TypedSchema a -> NE.NonEmpty Schema -> Expectation 377 | shouldBeAbleToEncodeTo tsc sc = asumEither (fmap (encodeToWith tsc) sc) `shouldSatisfy` isRight 378 | 379 | shouldBeAbleToDecode :: forall a . HasCallStack => (HasSchema a) => NE.NonEmpty Schema -> Expectation 380 | shouldBeAbleToDecode sc = asum (fmap (decodeFrom @a) sc) `shouldSatisfy` isSuccess 381 | 382 | shouldNotBeAbleToDecode :: forall a . HasCallStack => (HasSchema a) => NE.NonEmpty Schema -> Expectation 383 | shouldNotBeAbleToDecode sc = asum (fmap (decodeFrom @a) sc) `shouldSatisfy` not . isSuccess 384 | 385 | makeField :: a -> Schema -> Bool -> (a, Field) 386 | makeField n t isReq = (n, Field t isReq) 387 | 388 | constructor' :: a -> b -> (a, b) 389 | constructor' n t = (n, t) 390 | 391 | prim :: Schema 392 | prim = Prim "A" 393 | 394 | primValidators :: Validators 395 | primValidators = validatorsFor @(Schema, Double, Int, Bool) 396 | 397 | getSuccessOrError :: Result a -> a 398 | getSuccessOrError = either (error . 
show) (fromMaybe (error "too many delays")) . Attempt.runAttempt . runResult 1000 399 | 400 | attemptSuccessOrError :: Show e => Either e a -> a 401 | attemptSuccessOrError = either (error.show) id 402 | 403 | pattern Success :: a -> Result a 404 | pattern Success x <- (runResult 1000 -> Attempt.Success (Just x)) 405 | where Success x = liftAttempt $ Attempt.Success x 406 | 407 | getSuccess :: Result a -> Maybe a 408 | getSuccess = join . Attempt.attemptSuccess . runResult 1000 409 | 410 | isSuccess :: Result a -> Bool 411 | isSuccess = isJust . getSuccess 412 | 413 | isFailure :: Result a -> Bool 414 | isFailure = not . isSuccess 415 | 416 | -- | Parallel 'asum' for 'Either' 417 | asumEither :: forall e a . (Monoid e) => NE.NonEmpty (Either e a) -> Either e a 418 | asumEither = Data.Coerce.coerce asumExcept 419 | where 420 | asumExcept :: NE.NonEmpty (Except e a) -> Except e a 421 | asumExcept = asum 422 | 423 | listSchema :: HasSchema a => TypedSchema [a] 424 | listSchema = named "list" $ union 425 | [ ("Nil", alt _Empty) 426 | , ( "Cons" 427 | , altWith 428 | (record $ (,) <$> field "head" fst <*> fieldWith listSchema "tail" snd) 429 | _Cons 430 | ) 431 | ] 432 | 433 | canonical :: Schema -> Schema 434 | canonical = everywhere (mkT simplify) 435 | where 436 | simplify (OneOf [x]) = x 437 | simplify other = other 438 | -------------------------------------------------------------------------------- /src/Schemas/Internal.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ApplicativeDo #-} 2 | {-# LANGUAGE DerivingVia #-} 3 | {-# LANGUAGE FlexibleContexts #-} 4 | {-# LANGUAGE FlexibleInstances #-} 5 | {-# LANGUAGE GADTs #-} 6 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 7 | {-# LANGUAGE ImpredicativeTypes #-} 8 | {-# LANGUAGE LambdaCase #-} 9 | {-# LANGUAGE OverloadedLists #-} 10 | {-# LANGUAGE OverloadedStrings #-} 11 | {-# LANGUAGE RecordWildCards #-} 12 | {-# LANGUAGE ScopedTypeVariables #-} 13 | {-# LANGUAGE TupleSections #-} 14 | {-# LANGUAGE TypeOperators #-} 15 | {-# OPTIONS -Wno-name-shadowing #-} 16 | 17 | module Schemas.Internal where 18 | 19 | import Control.Alternative.Free 20 | import Control.Applicative (Alternative (..)) 21 | import Control.Lens hiding (Empty, allOf, enum, (<.>)) 22 | import Control.Monad.Except 23 | import Control.Monad.Trans.Iter 24 | import Control.Monad.State 25 | import Data.Aeson (Value) 26 | import qualified Data.Aeson as A 27 | import Data.Biapplicative 28 | import Data.Bitraversable 29 | import Data.Coerce 30 | import Data.Either 31 | import Data.Foldable (asum) 32 | import Data.Functor.Compose 33 | import Data.HashMap.Strict (HashMap) 34 | import qualified Data.HashMap.Strict as Map 35 | import qualified Data.HashSet as Set 36 | import Data.List (find) 37 | import Data.List.NonEmpty (NonEmpty (..)) 38 | import qualified Data.List.NonEmpty as NE 39 | import Data.Maybe 40 | import Data.Semigroup 41 | import Data.Text (Text, pack) 42 | import Data.Tuple 43 | import Data.Vector (Vector) 44 | import qualified Data.Vector as V 45 | import Data.Void 46 | import GHC.Exts (IsList (..)) 47 | import Numeric.Natural 48 | import Prelude hiding (lookup) 49 | import Schemas.Attempt as Attempt 50 | import Schemas.Untyped as U 51 | 52 | import Unsafe.Coerce 53 | 54 | -- Typed schemas 55 | -- -------------------------------------------------------------------------------- 56 | 57 | -- | @TypedSchemaFlex enc dec@ is a schema for encoding from @enc@ and decoding to @dec@. 
58 | -- Usually we want @enc@ and @dec@ to be the same type but this flexibility comes in handy 59 | -- for composition 60 | -- 61 | -- * introduction forms: 'record', 'enum', 'schema' 62 | -- * operations: 'encodeToWith', 'decodeFrom', 'extractSchema' 63 | -- * composition: 'dimap', 'union', 'stringMap', 'liftPrism' 64 | -- 65 | data TypedSchemaFlex from a where 66 | -- TypedSchemaFlex wants to be a GADT, but that would preclude a Profunctor instance. 67 | -- So instead it's a normal ADT with embedded coercions 68 | 69 | -- | Name wrapper, to support recursive schemas 70 | TNamed :: SchemaName 71 | -> TypedSchemaFlex from' a' 72 | -> (a' -> a) -- coercion 73 | -> (from -> from') -- coercion 74 | -> TypedSchemaFlex from a -- TypedSchema from' a' 75 | TEnum :: NonEmpty (Text, a) 76 | -> (from -> Text) 77 | -> TypedSchemaFlex from a 78 | TArray :: TypedSchemaFlex b b 79 | -> (Vector b -> a) -- coercion 80 | -> (from -> Vector b) -- coercion 81 | -> TypedSchemaFlex from a -- TypedSchema (Vector b) (Vector b) 82 | TMap :: TypedSchemaFlex b b 83 | -> (HashMap Text b -> a) -- coercion 84 | -> (from -> HashMap Text b) -- coercion 85 | -> TypedSchemaFlex from a -- TypedSchema (HashMap Text b) (HashMap Text b) 86 | -- | Encoding and decoding from/to all the alternatives, with search 87 | TAllOf :: NonEmpty (TypedSchemaFlex from a) 88 | -> TypedSchemaFlex from a 89 | -- | Decoding from all alternatives but encoding only to one, for unions 90 | TOneOf :: TypedSchemaFlex from' a' 91 | -> TypedSchemaFlex from'' a'' 92 | -> (Either a' a'' -> a) -- coercion 93 | -> (from -> Either from' from'') -- coercion 94 | -> TypedSchemaFlex from a -- TypedSchema (Either from' from'') (Either a' a'') 95 | -- | Unit for TAllOf 96 | TEmpty :: (Void -> a) -- coercion 97 | -> (from -> Void) -- coercion 98 | -> TypedSchemaFlex from a -- TypedSchema Void Void 99 | -- | Embed a Value isomorphism as a primitive schema 100 | TPrim :: Text 101 | -> (Value -> A.Result a) -- coercion 102 | -> (from -> Value) -- coercion 103 | -> TypedSchemaFlex from a -- TypedSchema Value (A.Result a) 104 | -- | Schema for a record type 105 | RecordSchema :: RecordFields from a -> TypedSchemaFlex from a 106 | 107 | instance Show (TypedSchemaFlex from a) where 108 | show = show . NE.head . extractSchema 109 | 110 | instance Functor (TypedSchemaFlex from) where 111 | fmap = rmap 112 | 113 | instance Profunctor TypedSchemaFlex where 114 | dimap g f (TEmpty tof fromf) = TEmpty (f . tof) (fromf . g) 115 | dimap g f (TNamed n sc tof fromf) = TNamed n sc (f . tof) (fromf . g) 116 | dimap g f (TAllOf scc ) = TAllOf (dimap g f <$> scc) 117 | dimap g f (TOneOf sca scb to fr ) = TOneOf sca scb (f . to) (fr . g) 118 | dimap g f (TEnum opts fromf ) = TEnum (second f <$> opts) (fromf . g) 119 | dimap g f (TArray sc tof fromf ) = TArray sc (f . tof) (fromf . g) 120 | dimap g f (TMap sc tof fromf ) = TMap sc (f . tof) (fromf . g) 121 | dimap g f (TPrim n tof fromf ) = TPrim n (fmap f . tof) (fromf . g) 122 | dimap g f (RecordSchema sc ) = RecordSchema (dimap g f sc) 123 | 124 | instance Monoid (TypedSchemaFlex Void Void) where 125 | mempty = TEmpty id id 126 | 127 | instance Semigroup (TypedSchemaFlex f a) where 128 | -- | Allows defining multiple schemas for the same thing, effectively implementing versioning. 
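  -- A hedged sketch (the @Name@ wrapper is illustrative and not part of this package):
  --
  -- @
  -- newtype Name = Name { getName :: String }
  --
  -- nameV1, nameV2, nameBoth :: TypedSchema Name
  -- nameV1   = record $ Name \<$\> fieldWith string "name" getName
  -- nameV2   = record $ Name \<$\> fieldWith string "full name" getName
  -- nameBoth = nameV1 <> nameV2   -- accepts either version when decoding
  -- @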
129 | x <> TEmpty{} = x 130 | TEmpty{} <> x = x 131 | TAllOf aa <> b = allOf (aa <> [b]) 132 | a <> TAllOf bb = allOf ([a] <> bb) 133 | a <> b = allOf [a, b] 134 | 135 | sconcat = allOf 136 | 137 | type TypedSchema a = TypedSchemaFlex a a 138 | 139 | -- | @named n sc@ annotates a schema with a name, allowing for circular schemas. 140 | named :: SchemaName -> TypedSchemaFlex from' a -> TypedSchemaFlex from' a 141 | named n sc = TNamed n sc id id 142 | 143 | -- | @enum mapping values@ constructs a schema for a non-empty set of values with a 'Text' mapping 144 | enum :: Eq a => (a -> Text) -> NonEmpty a -> TypedSchema a 145 | enum showF opts = TEnum 146 | alts 147 | (fromMaybe (error "invalid alt") . flip lookup altMap) 148 | where 149 | altMap = fmap swap alts --TODO fast lookup 150 | alts = opts <&> \x -> (showF x, x) 151 | 152 | -- | @stringMap sc@ is the schema for a stringmap where the values have schema @sc@ 153 | stringMap :: TypedSchema a -> TypedSchema (HashMap Text a) 154 | stringMap sc = TMap sc id id 155 | 156 | -- | @list sc@ is the schema for a list of values with schema @sc@ 157 | list :: IsList l => TypedSchema (Item l) -> TypedSchema l 158 | list schema = TArray schema (fromList . V.toList) (V.fromList . toList) 159 | 160 | -- | @vector sc@ is the schema for a vector of values with schema @sc@ 161 | vector :: TypedSchema a -> TypedSchema (Vector a) 162 | vector sc = TArray sc id id 163 | 164 | -- | @viaJSON label@ constructs a schema reusing existing 'aeson' instances. The resulting schema 165 | -- is opaque and cannot be subtyped and/or versioned, so this constructor should be used sparingly. 166 | -- The @label@ is used to describe the extracted 'Schema'. 167 | viaJSON :: (A.FromJSON a, A.ToJSON a) => Text -> TypedSchema a 168 | viaJSON n = TPrim n A.fromJSON A.toJSON 169 | 170 | -- | Apply an isomorphism to a schema 171 | viaIso :: Iso' a b -> TypedSchema a -> TypedSchema b 172 | viaIso iso sc = withIso iso $ \from to -> dimap to from sc 173 | 174 | -- | The schema of String values 175 | string :: TypedSchema String 176 | string = viaJSON "String" 177 | 178 | -- | A schema for types that can be parsed and pretty-printed.
The resulting schema is opaque and cannot 179 | -- be subtyped/versioned, so this constructor is best used for primitive values. 180 | readShow :: (Read a, Show a) => TypedSchema a 181 | readShow = dimap show read string 182 | 183 | -- | 'eitherSchema' and 'emptySchema' make 'TypedSchemaFlex' almost an instance of 'SumProfunctor' (no 'Choice') 184 | eitherSchema 185 | :: TypedSchemaFlex from a 186 | -> TypedSchemaFlex from' a' 187 | -> TypedSchemaFlex (Either from from') (Either a a') 188 | eitherSchema sc sc' = TOneOf sc sc' id id 189 | 190 | -- | The vacuous schema 191 | emptySchema :: TypedSchema Void 192 | emptySchema = TEmpty id id 193 | 194 | -- | The schema that can be trivially decoded and encoded 195 | pureSchema :: a -> TypedSchemaFlex a a 196 | pureSchema a = record (pure a) 197 | 198 | allOf :: NonEmpty (TypedSchemaFlex from a) -> TypedSchemaFlex from a 199 | allOf x = allOf' $ sconcat $ fmap f x where 200 | f (TAllOf xx) = NE.toList xx 201 | f TEmpty{} = [] 202 | f x = [x] 203 | allOf' [] = error "empty allOf" 204 | allOf' [x] = x 205 | allOf' x = TAllOf $ NE.fromList x 206 | 207 | -- -------------------------------------------------------------------------------- 208 | -- Applicative records 209 | 210 | data RecordField from a where 211 | RequiredAp ::{ fieldName :: Text -- ^ Name of the field 212 | , fieldTypedSchema :: TypedSchemaFlex from a 213 | } -> RecordField from a 214 | OptionalAp ::{ fieldName :: Text 215 | , fieldTypedSchema :: TypedSchemaFlex from a 216 | , fieldDefaultValue :: a 217 | } -> RecordField from a 218 | 219 | -- | Lens for the 'fieldName' attribute 220 | fieldNameL :: Lens' (RecordField from a) Text 221 | fieldNameL f (RequiredAp n sc) = (`RequiredAp` sc) <$> f n 222 | fieldNameL f OptionalAp {..} = 223 | (\fieldName -> OptionalAp { .. }) <$> f fieldName 224 | 225 | instance Profunctor RecordField where 226 | dimap f g (RequiredAp name sc) = RequiredAp name (dimap f g sc) 227 | dimap f g (OptionalAp name sc v) = OptionalAp name (dimap f g sc) (g v) 228 | 229 | -- | An 'Alternative' profunctor for defining record schemas with versioning. 230 | -- 231 | -- @ 232 | -- schemaPerson = Person 233 | -- \<$\> (field "name" name \<|\> field "full name" name) 234 | -- \<*\> (field "age" age \<|\> pure (-1)) 235 | -- @ 236 | -- Alternatives are searched greedily in a top-down order. 237 | newtype RecordFields from a = RecordFields {getRecordFields :: Alt (RecordField from) a} 238 | deriving newtype (Alternative, Applicative, Functor, Monoid, Semigroup) 239 | 240 | instance Profunctor RecordFields where 241 | dimap f g = RecordFields . hoistAlt (lmap f) . fmap g . getRecordFields 242 | 243 | -- | Map a function over all the field names 244 | overFieldNames :: (Text -> Text) -> RecordFields from a -> RecordFields from a 245 | overFieldNames f = 246 | RecordFields . hoistAlt (over fieldNameL f) . getRecordFields 247 | 248 | -- | Wrap an applicative record schema 249 | record :: RecordFields from a -> TypedSchemaFlex from a 250 | record = RecordSchema 251 | 252 | -- | @fieldWith sc n get@ introduces a field 253 | fieldWith :: TypedSchema a -> Text -> (from -> a) -> RecordFields from a 254 | fieldWith schema n get = fieldWith' (lmap get schema) n 255 | 256 | -- | Generalised version of 'fieldWith' 257 | fieldWith' :: TypedSchemaFlex from a -> Text -> RecordFields from a 258 | fieldWith' schema n = RecordFields $ liftAlt (RequiredAp n schema) 259 | 260 | -- | Project a schema through a Prism.
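--
-- A hedged usage sketch (@eitherOf@ is illustrative and not exported by this module;
-- @_Left@ is the prism from "Control.Lens"):
--
-- @
-- eitherOf :: TypedSchema a -> TypedSchema b -> TypedSchema (Either a b)
-- eitherOf l r = liftPrism _Left l (liftRight r)
-- @
--
-- Values matching the prism are encoded with the second argument; anything else falls
-- through to the third argument, here the schema built by 'liftRight'.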
261 | liftPrism :: Prism s t a b -> TypedSchemaFlex a b -> TypedSchemaFlex t t -> TypedSchemaFlex s t 262 | liftPrism p sc other = withPrism p $ \t f -> TOneOf other sc (either id t) f 263 | 264 | -- | Returns a partial schema. 265 | -- When encoding/decoding a Nothing value, 266 | -- an optional field will be omitted, and a required field will cause 267 | -- this alternative to be aborted. 268 | liftJust :: TypedSchemaFlex a b -> TypedSchemaFlex (Maybe a) (Maybe b) 269 | liftJust sc = liftPrism _Just sc $ TEmpty absurd $ error "liftJust" 270 | 271 | -- | Returns a partial schema. 272 | -- When encoding/decoding a Left value, 273 | -- an optional field will be omitted, and a required field will cause 274 | -- this alternative to be aborted. 275 | liftRight :: TypedSchemaFlex a b -> TypedSchemaFlex (Either c a) (Either c b) 276 | liftRight sc = liftPrism _Right sc $ TEmpty absurd $ error "liftRight" 277 | 278 | optFieldWith 279 | :: forall a from 280 | . TypedSchemaFlex from (Maybe a) 281 | -> Text 282 | -> RecordFields from (Maybe a) 283 | optFieldWith = optFieldGeneral Nothing 284 | 285 | -- | The most general introduction form for optional alts 286 | optFieldGeneral 287 | :: forall a from . a ->TypedSchemaFlex from a -> Text -> RecordFields from a 288 | optFieldGeneral def schema n = RecordFields $ liftAlt (OptionalAp n schema def) 289 | 290 | -- | A generalized version of 'optFieldEither'. 291 | optFieldEitherWith 292 | :: TypedSchemaFlex from (Either e a) 293 | -> Text 294 | -> e 295 | -> RecordFields from (Either e a) 296 | optFieldEitherWith sc n e = optFieldGeneral (Left e) sc n 297 | 298 | extractFieldsHelper 299 | :: Alternative f 300 | => (forall a . RecordField from a -> f b) 301 | -> RecordFields from a 302 | -> f [b] 303 | extractFieldsHelper f = runAlt_ (\x -> (: []) <$> f x) . getRecordFields 304 | 305 | -- -------------------------------------------------------------------------------- 306 | -- Typed Unions 307 | 308 | 309 | -- | An alternative in a union type 310 | data UnionAlt from where 311 | UnionAlt :: Prism' from b -> TypedSchema b -> UnionAlt from 312 | 313 | -- | Declare an alternative in a union type 314 | altWith :: TypedSchema a -> Prism' from a -> UnionAlt from 315 | altWith sc p = UnionAlt p sc 316 | 317 | -- | Discriminated unions that record the name of the chosen constructor in the schema 318 | -- 319 | -- @ 320 | -- data Education = Degree Text | PhD Text | NoEducation 321 | -- 322 | -- schemaEducation = union 323 | -- [ (\"NoEducation\", alt #_NoEducation) 324 | -- , (\"Degree\" , alt #_Degree) 325 | -- , (\"PhD\" , alt #_PhD) 326 | -- ] 327 | -- @ 328 | union :: NonEmpty (Text, UnionAlt from) -> TypedSchema from 329 | union (a :| rest) = go (a:rest) where 330 | go ((n, UnionAlt p sc) : rest) = liftPrism p (RecordSchema $ fieldWith' sc n) $ go rest 331 | go [] = TEmpty absurd (error "incomplete union definition") 332 | 333 | -- | Undiscriminated union that do not record the name of the constructor in the schema 334 | -- 335 | -- @ 336 | -- data Education = Degree Text | PhD Text | NoEducation 337 | -- 338 | -- schemaEducation = oneOf 339 | -- [ alt #_NoEducation 340 | -- , alt #_Degree 341 | -- , alt #_PhD 342 | -- ] 343 | -- @ 344 | -- Alternatives are searched greedily in a top-down order. 
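--
-- When no 'HasSchema' instance is in scope, 'altWith' takes the schema explicitly.
-- A hedged sketch (@Shape@ and its generic-lens label prisms are illustrative, not part of this module):
--
-- @
-- data Shape = Circle Double | Point
--   deriving (Generic)
--
-- schemaShape = oneOf
--   [ altWith (viaJSON \"Double\") #_Circle
--   , alt #_Point
--   ]
-- @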
345 | oneOf :: NonEmpty (UnionAlt from) -> TypedSchema from 346 | oneOf (a :| rest) = go (a:rest) where 347 | go (UnionAlt p sc : rest) = liftPrism p sc $ go rest 348 | go [] = TEmpty absurd (error "incomplete oneOf definition") 349 | 350 | -- -------------------------------------------------------------------------------- 351 | -- Schema extraction from a TypedSchema 352 | 353 | -- | Extract an untyped schema that can be serialized. 354 | -- 355 | -- For schemas with alternatives, this enumerates all the possible 356 | -- versions lazily. 357 | -- Beware when using on schemas with multiple alternatives, 358 | -- as the number of versions is exponential. 359 | extractSchema :: TypedSchemaFlex from a -> NonEmpty Schema 360 | extractSchema (TNamed n sc _ _) = Named n <$> extractSchema sc 361 | extractSchema (TPrim n _ _ ) = pure $ Prim n 362 | extractSchema (TOneOf s s' _ _) = (<>) <$> extractSchema s <*> extractSchema s' 363 | extractSchema (TAllOf scc ) = extractSchema =<< scc 364 | extractSchema (TEnum opts _ ) = pure $ Enum (fst <$> opts) 365 | extractSchema (TArray sc _ _ ) = Array <$> extractSchema sc 366 | extractSchema (TMap sc _ _ ) = StringMap <$> extractSchema sc 367 | extractSchema (RecordSchema rs) = 368 | case foldMap (pure . Record . fromList) (extractFields rs) of 369 | [] -> pure Empty 370 | other -> fromList other 371 | extractSchema TEmpty{} = pure Empty 372 | 373 | -- | Extract all the field groups (from alternatives) in the record 374 | extractFields :: RecordFields from to -> [[(Text, Field)]] 375 | extractFields = 376 | runAlt_ (\x -> (: []) <$> NE.toList (extractField x)) . getRecordFields where 377 | 378 | extractField :: RecordField from to -> NonEmpty (Text, Field) 379 | extractField (RequiredAp n sc) = 380 | (\s -> (n, (`Field` True) s)) <$> extractSchema sc 381 | extractField (OptionalAp n sc _) = 382 | (\s -> (n, (`Field` False) s)) <$> extractSchema sc 383 | 384 | 385 | -- | Returns all the primitive validators embedded in this typed schema 386 | extractValidators :: TypedSchemaFlex from a -> Validators 387 | extractValidators = go where 388 | go :: TypedSchemaFlex from a -> Validators 389 | go (TPrim n parse _) = 390 | [ ( n 391 | , \x -> case parse x of 392 | A.Success _ -> Nothing 393 | A.Error e -> Just (pack e) 394 | ) 395 | ] 396 | go (TOneOf a b _ _) = go a <> go b 397 | go (TAllOf scc ) = foldMap go scc 398 | go (TArray sc _ _) = go sc 399 | go (TMap sc _ _) = go sc 400 | go (RecordSchema rs) = 401 | mconcat $ mconcat (extractFieldsHelper (pure . go . fieldTypedSchema) rs) 402 | go _ = [] 403 | 404 | -- --------------------------------------------------------------------------------------- 405 | -- Results 406 | 407 | type TracedMismatches = [(Trace, Mismatch)] 408 | 409 | newtype IterAltT m a = IterAlt {runIterAlt :: IterT m a} 410 | deriving newtype (Applicative, Functor, Monad, MonadError e, MonadState s, MonadTrans, MonadFree Identity, Eq, Show) 411 | 412 | instance (MonadPlus m) => Alternative (IterAltT m) where 413 | empty = IterAlt (lift empty) 414 | IterAlt a <|> IterAlt b = IterAlt $ IterT $ runIterT a <|> runIterT b 415 | 416 | runDelay :: Monad m => Natural -> IterAltT m a -> m (Maybe a) 417 | runDelay n = retract . cutoff (fromIntegral n) . 
runIterAlt 418 | 419 | -- | A monad encapsulating failure as well as non-termination 420 | newtype Result a = Result { getResult :: IterAltT (Attempt TracedMismatches) a} 421 | deriving newtype (Applicative, Alternative, Functor, Monad, MonadError TracedMismatches, MonadFree Identity, Eq, Show) 422 | 423 | liftAttempt :: Attempt TracedMismatches a -> Result a 424 | liftAttempt = Result . lift 425 | 426 | -- | Run a 'Result' up with bounded depth. Returns nothing if it runs out of steps. 427 | runResult :: MonadError TracedMismatches g => Natural -> Result a -> g (Maybe a) 428 | runResult maxSteps = execAttempt . runDelay maxSteps . getResult 429 | 430 | -- --------------------------------------------------------------------------------------- 431 | -- Encoding to JSON 432 | 433 | -- | Given a typed schema, produce a JSON encoder to the first version produced by 'extractSchema' 434 | encodeWith :: TypedSchemaFlex from a -> (from -> Value) 435 | encodeWith sc = ensureSuccess encoder 436 | where 437 | encoder = encodeToWith sc (NE.head $ extractSchema sc) 438 | ensureSuccess = either (error.show) id 439 | 440 | -- | Given source and target schemas, produce a JSON encoder 441 | encodeToWith :: TypedSchemaFlex from a -> Schema -> Either TracedMismatches (from -> Value) 442 | encodeToWith sc target = runAttempt $ 443 | (fmap.fmap) (fromMaybe (error "Empty schema")) 444 | (go [] [] sc target) 445 | where 446 | failWith ctx m = throwError [(reverse ctx, m)] 447 | 448 | go 449 | :: forall from a 450 | . [(SchemaName, Attempt TracedMismatches (Void -> Maybe Value))] 451 | -> Trace 452 | -> TypedSchemaFlex from a 453 | -> Schema 454 | -> Attempt TracedMismatches (from -> Maybe Value) 455 | go env ctx (TNamed n sct _ fromf) (Named n' sc) | n == n' = 456 | case lookup n env of 457 | Just res -> 458 | lmap (unsafeCoerce . fromf) <$> res 459 | Nothing -> 460 | let res = go ((n, resDynLater) : env) ctx sct sc 461 | resDyn = lmap unsafeCoerce <$> res 462 | resDynLater = (pure . fromMaybe (error "impossible") . attemptSuccess) resDyn 463 | in lmap fromf <$> res 464 | go _ _ _ Empty = empty 465 | go _ _tx (TEmpty _ _) _ = pure $ const empty 466 | go _ ctx (TPrim n _ fromf) (Prim n') 467 | | n == n' = pure $ pure . fromf 468 | | otherwise = failWith ctx (PrimMismatch n n') 469 | go i ctx (TArray sc _ fromf) (Array t) = do 470 | f <- go i ("[]" : ctx) sc t 471 | return $ A.Array <.> traverse f . fromf 472 | go i ctx (TMap sc _ fromf) (StringMap t) = do 473 | f <- go i ("Map" : ctx) sc t 474 | return $ A.Object <.> traverse f . fromf 475 | go _ ctx (TEnum opts fromf) (Enum optsTarget) = do 476 | case NE.nonEmpty $ NE.filter (`notElem` optsTarget) (fst <$> opts) of 477 | Nothing -> pure $ pure . A.String . 
fromf 478 | Just xx -> failWith ctx $ MissingEnumChoices xx 479 | go n ctx (TAllOf scc) t = asum $ imap (\i sc -> go n (tag i : ctx) sc t) scc 480 | go n ctx (TOneOf a b _ fromf) t = do 481 | encoderA <- go n ("L" : ctx) a t 482 | encoderB <- go n ("R" : ctx) b t 483 | pure $ \x -> either encoderA encoderB (fromf x) 484 | go i ctx sc (OneOf tt) = asum $ fmap (go i ctx sc) (tt <> [Empty]) 485 | go i ctx (RecordSchema rec) (Record target) = do 486 | let candidates = runAlt_ extractField (getRecordFields rec) 487 | case find (\candidate -> Set.fromList (map fst candidate) == targetFields) candidates of 488 | Nothing -> failWith ctx $ 489 | SchemaMismatch (NE.head $ extractSchema $ RecordSchema rec) (Record target) 490 | Just solution -> pure $ \x -> do 491 | fields <- traverse (\(_,f) -> case f x of Nothing -> Nothing ; Just (n,y) -> Just $ (n,) <$> y) solution 492 | return $ A.object $ catMaybes fields 493 | where 494 | targetFields = Set.fromList (Map.keys target) 495 | 496 | liftGo = either (const empty) pure . runAttempt 497 | 498 | extractField 499 | :: forall from a 500 | . RecordField from a 501 | -> [] [(Text, from -> Maybe (Text, Maybe Value))] 502 | extractField RequiredAp {..} = case Map.lookup fieldName target of 503 | Nothing -> pure [] 504 | Just targetField -> do 505 | f <- liftGo $ 506 | go i (fieldName : ctx) 507 | fieldTypedSchema 508 | (fieldSchema targetField) 509 | return $ 510 | let encoder = fmap ((fieldName,) . Just) . f 511 | in [(fieldName, encoder)] 512 | 513 | extractField OptionalAp {..} = case Map.lookup fieldName target of 514 | Nothing -> pure [] 515 | Just targetField -> do 516 | guard $ not (isRequired targetField) 517 | f <- liftGo $ go i (fieldName : ctx) 518 | fieldTypedSchema 519 | (fieldSchema targetField) 520 | 521 | return $ 522 | let encoder = Just . (fieldName,) . f 523 | in [(fieldName, encoder)] 524 | go i ctx sc (Array t) = do 525 | f <- go i ctx sc t 526 | return $ A.Array . fromList . (: []) <.> f 527 | go _ _tx _ Unit = pure $ const (pure emptyValue) 528 | go _ ctx other src = 529 | failWith ctx (SchemaMismatch (NE.head $ extractSchema other) src) 530 | 531 | -- -------------------------------------------------------------------------- 532 | -- Decoding 533 | 534 | -- | Runs a schema as a function @enc -> dec@. Loops for infinite/circular data 535 | runSchema :: TypedSchemaFlex enc dec -> enc -> Either [Mismatch] dec 536 | runSchema sc = runExcept . go sc 537 | where 538 | go :: forall from a . 
TypedSchemaFlex from a -> from -> Except [Mismatch] a 539 | go (TEmpty toF fromF ) x = pure $ toF $ fromF x 540 | -- TODO handle circular data 541 | go (TNamed _ sc tof fromF) a = tof <$> go sc (fromF a) 542 | go (TPrim n toF fromF ) from = case toF (fromF from) of 543 | A.Success a -> pure a 544 | A.Error e -> failWith (PrimError n (pack e)) 545 | go (TEnum opts fromF) from = case lookup enumValue opts of 546 | Just x -> pure x 547 | Nothing -> failWith $ InvalidEnumValue enumValue (fst <$> opts) 548 | where enumValue = fromF from 549 | go (TMap _sc toF fromF) from = pure $ toF (fromF from) 550 | go (TArray _sc toF fromF) from = pure $ toF (fromF from) 551 | go (TAllOf scc ) from = msum $ (`go` from) <$> scc 552 | go (TOneOf sc sc' toF fF) from = toF <$> bitraverse (go sc) (go sc') (fF from) 553 | go (RecordSchema alts ) from = runAlt f (getRecordFields alts) 554 | where 555 | f :: RecordField from b -> Except [Mismatch] b 556 | f RequiredAp {..} = go fieldTypedSchema from 557 | f OptionalAp {..} = go fieldTypedSchema from 558 | 559 | failWith e = throwError [e] 560 | 561 | -- | Evaluates a schema as a value of type 'dec'. Can only succeed if the schema contains a pure alternative, e.g. one built with 'pureSchema' 562 | evalSchema :: forall enc dec . TypedSchemaFlex enc dec -> Maybe dec 563 | evalSchema TEmpty{} = Nothing 564 | -- TODO handle circular data 565 | evalSchema (TNamed _ sc tof _) = tof <$> evalSchema sc 566 | evalSchema (TPrim _ _ _) = empty 567 | evalSchema (TEnum _ _) = empty 568 | evalSchema (TMap _sc _ _) = empty 569 | evalSchema (TArray _sc _ _) = empty 570 | evalSchema (TAllOf scc ) = msum $ evalSchema <$> scc 571 | evalSchema (TOneOf sc sc' toF _) = 572 | toF <$> ((Left <$> evalSchema sc) <|> (Right <$> evalSchema sc')) 573 | evalSchema (RecordSchema alts ) = runAlt f (getRecordFields alts) 574 | where 575 | f :: RecordField from b -> Maybe b 576 | f RequiredAp {..} = evalSchema fieldTypedSchema 577 | f OptionalAp {..} = evalSchema fieldTypedSchema 578 | 579 | -- | Given a JSON 'Value' and a typed schema, extract a Haskell value 580 | decodeWith :: TypedSchemaFlex from a -> Value -> Result a 581 | decodeWith sc v = decoder >>= ($ v) 582 | where decoder = decodeFromWith sc (NE.head $ extractSchema sc) 583 | 584 | decodeFromWith 585 | :: TypedSchemaFlex from a -> Schema -> Result (Value -> Result a) 586 | -- TODO merge runSchema and decodeFromWith ? 587 | -- TODO expose non-termination as an effect 588 | decodeFromWith sc source = Result $ todoExposeNonTermination $ go [] [] sc source 589 | where 590 | todoExposeNonTermination = lift 591 | 592 | failWith ctx e = throwError [(reverse ctx, e)] 593 | 594 | go 595 | :: [(SchemaName, Attempt TracedMismatches (Value -> Result Void))] 596 | -> Trace 597 | -> TypedSchemaFlex from a 598 | -> Schema 599 | -> Attempt TracedMismatches (Value -> Result a) 600 | go _ ctx TEmpty{} Empty = pure $ const $ failWith ctx EmptySchema 601 | go env ctx (TNamed n sc tof _) (Named n' s) | n == n' = case lookup n env of 602 | Just sol -> 603 | (fmap . fmap . fmap) (tof . unsafeCoerce) sol 604 | Nothing -> 605 | let sol = go ((n, solDynLater) : env) ctx sc s 606 | solDelayed = (fmap . fmap) delay sol 607 | solDyn = (fmap . fmap . fmap) unsafeCoerce solDelayed 608 | solDynLater = pure $ fromMaybe (error "impossible") $ attemptSuccess solDyn 609 | in (fmap . fmap . 
fmap) tof sol 610 | go _nv ctx (TEnum optsTarget _) s@(Enum optsSource) = 611 | case 612 | NE.nonEmpty 613 | $ NE.filter (`notElem` map fst (NE.toList optsTarget)) optsSource 614 | of 615 | Just xx -> failWith ctx $ MissingEnumChoices xx 616 | Nothing -> pure $ \case 617 | A.String x -> 618 | maybe (failWith ctx (InvalidEnumValue x (fst <$> optsTarget))) pure 619 | $ lookup x optsTarget 620 | other -> failWith ctx (ValueMismatch s other) 621 | go env ctx (TArray sc tof _) s@(Array src) = do 622 | f <- go env ("[]" : ctx) sc src 623 | pure $ \case 624 | A.Array x -> tof <$> traverse f x 625 | other -> failWith ctx (ValueMismatch s other) 626 | go env ctx (TMap sc tof _) s@(StringMap src) = do 627 | f <- go env ("Map" : ctx) sc src 628 | pure $ \case 629 | A.Object x -> tof <$> traverse f x 630 | other -> failWith ctx (ValueMismatch s other) 631 | go _nv ctx (TPrim n tof _) (Prim src) 632 | | n /= src = failWith ctx (PrimMismatch n src) 633 | | otherwise = pure $ \x -> case tof x of 634 | A.Error e -> failWith ctx (PrimError n (pack e)) 635 | A.Success a -> pure a 636 | go env ctx (TAllOf scc ) src = do 637 | let parsers = map (\sc -> runAttempt $ go env ctx sc src) (NE.toList scc) 638 | case partitionEithers parsers of 639 | (ee, []) -> failWith ctx (AllAlternativesFailed (concat ee)) 640 | (_ , pp) -> do 641 | pure $ \x -> asum (map ($ x) pp) 642 | go env ctx (TOneOf sc sc' tof _) src = do 643 | let parserL = runAttempt $ (Left <.>) <$> go env ctx sc src 644 | let parserR = runAttempt $ (Right <.>) <$> go env ctx sc' src 645 | -- parserR comes first 646 | -- This is because of how liftPrism and oneOf work 647 | case partitionEithers [parserR, parserL] of 648 | (ee, []) -> failWith ctx (AllAlternativesFailed (concat ee)) 649 | (_ , pp) -> do 650 | pure $ \x -> tof <$> asum (map ($ x) pp) 651 | go env ctx (RecordSchema (RecordFields rec)) (Record src) = unliftGo $ coerce $ runAlt f' rec 652 | where 653 | sourceFields = Map.keysSet src 654 | 655 | liftGo = either (const empty) pure . runAttempt 656 | 657 | unliftGo = maybe (failWith ctx NoMatches) (pure . snd) 658 | . 
find @[] (\(tgtFields,_) -> null $ Set.difference sourceFields (fromList tgtFields)) 659 | 660 | f' :: RecordField from a -> ([] `Compose`(,) [Text] `Compose` (->) Value `Compose` Result) a 661 | f' x = coerce (f x) 662 | 663 | f :: RecordField from a -> [([Text], Value -> Result a)] 664 | f RequiredAp {..} = case Map.lookup fieldName src of 665 | Nothing -> empty 666 | Just srcField -> do 667 | guard $ isRequired srcField 668 | f <- liftGo $ go env (fieldName : ctx) fieldTypedSchema (fieldSchema srcField) 669 | pure $ 670 | let decoder v = 671 | case v of 672 | A.Object o -> case Map.lookup fieldName o of 673 | Nothing -> 674 | failWith (fieldName : ctx) (MissingRecordField fieldName) 675 | Just v -> f v 676 | other -> failWith ctx (InvalidRecordValue other) 677 | in ([fieldName], decoder) 678 | f OptionalAp {..} = case Map.lookup fieldName src of 679 | Nothing -> pure ([], const $ pure fieldDefaultValue) 680 | Just srcField -> do 681 | f <- liftGo $ go env (fieldName : ctx) fieldTypedSchema (fieldSchema srcField) 682 | pure $ 683 | let decoder v = case v of 684 | A.Object o -> case Map.lookup fieldName o of 685 | Nothing -> pure fieldDefaultValue 686 | Just v -> f v 687 | other -> failWith ctx (InvalidRecordValue other) 688 | in ([fieldName], decoder) 689 | 690 | go env ctx s (OneOf xx) = asum $ fmap (go env ctx s) xx 691 | go _nv ctx s src = 692 | failWith ctx (SchemaMismatch (NE.head $ extractSchema s) src) 693 | 694 | -- ---------------------------------------------- 695 | -- Utils 696 | 697 | runAlt_ 698 | :: (Alternative g, Monoid m) => (forall a . f a -> g m) -> Alt f b -> g m 699 | runAlt_ f = fmap getConst . getCompose . runAlt (Compose . fmap Const . f) 700 | 701 | (<.>) :: Functor f => (b -> c) -> (a -> f b) -> a -> f c 702 | f <.> g = fmap f . g 703 | 704 | infixr 8 <.> 705 | --------------------------------------------------------------------------------
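-- An end-to-end usage sketch (illustrative only, not part of the package sources above;
-- @roundTripSketch@ is a hypothetical helper built solely from functions defined in Schemas.Internal):

roundTripSketch :: TypedSchema a -> a -> Result a
roundTripSketch sc x = do
  let s = NE.head (extractSchema sc)                 -- pick the first extracted untyped schema
  enc <- either throwError pure (encodeToWith sc s)  -- build the encoder, or fail with the mismatches
  dec <- decodeFromWith sc s                         -- build the decoder (a possibly delayed 'Result')
  dec (enc x)                                        -- encode to JSON and decode back

-- Bounding the search depth with 'runResult', e.g. @runResult 1000 (roundTripSketch sc x)@,
-- mirrors the round-trip property exercised by 'schemaSpec' in the test suite.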