├── .github └── workflows │ └── ci.yml ├── .gitignore ├── .scalafix.conf ├── .scalafix3.conf ├── .scalafmt.conf ├── CHANGELOG.md ├── README.md ├── build.sbt ├── circe └── src │ ├── main │ ├── scala-2 │ │ └── morphling │ │ │ └── circe │ │ │ ├── FromJson.scala │ │ │ ├── ToFilter.scala │ │ │ └── ToJson.scala │ └── scala-3 │ │ └── morphling │ │ └── circe │ │ ├── FromJson.scala │ │ ├── ToFilter.scala │ │ └── ToJson.scala │ └── test │ └── scala │ └── morphling │ └── circe │ ├── CircePack.scala │ ├── CirceSpec.scala │ ├── Implicits.scala │ ├── JsonFilterSpec.scala │ └── annotated │ ├── CirceAnnotatedSpec.scala │ ├── Implicits.scala │ └── JsonFilterAnnotatedSpec.scala ├── copying.txt ├── core └── src │ ├── main │ ├── scala-2 │ │ └── morphling │ │ │ ├── Constructors.scala │ │ │ ├── HFunctor.scala │ │ │ ├── Schema.scala │ │ │ ├── SchemaF.scala │ │ │ ├── annotated │ │ │ └── Schema.scala │ │ │ └── package.scala │ └── scala-3 │ │ └── morphling │ │ ├── HFunctor.scala │ │ ├── Schema.scala │ │ ├── SchemaF.scala │ │ └── annotated │ │ └── Schema.scala │ └── test │ ├── scala-2 │ └── morphling │ │ ├── protocol │ │ ├── SType.scala │ │ └── annotated │ │ │ ├── Restriction.scala │ │ │ └── STypeAnn.scala │ │ └── samples │ │ ├── Deannotator.scala │ │ ├── Person.scala │ │ ├── Role.scala │ │ ├── annotated │ │ ├── AnnPerson.scala │ │ ├── AnnRole.scala │ │ └── Server.scala │ │ └── package.scala │ └── scala-3 │ └── morphling │ ├── protocol │ ├── SType.scala │ └── annotated │ │ ├── Restriction.scala │ │ └── STypeAnn.scala │ └── samples │ ├── Deannotator.scala │ ├── Person.scala │ ├── Role.scala │ ├── Samples.scala │ └── annotated │ ├── AnnPerson.scala │ ├── AnnRole.scala │ └── Server.scala ├── project ├── Settings.scala ├── build.properties └── plugins.sbt ├── reactivemongo └── src │ ├── main │ ├── scala-2 │ │ └── morphling │ │ │ └── reactivemongo │ │ │ ├── FromBson.scala │ │ │ └── ToBson.scala │ ├── scala-3 │ │ └── morphling │ │ │ └── reactivemongo │ │ │ ├── FromBson.scala │ │ │ └── ToBson.scala 
│ └── scala │ │ └── morphling │ │ └── reactivemongo │ │ └── MultipleKeysFound.scala │ └── test │ └── scala │ └── morphling │ └── reactivemongo │ ├── Implicits.scala │ ├── ReactivemongoPack.scala │ ├── ReactivemongoSpec.scala │ └── annotated │ ├── Implicits.scala │ └── ReactivemongoAnnotatedSpec.scala ├── scalacheck └── src │ ├── main │ ├── scala-2 │ │ └── morphling │ │ │ └── scalacheck │ │ │ └── ToGen.scala │ └── scala-3 │ │ └── morphling │ │ └── scalacheck │ │ └── ToGen.scala │ └── test │ └── scala │ └── morphling │ └── scalacheck │ ├── GenPack.scala │ ├── Implicits.scala │ └── annotated │ └── Implicits.scala ├── tapir └── src │ ├── main │ ├── scala-2 │ │ └── morphling │ │ │ └── tapir │ │ │ └── ToSchema.scala │ ├── scala-3 │ │ └── morphling │ │ │ └── tapir │ │ │ └── ToSchema.scala │ └── scala │ │ └── morphling │ │ └── tapir │ │ └── ProductSchema.scala │ └── test │ └── scala │ ├── morphling │ └── tapir │ │ ├── Implicits.scala │ │ ├── SchemaPack.scala │ │ ├── TapirSchemaSpec.scala │ │ └── annotated │ │ ├── Implicits.scala │ │ └── TapirSchemaAnnotatedSpec.scala │ └── sttp │ └── tapir │ └── docs │ └── apispec │ └── schema │ └── SchemaRenderer.scala └── typedschema └── src ├── main └── scala │ └── morphling │ └── tschema │ └── ToTypeable.scala └── test └── scala └── morphling └── tschema ├── Implicits.scala ├── TSchemaSpec.scala ├── TypeablePack.scala └── annotated ├── Implicits.scala └── TSchemaAnnotatedSpec.scala /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | strategy: 8 | matrix: 9 | scala-version: [ "2.12", "2.13", "3" ] 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v4 14 | with: 15 | fetch-depth: 0 16 | 17 | - uses: actions/setup-java@v4 18 | with: 19 | distribution: temurin 20 | java-version: 17 21 | check-latest: true 22 | 23 | - uses: sbt/setup-sbt@v1 24 | 25 | - name: Coursier cache 26 | uses: 
coursier/cache-action@v6 27 | 28 | - name: Compile and test 29 | run: sbt "project root2_12; scalafixAll --check; test" 30 | if: matrix.scala-version == '2.12' 31 | 32 | - name: Compile and test 33 | run: sbt "project root; scalafixAll --check; test" 34 | if: matrix.scala-version == '2.13' 35 | 36 | - name: Compile and test 37 | run: sbt "project root3; scalafixAll --check; test" 38 | if: matrix.scala-version == '3' 39 | 40 | release: 41 | needs: [build] 42 | runs-on: ubuntu-latest 43 | steps: 44 | - uses: actions/checkout@v4 45 | with: 46 | fetch-depth: 0 47 | 48 | - uses: actions/setup-java@v4 49 | with: 50 | distribution: temurin 51 | java-version: 17 52 | check-latest: true 53 | 54 | - uses: sbt/setup-sbt@v1 55 | 56 | - name: Coursier cache 57 | uses: coursier/cache-action@v6 58 | 59 | - name: Publish artifacts 60 | env: 61 | PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} 62 | PGP_SECRET: ${{ secrets.PGP_SECRET }} 63 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} 64 | SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} 65 | run: sbt ci-release 66 | 67 | ci-passed: 68 | runs-on: ubuntu-latest 69 | needs: release 70 | steps: 71 | - run: ':' 72 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | *.log 3 | **/target 4 | .DS_Store 5 | .bsp 6 | .idea 7 | .metals 8 | .theia 9 | .bloop 10 | .vscode 11 | **/metals.sbt -------------------------------------------------------------------------------- /.scalafix.conf: -------------------------------------------------------------------------------- 1 | rules = [ 2 | OrganizeImports 3 | ] 4 | 5 | OrganizeImports { 6 | preset = INTELLIJ_2020_3 7 | targetDialect = Auto 8 | coalesceToWildcardImportThreshold = null 9 | expandRelative = true 10 | } -------------------------------------------------------------------------------- /.scalafix3.conf: 
-------------------------------------------------------------------------------- 1 | rules = [ 2 | OrganizeImports 3 | ] 4 | 5 | OrganizeImports { 6 | preset = INTELLIJ_2020_3 7 | targetDialect = Auto 8 | coalesceToWildcardImportThreshold = null 9 | expandRelative = true 10 | removeUnused = false 11 | } -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = "3.5.9" 2 | align = most 3 | align.openParenCallSite = false 4 | align.openParenDefnSite = false 5 | align.tokens = [ 6 | { code = "extends", owner = "Defn.(Class|Trait|Object)" } 7 | { code = "//", owner = ".*" } 8 | { code = "{", owner = "Template" } 9 | { code = "}", owner = "Template" } 10 | { code = "%", owner = "Term.ApplyInfix" } 11 | { code = "=>", owner = "Case" } 12 | { code = "%%",owner = "Term.ApplyInfix" } 13 | { code = "%%%",owner = "Term.ApplyInfix" } 14 | { code = "<-", owner = "Enumerator.Generator" } 15 | { code = "->", owner = "Term.ApplyInfix" } 16 | { code = "=", owner = "(Enumerator.Val|Defn.(Va(l|r)|Def|Type))" } 17 | ] 18 | continuationIndent.defnSite = 4 19 | docstrings.style = Asterisk 20 | encoding = UTF-8 21 | importSelectors = singleLine 22 | maxColumn = 120 23 | newlines.beforeTypeBounds = unfold 24 | newlines.avoidForSimpleOverflow = [tooLong, punct, slc] 25 | optIn.configStyleArguments = true 26 | project.git = true 27 | rewrite.rules = [ 28 | PreferCurlyFors 29 | Imports, 30 | RedundantBraces 31 | RedundantParens 32 | SortModifiers 33 | ] 34 | rewrite.imports.expand = true 35 | rewrite.imports.sort = ascii 36 | rewrite.imports.groups = [ 37 | ["javax?..*", "scala..*"] 38 | [".*"] 39 | ] 40 | rewrite.sortModifiers.order = [ 41 | implicit 42 | final 43 | sealed 44 | abstract 45 | override 46 | private 47 | protected 48 | lazy 49 | open 50 | transparent 51 | inline 52 | infix 53 | opaque 54 | ] 55 | style = IntelliJ 56 | trailingCommas = preserve 
57 | runner.dialect = scala213source3 58 | fileOverride { 59 | "glob:**/scala-3/**" { 60 | runner.dialect = scala3 61 | } 62 | } 63 | 64 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## version 4.0.0 2 | 3 | > 22.09.2024 4 | 5 | - simulacrum-scalafix is removed 6 | - update `glass` 7 | 8 | ## version 3.1.0 9 | 10 | > 20.10.2022 11 | 12 | - Tapir support 13 | 14 | ## version 3.0.0 15 | 16 | > 13.09.2022 17 | 18 | - complete Scala 3 support 19 | - upgrade `circe` 20 | - upgrade `reactivemongo` 21 | 22 | ## version 2.7.0-glass 23 | 24 | > 26.08.2022 25 | 26 | - replace `tofu-optics` with glass 27 | 28 | ## version 2.7.0 29 | 30 | > 18.03.2021 31 | 32 | - upgrade `circe` 33 | - upgrade `tofu` 34 | - upgrade `typed-schema` 35 | - upgrade `scalacheck` 36 | - upgrade `reactivemongo` 37 | 38 | ## version 2.6.0 39 | 40 | > 24.02.2021 41 | 42 | - change organization to `com.github.danslapman` 43 | - now hosts on Sonatype OSS 44 | - renamed HFunctor.hfmap to HFunctor.hlift 45 | - experimental HFunctor Scala 3 implementation 46 | 47 | ## version 2.5.2 48 | 49 | > 13.10.2020 50 | 51 | - add HMutu.transformInner 52 | - add example of schema de-annotating (see Deannotator & CirceSpec) 53 | 54 | ## version 2.5.1 55 | 56 | > 10.09.2020 57 | 58 | - improve SwaggerTypeable for discriminated AltSchema 59 | 60 | ## version 2.5 61 | 62 | > 02.09.2020 63 | 64 | - re-introduce methods removed in 2.2 and 2.3 65 | 66 | ## version 2.4 67 | 68 | > 02.09.2020 69 | 70 | - upgrade `typed-schema` 71 | - upgrade `tofu` 72 | 73 | ## version 2.3 74 | 75 | > 19.08.2020 76 | 77 | - fix annoying overload ambiguity (for annotated schemas) 78 | 79 | ## version 2.2 80 | 81 | > 19.08.2020 82 | 83 | - fix annoying overload ambiguity 84 | 85 | ## version 2.1 86 | 87 | > 01.06.2020 88 | 89 | - upgrade `typed-schema` 90 | - upgrade `tofu` 91 | 92 | ## version 2.0 93 | 94 | > 
18.03.2020 95 | 96 | Identical to 2.0-RC2, just fixed some deprecated stuff in tests 97 | 98 | ## version 2.0-RC2 99 | 100 | > 15.01.2020 101 | 102 | Bump tofu 103 | 104 | ## version 2.0-RC1 105 | 106 | > 15.01.2020 107 | 108 | Replace monocle with tofu-optics 109 | 110 | ## version 1.5.1 111 | 112 | > 10.12.2019 113 | 114 | - upgrade `reactivemongo` 115 | - upgrade `typed-schema` 116 | 117 | ## version 1.5 118 | 119 | > 14.11.2019 120 | 121 | - upgrade `circe` 122 | - upgrade `reactivemongo` 123 | - upgrade `typed-schema` 124 | 125 | ## version 1.4 126 | 127 | > 19.09.2019 128 | 129 | Introduce `ToFilter` typeclass for Json filtering 130 | 131 | ## version 1.3 132 | 133 | > 11.09.2019 134 | 135 | Uncurry `unsafeOneOfDiscr` to avoid overload clashing 136 | 137 | ## version 1.2 138 | 139 | > 11.09.2019 140 | 141 | - add convenience overloads for a bunch of methods `annotated.Schema` 142 | - bump mouse 143 | 144 | ## version 1.1.1 145 | 146 | > 11.09.2019 147 | 148 | - bump `monocle` 149 | 150 | ## version 1.1 151 | 152 | > 11.09.2019 153 | 154 | - Support schema annotations 155 | - upgrade `reactivemongo` 156 | 157 | ## version 1.0 158 | 159 | > 01.09.2019 160 | 161 | Identical to 1.0-beta15 162 | 163 | ## version 1.0-beta15 164 | 165 | > 02.08.2019 166 | 167 | Fix `typed-schema` dependency 168 | 169 | ## version 1.0-beta14 170 | 171 | > 06.06.2019 172 | 173 | Improve `absent` field constructor 174 | 175 | ## version 1.0-beta13 176 | 177 | > 06.06.2019 178 | 179 | Introduce `absent` for representing properties that are always absent 180 | 181 | ## version 1.0-beta12 182 | 183 | > 31.05.2019 184 | 185 | Introduce `unsafeOneOfDiscr` 186 | 187 | ## version 1.0-beta11 188 | 189 | > 30.05.2019 190 | 191 | PropSchema construction methods now accept `Lens` 192 | 193 | ## version 1.0-beta10 194 | 195 | > 30.05.2019 196 | 197 | Improve `constant` field constructor 198 | 199 | ## version 1.0-beta9 200 | 201 | > 29.05.2019 202 | 203 | - upgrade `typed-schema` 204 | - upgrade 
`kind-projector` 205 | 206 | ## version 1.0-beta8 207 | 208 | > 27.05.2019 209 | 210 | Display discriminator values in swagger as patterns 211 | 212 | ## version 1.0-beta7 213 | 214 | > 06.05.2019 215 | 216 | First-class support for constant fields in schemas 217 | 218 | ## version 1.0-beta6 219 | 220 | > 18.04.2019 221 | 222 | - rename `toTypeable` into `typeable` 223 | - rename `toGen` into `gen` 224 | 225 | ## version 1.0-beta5 226 | 227 | > 17.04.2019 228 | 229 | Circe module can produce AccumulatedDecoders 230 | 231 | ## version 1.0-beta4 232 | 233 | > 14.04.2019 234 | 235 | Minor fix in reactivemongo module 236 | 237 | ## version 1.0-beta3 238 | 239 | > 11.04.2019 240 | 241 | Support for default values of required properties 242 | 243 | ## Version 1.0-beta2 244 | 245 | > 01.04.2019 246 | 247 | Support for OneOf schemas with discriminator fields 248 | 249 | ## Version 1.0-beta1 250 | 251 | > 31.03.2019 252 | 253 | Initial release with support of 254 | - circe 255 | - reactivemongo 256 | - scalacheck 257 | - typed-schema -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # morphling [![Release Artifacts][Badge-SonatypeReleases]][Link-SonatypeReleases] 2 | Cats-based Scala library for free applicative schemas. 
Core module of morphling 3 | initially was a cats-based port of the excellent Kris Nuttycombe's [xenomorph](https://github.com/nuttycom/xenomorph) 4 | 5 | # Getting started 6 | 7 | All You need is ~~love~~: 8 | 9 | ``` 10 | libraryDependencies ++= Seq( 11 | "com.github.danslapman" %% "morphling" % "4.0.0", //core module 12 | "com.github.danslapman" %% "morphling-circe" % "4.0.0", 13 | "com.github.danslapman" %% "morphling-reactivemongo" % "4.0.0", 14 | "com.github.danslapman" %% "morphling-typed-schema" % "4.0.0", 15 | "com.github.danslapman" %% "morphling-scalacheck" % "4.0.0", 16 | "com.github.danslapman" %% "morphling-tapir" % "4.0.0" 17 | ) 18 | ``` 19 | 20 | # Version compatibility table 21 | 22 | | morphling | cats | circe | reactivemongo | typed-schema | scalacheck | tofu | glass | tapir | 23 | |-----------|------| ----- |-------------------| ------------ | ---------- | ---- | ----- | ----- | 24 | | 4.0 | 2.8 | 0.14.2 | 1.0.3 / 1.1.0-RC6 | 0.14.3 | 1.15.3 | - | 0.3 | 1.0.0 | 25 | | 3.1 | 2.8 | 0.14.2 | 1.0.3 / 1.1.0-RC6 | 0.14.3 | 1.15.3 | - | 0.1 | 1.0.0 | 26 | | 3.0 | 2.8 | 0.14.2 | 1.0.3 / 1.1.0-RC6 | 0.14.3 | 1.15.3 | - | 0.1 | - | 27 | | 2.7-glass | 2.8 | 0.13.0 | 1.0.3 | 0.14.3 | 1.15.3 | - | 0.1 | - | 28 | | 2.7 | 2.4.2 | 0.13.0 | 1.0.3 | 0.14.3 | 1.15.3 | 0.10.0 | - | - | 29 | | 2.6 | 2.4.2 | 0.12.3 | 0.19.3 | 0.12.5.1 | 1.14.3 | 0.7.9 | - | - | 30 | | 2.4 | 2.0.0 | 0.12.3 | 0.19.3 | 0.12.5.1 | 1.14.3 | 0.7.9 | - | - | 31 | | 2.1 | 2.0.0 | 0.12.3 | 0.19.3 | 0.12.4 | 1.14.3 | 0.7.4 | - | - | 32 | | 2.0 | 2.0.0 | 0.12.3 | 0.19.3 | 0.11.1 | 1.14.3 | 0.6.1 | - | - | 33 | | 1.5.1 | 2.0.0 | 0.12.3 | 0.19.3 | 0.11.1 | 1.14.0 | - | - | - | 34 | | 1.5 | 2.0.0 | 0.12.3 | 0.19.0 | 0.11.0 | 1.14.0 | - | - | - | 35 | | 1.1 | 2.0.0 | 0.11.1 | 0.17.0 | 0.11.0-beta6 | 1.14.0 | - | - | - | 36 | | 1.0 | 1.6.1 | 0.11.1 | 0.16.4 | 0.11.0-beta6 | 1.14.0 | - | - | - | 37 | 38 | # Setting up protocol 39 | First of all, You need to define a set of "scalar" types You like to 
support. 40 | They can be `Int`s, `BigInt`s, `Instant`s, any type You mean to treat as scalar, actually. 41 | You can find an example protocol in tests of `core` module: 42 | 43 | ```scala 44 | import morphling.HMutu 45 | import morphling.Schema._ 46 | 47 | sealed trait SType[F[_], I] 48 | 49 | case class SNullT[F[_]]() extends SType[F, Unit] 50 | case class SBoolT[F[_]]() extends SType[F, Boolean] 51 | 52 | case class SIntT[F[_]]() extends SType[F, Int] 53 | case class SLongT[F[_]]() extends SType[F, Long] 54 | 55 | case class SFloatT[F[_]]() extends SType[F, Float] 56 | case class SDoubleT[F[_]]() extends SType[F, Double] 57 | 58 | case class SCharT[F[_]]() extends SType[F, Char] 59 | case class SStrT[F[_]]() extends SType[F, String] 60 | 61 | case class SArrayT[F[_], I](elem: F[I]) extends SType[F, Vector[I]] 62 | ``` 63 | 64 | Also it will be convenient to define some helper methods (will see their purpose later): 65 | ```scala 66 | object SType { 67 | type SSchema[I] = HMutu[SType, Schema, I] 68 | 69 | val sNull = prim(HMutu[SType, Schema, Unit](SNullT())) 70 | val sBool = prim(HMutu[SType, Schema, Boolean](SBoolT())) 71 | val sInt = prim(HMutu[SType, Schema, Int](SIntT())) 72 | val sLong = prim(HMutu[SType, Schema, Long](SLongT())) 73 | val sFloat = prim(HMutu[SType, Schema, Float](SFloatT())) 74 | val sDouble = prim(HMutu[SType, Schema, Double](SDoubleT())) 75 | val sChar = prim(HMutu[SType, Schema, Char](SCharT())) 76 | val sStr = prim(HMutu[SType, Schema, String](SStrT())) 77 | 78 | def sArray[I](elem: Schema[SSchema, I]) = prim(HMutu[SType, Schema, Vector[I]](SArrayT(elem))) 79 | } 80 | ``` 81 | 82 | # Creating a Schema 83 | 84 | Now we can define a schema for an arbitrary type using our protocol: 85 | 86 | ```scala 87 | import cats.syntax.apply._ 88 | import morphling.Schema 89 | import morphling.Schema._ 90 | import glass.macros._ 91 | import SType._ //Defined above 92 | 93 | case @Optics class Server(host: String, port: Int) 94 | object Server { 95 | 
val serverSchema: Schema[SSchema, Server] = rec( 96 | ( 97 | required("host", sStr, Server.host), 98 | required("port", sInt, Server.port) 99 | ).mapN(Server.apply) 100 | ) 101 | } 102 | ``` 103 | 104 | That's it. 105 | 106 | # Generating instances 107 | 108 | `morphling` provides a set of modules which enables generation of typeclasses 109 | from schema instances. To use them You need to define an "implementation" 110 | of protocol You previously defined. Let's do it for circe Encoding: 111 | 112 | ```scala 113 | import cats._ 114 | import io.circe.{Decoder, Encoder, Json} 115 | import SType.SSchema 116 | import morphling.Schema.Schema 117 | 118 | def sTypeEncoder[F[_]: ToJson]: (SType[F, *] ~> Encoder) = 119 | new (SType[F, *] ~> Encoder) { 120 | import ToJson._ 121 | 122 | override def apply[A](st: SType[F, A]): Encoder[A] = st match { 123 | case SNullT() => Encoder.encodeUnit 124 | case SBoolT() => Encoder.encodeBoolean 125 | case SIntT() => Encoder.encodeInt 126 | case SLongT() => Encoder.encodeLong 127 | case SFloatT() => Encoder.encodeFloat 128 | case SDoubleT() => Encoder.encodeDouble 129 | case SCharT() => Encoder.encodeChar 130 | case SStrT() => Encoder.encodeString 131 | case SArrayT(elem) => Encoder.encodeVector(elem.encoder) 132 | } 133 | } 134 | 135 | implicit val primFromJson: FromJson[SSchema] = new FromJson[SSchema] { 136 | val decoder = new (SSchema ~> Decoder) { 137 | def apply[I](s: SSchema[I]): Decoder[I] = sTypeDecoder[SSchema[I]#Inner].apply(s.unmutu) 138 | } 139 | ``` 140 | 141 | With such a transformation defined we can derive an `Encoder` for `Server`: 142 | 143 | ```scala 144 | val encoder = Server.schema.encoder 145 | ``` 146 | 147 | [Link-SonatypeReleases]: https://oss.sonatype.org/content/repositories/releases/com/github/danslapman/morphling_2.13/ "Sonatype Releases" 148 | 149 | [Badge-SonatypeReleases]: https://img.shields.io/nexus/r/https/oss.sonatype.org/com.github.danslapman/morphling_2.13.svg "Sonatype Releases" 150 | 
-------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | ThisBuild / scalaVersion := "2.13.16" 2 | ThisBuild / organization := "com.github.danslapman" 3 | 4 | publish := {} 5 | publishArtifact := false 6 | publish / skip := true 7 | 8 | val versions = Map( 9 | "cats" -> "2.8.0", 10 | "circe" -> "0.14.2", 11 | "mouse" -> "1.0.11", 12 | "scalacheck" -> "1.15.3", 13 | "scalatest" -> "3.2.11", 14 | "simulacrum" -> "1.1.0", 15 | "scalatestplus-scalacheck" -> "3.2.11.0", 16 | "glass" -> "0.3.0" 17 | ) 18 | 19 | val scalaVersions = Seq("2.12.20", "2.13.16", "3.3.5") 20 | 21 | lazy val morphling = (projectMatrix in file("core")) 22 | .jvmPlatform(scalaVersions = scalaVersions) 23 | .settings(Settings.common) 24 | .settings( 25 | name := "morphling", 26 | ThisBuild / parallelExecution := false, 27 | libraryDependencies ++= Seq( 28 | "org.typelevel" %% "cats-core" % versions("cats"), 29 | "org.typelevel" %% "cats-free" % versions("cats"), 30 | "org.typelevel" %% "alleycats-core" % versions("cats"), 31 | "tf.tofu" %% "glass-core" % versions("glass"), 32 | "tf.tofu" %% "glass-macro" % versions("glass") % Test, 33 | "org.scalatest" %% "scalatest" % versions("scalatest") % Test 34 | ), 35 | libraryDependencies ++= { 36 | (CrossVersion.partialVersion(scalaVersion.value): @unchecked) match { 37 | case Some((2, _)) => 38 | Seq("com.chuusai" %% "shapeless" % "2.3.3") 39 | case Some((3, _)) => Seq.empty[ModuleID] 40 | } 41 | } 42 | ) 43 | 44 | lazy val `morphling-scalacheck` = (projectMatrix in file("scalacheck")) 45 | .dependsOn(morphling % "test->test;compile->compile") 46 | .jvmPlatform(scalaVersions = scalaVersions) 47 | .settings(Settings.common) 48 | .settings( 49 | name := "morphling-scalacheck", 50 | ThisBuild / parallelExecution := false, 51 | libraryDependencies ++= Seq( 52 | "org.typelevel" %% "mouse" % versions("mouse"), 53 | "org.scalacheck" %% 
"scalacheck" % versions("scalacheck") 54 | ), 55 | libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match { 56 | case Some((2, _)) => 57 | Seq("io.github.leviysoft" %% "simulacrum" % versions("simulacrum")) 58 | case _ => Seq.empty[ModuleID] 59 | }) 60 | ) 61 | 62 | lazy val `morphling-circe` = (projectMatrix in file("circe")) 63 | .dependsOn(morphling % "test->test;compile->compile", `morphling-scalacheck` % "test->test") 64 | .jvmPlatform(scalaVersions = scalaVersions) 65 | .settings(Settings.common) 66 | .settings( 67 | name := "morphling-circe", 68 | ThisBuild / parallelExecution := false, 69 | libraryDependencies ++= Seq( 70 | "io.circe" %% "circe-core" % versions("circe"), 71 | "org.typelevel" %% "mouse" % versions("mouse"), 72 | "org.scalatest" %% "scalatest" % versions("scalatest") % Test, 73 | "org.scalacheck" %% "scalacheck" % versions("scalacheck") % Test, 74 | "org.scalatestplus" %% "scalacheck-1-15" % versions("scalatestplus-scalacheck") % Test 75 | ), 76 | libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match { 77 | case Some((2, _)) => 78 | Seq("io.github.leviysoft" %% "simulacrum" % versions("simulacrum")) 79 | case _ => Seq.empty[ModuleID] 80 | }), 81 | libraryDependencies += { 82 | (CrossVersion.partialVersion(scalaVersion.value): @unchecked) match { 83 | case Some((2, _)) => "com.ironcorelabs" %% "cats-scalatest" % "3.1.1" % Test 84 | case Some((3, _)) => "com.ironcorelabs" %% "cats-scalatest" % "4.0.0" % Test 85 | } 86 | } 87 | ) 88 | 89 | lazy val `morphling-reactivemongo` = (projectMatrix in file("reactivemongo")) 90 | .dependsOn(morphling % "test->test;compile->compile", `morphling-scalacheck` % "test->test") 91 | .jvmPlatform(scalaVersions = scalaVersions) 92 | .settings(Settings.common) 93 | .settings( 94 | name := "morphling-reactivemongo", 95 | ThisBuild / parallelExecution := false, 96 | libraryDependencies ++= Seq( 97 | "org.reactivemongo" %% "reactivemongo-bson-api" % "1.0.3", 98 | 
"org.typelevel" %% "mouse" % versions("mouse"), 99 | "org.scalatest" %% "scalatest" % versions("scalatest") % Test, 100 | "org.scalacheck" %% "scalacheck" % versions("scalacheck") % Test, 101 | "org.scalatestplus" %% "scalacheck-1-15" % versions("scalatestplus-scalacheck") % Test 102 | ), 103 | libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match { 104 | case Some((2, _)) => 105 | Seq("io.github.leviysoft" %% "simulacrum" % versions("simulacrum")) 106 | case _ => Seq.empty[ModuleID] 107 | }), 108 | libraryDependencies ++= { 109 | (CrossVersion.partialVersion(scalaVersion.value): @unchecked) match { 110 | case Some((2, _)) => 111 | Seq( 112 | "org.reactivemongo" %% "reactivemongo-bson-api" % "1.0.3" 113 | ) 114 | case Some((3, _)) => 115 | Seq( 116 | "org.reactivemongo" %% "reactivemongo-bson-api" % "1.1.0-RC6" 117 | ) 118 | } 119 | } 120 | ) 121 | 122 | lazy val `morphling-typed-schema` = (projectMatrix in file("typedschema")) 123 | .dependsOn(morphling % "test->test;compile->compile") 124 | .jvmPlatform(scalaVersions = scalaVersions.init) 125 | .settings(Settings.common) 126 | .settings( 127 | name := "morphling-typed-schema", 128 | ThisBuild / parallelExecution := false, 129 | libraryDependencies ++= Seq( 130 | "ru.tinkoff" %% "typed-schema-swagger" % "0.14.3", 131 | "org.typelevel" %% "mouse" % versions("mouse"), 132 | "org.scalatest" %% "scalatest" % versions("scalatest") % Test, 133 | "com.stephenn" %% "scalatest-circe" % "0.0.2" % Test, 134 | "org.scalaz" %% "scalaz-core" % "7.2.29" % Test 135 | ), 136 | libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match { 137 | case Some((2, _)) => 138 | Seq("io.github.leviysoft" %% "simulacrum" % versions("simulacrum")) 139 | case _ => 140 | Seq.empty[ModuleID] 141 | }) 142 | ) 143 | 144 | lazy val `morphling-tapir` = (projectMatrix in file("tapir")) 145 | .dependsOn(morphling % "test->test;compile->compile") 146 | .jvmPlatform(scalaVersions = scalaVersions) 147 | 
.settings(Settings.common) 148 | .settings( 149 | name := "morphling-tapir", 150 | ThisBuild / parallelExecution := false, 151 | libraryDependencies ++= Seq( 152 | "com.softwaremill.sttp.tapir" %% "tapir-core" % "1.0.0", 153 | "org.typelevel" %% "mouse" % versions("mouse"), 154 | "org.scalatest" %% "scalatest" % versions("scalatest") % Test, 155 | "org.scalacheck" %% "scalacheck" % versions("scalacheck") % Test, 156 | "org.scalatestplus" %% "scalacheck-1-15" % versions("scalatestplus-scalacheck") % Test, 157 | "com.stephenn" %% "scalatest-circe" % "0.2.5" % Test, 158 | "com.softwaremill.sttp.tapir" %% "tapir-openapi-docs" % "1.0.0" % Test, 159 | "com.softwaremill.sttp.apispec" %% "openapi-circe" % "0.2.1" % Test 160 | ), 161 | libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match { 162 | case Some((2, _)) => 163 | Seq("io.github.leviysoft" %% "simulacrum" % versions("simulacrum")) 164 | case _ => Seq.empty[ModuleID] 165 | }) 166 | ) 167 | 168 | lazy val root = (projectMatrix in file(".")) 169 | .aggregate( 170 | morphling, 171 | `morphling-circe`, 172 | `morphling-scalacheck`, 173 | `morphling-reactivemongo`, 174 | `morphling-typed-schema`, 175 | `morphling-tapir` 176 | ) 177 | .jvmPlatform(scalaVersions = scalaVersions) 178 | .settings(Settings.common) 179 | .settings( 180 | crossScalaVersions := Nil, 181 | publish := {}, 182 | publishArtifact := false, 183 | publish / skip := true 184 | ) 185 | -------------------------------------------------------------------------------- /circe/src/main/scala-2/morphling/circe/FromJson.scala: -------------------------------------------------------------------------------- 1 | package morphling.circe 2 | 3 | import cats.* 4 | import cats.data.EitherK 5 | import cats.free.* 6 | import cats.syntax.either.* 7 | import io.circe.{Decoder, DecodingFailure, HCursor} 8 | import morphling.* 9 | import morphling.Schema.* 10 | import morphling.annotated.Schema.AnnotatedSchema 11 | import mouse.boolean.* 12 | 
import simulacrum_.typeclass 13 | 14 | @typeclass 15 | trait FromJson[S[_]] { 16 | def decoder: S ~> Decoder 17 | } 18 | 19 | object FromJson { 20 | implicit class FromJsonOps[F[_], A](fa: F[A]) { 21 | def decoder(implicit FJ: FromJson[F]): Decoder[A] = FJ.decoder(fa) 22 | } 23 | 24 | implicit def schemaFromJson[P[_]: FromJson]: FromJson[Schema[P, *]] = new FromJson[Schema[P, *]] { 25 | override val decoder: Schema[P, *] ~> Decoder = new (Schema[P, *] ~> Decoder) { 26 | override def apply[I](schema: Schema[P, I]): Decoder[I] = 27 | HFix.cataNT[SchemaF[P, *[_], *], Decoder](decoderAlg[P]).apply(schema) 28 | } 29 | } 30 | 31 | implicit def annSchemaFromJson[P[_]: FromJson, A[_]: *[_] ~> λ[T => Endo[Decoder[T]]]]: FromJson[AnnotatedSchema[P, A, *]] = 32 | new FromJson[AnnotatedSchema[P, A, *]] { 33 | override val decoder: AnnotatedSchema[P, A, *] ~> Decoder = new (AnnotatedSchema[P, A, *] ~> Decoder) { 34 | override def apply[I](schema: AnnotatedSchema[P, A, I]): Decoder[I] = 35 | HFix.cataNT[HEnvT[A, SchemaF[P, *[_], *], *[_], *], Decoder](annDecoderAlg[P, A]).apply(schema) 36 | } 37 | } 38 | 39 | def decoderAlg[P[_]: FromJson]: HAlgebra[SchemaF[P, *[_], *], Decoder] = 40 | new HAlgebra[SchemaF[P, *[_], *], Decoder] { 41 | def apply[I](s: SchemaF[P, Decoder, I]): Decoder[I] = s match { 42 | case PrimSchema(p) => FromJson[P].decoder(p) 43 | 44 | case OneOfSchema(alts, None) => 45 | Decoder.instance { (c: HCursor) => 46 | val results = for { 47 | fields <- c.keys.toList.map(_.toList) 48 | altResult <- alts.toList flatMap { case Alt(id, base, prism) => 49 | fields 50 | .contains(id) 51 | .option( 52 | c.downField(id).as(base).map(prism.upcast) 53 | ) 54 | .toList 55 | } 56 | } yield altResult 57 | 58 | val altIds = alts.map(_.id) 59 | results match { 60 | case x :: Nil => x 61 | case Nil => Left(DecodingFailure(s"No fields found matching any of $altIds", c.history)) 62 | case _ => Left(DecodingFailure(s"More than one matching field found among $altIds", c.history)) 63 | 
} 64 | } 65 | 66 | case OneOfSchema(alts, Some(discriminatorField)) => 67 | Decoder.instance { (c: HCursor) => 68 | for { 69 | altId <- c.downField(discriminatorField).as[String] 70 | Alt(_, base, prism) <- alts 71 | .find(_.id == altId) 72 | .toRight(DecodingFailure(s"No '$discriminatorField' case of value '$altId'", c.history)) 73 | altResult <- c.as(base).map(prism.upcast) 74 | } yield altResult 75 | } 76 | 77 | case RecordSchema(rb) => 78 | decodeObj(rb) 79 | 80 | case IsoSchema(base, iso) => 81 | base.map(iso.get) 82 | } 83 | } 84 | 85 | def annDecoderAlg[P[_]: FromJson, Ann[_]](implicit 86 | interpret: Ann ~> λ[T => Endo[Decoder[T]]] 87 | ): HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], Decoder] = 88 | new HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], Decoder] { 89 | override def apply[I](s: HEnvT[Ann, SchemaF[P, *[_], *], Decoder, I]): Decoder[I] = 90 | interpret(s.ask).apply(decoderAlg[P].apply(s.fa)) 91 | } 92 | 93 | def decodeObj[I](rb: FreeApplicative[PropSchema[I, Decoder, *], I]): Decoder[I] = 94 | rb.foldMap( 95 | new (PropSchema[I, Decoder, *] ~> Decoder) { 96 | def apply[B](ps: PropSchema[I, Decoder, B]): Decoder[B] = ps match { 97 | case Required(field, base, _, None) => 98 | Decoder.instance(_.downField(field).as(base)) 99 | 100 | case Required(field, base, _, Some(default)) => 101 | Decoder.instance(_.downField(field).as(base)).handleErrorWith(_ => Decoder.const(default)) 102 | 103 | case opt: Optional[I, Decoder, i] => 104 | Decoder.instance(_.downField(opt.fieldName).as[B](Decoder.decodeOption(opt.base))) 105 | 106 | case Constant(_, value, _) => Decoder.const(value) 107 | 108 | case abs: Absent[I, Decoder, i] => 109 | Decoder.instance(_ => Option.empty[i].asRight[DecodingFailure]) 110 | } 111 | } 112 | ) 113 | 114 | implicit def eitherKFromJson[P[_]: FromJson, Q[_]: FromJson]: FromJson[EitherK[P, Q, *]] = 115 | new FromJson[EitherK[P, Q, *]] { 116 | override val decoder: EitherK[P, Q, *] ~> Decoder = new (EitherK[P, Q, *] ~> Decoder) 
{ 117 | override def apply[A](p: EitherK[P, Q, A]): Decoder[A] = 118 | p.run.fold( 119 | FromJson[P].decoder(_), 120 | FromJson[Q].decoder(_), 121 | ) 122 | } 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /circe/src/main/scala-2/morphling/circe/ToFilter.scala: -------------------------------------------------------------------------------- 1 | package morphling.circe 2 | 3 | import cats.* 4 | import cats.data.{Const, EitherK} 5 | import cats.free.* 6 | import cats.instances.function.* 7 | import cats.instances.option.* 8 | import cats.syntax.all.* 9 | import io.circe.{Json, JsonObject} 10 | import morphling.* 11 | import morphling.Schema.Schema 12 | import morphling.annotated.Schema.AnnotatedSchema 13 | import mouse.option.* 14 | import simulacrum_.typeclass 15 | 16 | /** 17 | * Allows to filter Json via specific schema 18 | */ 19 | @typeclass 20 | trait ToFilter[S[_]] { 21 | def filter: S ~> Const[Json => Option[Json], *] 22 | } 23 | 24 | object ToFilter { 25 | type Subset[T] = T => Option[T] 26 | type JsonFilter[T] = Const[Subset[Json], T] 27 | 28 | implicit class ToFilterOps[S[_], A](s: S[A]) { 29 | def jsonFilter(implicit TF: ToFilter[S]): Subset[Json] = TF.filter(s).getConst 30 | } 31 | 32 | implicit def schemaToFilter[P[_]: ToFilter]: ToFilter[Schema[P, *]] = new ToFilter[Schema[P, *]] { 33 | override val filter: Schema[P, *] ~> JsonFilter = new (Schema[P, *] ~> JsonFilter) { 34 | override def apply[I](schema: Schema[P, I]): JsonFilter[I] = 35 | HFix.cataNT[SchemaF[P, *[_], *], JsonFilter](filterAlg[P]).apply(schema) 36 | } 37 | } 38 | 39 | implicit def annSchemaToFilter[P[_]: ToFilter, A[_]: *[_] ~> λ[T => Endo[JsonFilter[T]]]] 40 | : ToFilter[AnnotatedSchema[P, A, *]] = 41 | new ToFilter[AnnotatedSchema[P, A, *]] { 42 | override val filter: AnnotatedSchema[P, A, *] ~> JsonFilter = new (AnnotatedSchema[P, A, *] ~> JsonFilter) { 43 | override def apply[I](schema: AnnotatedSchema[P, A, I]): JsonFilter[I] 
= 44 | HFix.cataNT[HEnvT[A, SchemaF[P, *[_], *], *[_], *], JsonFilter](annFilterAlg).apply(schema) 45 | } 46 | } 47 | 48 | def filterAlg[P[_]: ToFilter]: HAlgebra[SchemaF[P, *[_], *], JsonFilter] = 49 | new HAlgebra[SchemaF[P, *[_], *], JsonFilter] { 50 | override def apply[I](schema: SchemaF[P, JsonFilter, I]): JsonFilter[I] = schema match { 51 | case s: PrimSchema[P, JsonFilter, I] => ToFilter[P].filter(s.prim) 52 | case s: OneOfSchema[P, JsonFilter, I] => 53 | Const.of { 54 | s.discriminator.cata( 55 | dField => 56 | s.alts.map { case Alt(_, f, _) => 57 | extractField(dField) |+| f.getConst 58 | }.fold, 59 | s.alts.map { case Alt(id, f, _) => 60 | extractFieldContentsStrict(id, f.getConst) 61 | }.fold 62 | ) 63 | } 64 | case s: RecordSchema[P, JsonFilter, I] => recordFilter[P, I](s.props) 65 | case s: IsoSchema[P, JsonFilter, i0, I] => s.base.retag[I] 66 | } 67 | } 68 | 69 | def annFilterAlg[P[_]: ToFilter, Ann[_]](implicit 70 | interpret: Ann ~> λ[T => Endo[JsonFilter[T]]] 71 | ): HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], JsonFilter] = 72 | new HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], JsonFilter] { 73 | override def apply[A](schema: HEnvT[Ann, SchemaF[P, *[_], *], JsonFilter, A]): JsonFilter[A] = 74 | interpret.apply(schema.ask).apply(filterAlg[P].apply(schema.fa)) 75 | } 76 | 77 | def recordFilter[P[_]: ToFilter, I](rb: FreeApplicative[PropSchema[I, JsonFilter, *], I]): JsonFilter[I] = 78 | rb.foldMap[JsonFilter]( 79 | new (PropSchema[I, JsonFilter, *] ~> JsonFilter) { 80 | override def apply[B](ps: PropSchema[I, JsonFilter, B]): JsonFilter[B] = 81 | ps match { 82 | case req: Required[I, JsonFilter, i] => 83 | Const.of(extractFieldContentsStrict(req.fieldName, req.base.getConst)) 84 | case opt: Optional[I, JsonFilter, i] => Const.of(extractFieldContents(opt.fieldName, opt.base.getConst)) 85 | case _ => Const.of(sjm.empty) 86 | } 87 | } 88 | ) 89 | 90 | implicit def eitherKToFilter[P[_]: ToFilter, Q[_]: ToFilter]: ToFilter[EitherK[P, Q, *]] = 
91 | new ToFilter[EitherK[P, Q, *]] { 92 | override val filter = new (EitherK[P, Q, *] ~> JsonFilter) { 93 | def apply[A](p: EitherK[P, Q, A]): JsonFilter[A] = 94 | p.run.fold(ToFilter[P].filter(_), ToFilter[Q].filter(_)) 95 | } 96 | } 97 | 98 | private def extractField(name: String): Subset[Json] = { j => 99 | j.mapObject(_.filterKeys(_ == name)).asObject.filter(_.nonEmpty).map(Json.fromJsonObject) 100 | } 101 | 102 | private def extractFieldContents(name: String, inner: Subset[Json]): Subset[Json] = { j => 103 | j.mapObject(jo => 104 | JsonObject.fromIterable(jo.filterKeys(_ == name).toIterable.flatMap { case (k, v) => inner(v).map(k -> _) }) 105 | ).asObject 106 | .map(Json.fromJsonObject) 107 | } 108 | 109 | private def extractFieldContentsStrict(name: String, inner: Subset[Json]): Subset[Json] = { j => 110 | j.mapObject(jo => 111 | JsonObject.fromIterable(jo.filterKeys(_ == name).toIterable.flatMap { case (k, v) => inner(v).map(k -> _) }) 112 | ).asObject 113 | .filter(_.nonEmpty) 114 | .map(Json.fromJsonObject) 115 | } 116 | 117 | implicit private val semiJ: Semigroup[Json] = _ deepMerge _ 118 | 119 | implicit private val sjm: Monoid[Subset[Json]] = new Monoid[Subset[Json]] { 120 | override val empty: Subset[Json] = _ => None 121 | 122 | override def combine(x: Subset[Json], y: Subset[Json]): Subset[Json] = 123 | x &&& y andThen { case (lhs, rhs) => lhs |+| rhs } 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /circe/src/main/scala-2/morphling/circe/ToJson.scala: -------------------------------------------------------------------------------- 1 | package morphling.circe 2 | 3 | import cats.* 4 | import cats.data.State.* 5 | import cats.data.{EitherK, State} 6 | import cats.free.* 7 | import io.circe.syntax.* 8 | import io.circe.{Encoder, Json, JsonObject} 9 | import morphling.* 10 | import morphling.Schema.* 11 | import morphling.annotated.Schema.AnnotatedSchema 12 | import mouse.option.* 13 | import 
simulacrum_.typeclass 14 | 15 | @typeclass 16 | trait ToJson[S[_]] { 17 | def encoder: S ~> Encoder 18 | } 19 | 20 | object ToJson { 21 | implicit class ToJsonOps[F[_], A](private val fa: F[A]) { 22 | def encoder(implicit TJ: ToJson[F]): Encoder[A] = TJ.encoder(fa) 23 | } 24 | 25 | implicit def schemaToJson[P[_]: ToJson]: ToJson[Schema[P, *]] = new ToJson[Schema[P, *]] { 26 | override val encoder: Schema[P, *] ~> Encoder = new (Schema[P, *] ~> Encoder) { 27 | override def apply[I](schema: Schema[P, I]): Encoder[I] = 28 | HFix.cataNT[SchemaF[P, *[_], *], Encoder](serializeAlg).apply(schema) 29 | } 30 | } 31 | 32 | implicit def annSchemaToJson[P[_]: ToJson, A[_]]: ToJson[AnnotatedSchema[P, A, *]] = 33 | new ToJson[AnnotatedSchema[P, A, *]] { 34 | override val encoder: AnnotatedSchema[P, A, *] ~> Encoder = new (AnnotatedSchema[P, A, *] ~> Encoder) { 35 | override def apply[I](schema: AnnotatedSchema[P, A, I]): Encoder[I] = 36 | HFix 37 | .cataNT[SchemaF[P, *[_], *], Encoder](serializeAlg) 38 | .apply( 39 | HFix.forget[SchemaF[P, *[_], *], A].apply(schema) 40 | ) 41 | } 42 | } 43 | 44 | def serializeAlg[P[_]: ToJson]: HAlgebra[SchemaF[P, *[_], *], Encoder] = 45 | new HAlgebra[SchemaF[P, *[_], *], Encoder] { 46 | def apply[I](schema: SchemaF[P, Encoder, I]): Encoder[I] = 47 | schema match { 48 | case s: PrimSchema[P, Encoder, I] => ToJson[P].encoder(s.prim) 49 | 50 | case s: OneOfSchema[P, Encoder, I] => 51 | (value: I) => 52 | s.discriminator.cata( 53 | discriminator => 54 | s.alts 55 | .map { case alt @ Alt(id, base, prism) => 56 | prism.getOption(value).map(alt.base(_).mapObject((discriminator := alt.id) +: _)) 57 | } 58 | .collect { case Some(json) => json } 59 | .head, 60 | s.alts 61 | .map { case Alt(id, base, prism) => 62 | prism.getOption(value).map(base(_)).map(json => Json.obj(id -> json)) 63 | } 64 | .collect { case Some(json) => json } 65 | .head 66 | ) 67 | 68 | case s: RecordSchema[P, Encoder, I] => 69 | serializeObjF[P, I](s.props) 70 | 71 | case s: 
IsoSchema[P, Encoder, i0, I] => 72 | s.base.contramap(s.eqv.upcast(_)) 73 | } 74 | } 75 | 76 | def serializeObjF[P[_]: ToJson, I](rb: FreeApplicative[PropSchema[I, Encoder, *], I]): Encoder[I] = { (value: I) => 77 | Json.fromJsonObject( 78 | rb.foldMap[State[JsonObject, *]]( 79 | new (PropSchema[I, Encoder, *] ~> State[JsonObject, *]) { 80 | def apply[B](ps: PropSchema[I, Encoder, B]): State[JsonObject, B] = 81 | for { 82 | _ <- modify { (obj: JsonObject) => 83 | ps match { 84 | case req: Required[I, Encoder, i] => 85 | (req.fieldName, req.base(req.extract.extract(value))) +: obj 86 | 87 | case opt: Optional[I, Encoder, i] => 88 | opt.extract.extract(value).cata(v => (opt.fieldName, opt.base(v)) +: obj, obj) 89 | 90 | case Constant(_, _, _) => obj 91 | 92 | case Absent(_, _) => obj 93 | } 94 | } 95 | } yield ps.extract.extract(value) 96 | } 97 | ).runS(JsonObject.empty) 98 | .value 99 | ) 100 | } 101 | 102 | implicit def eitherKToJson[P[_]: ToJson, Q[_]: ToJson]: ToJson[EitherK[P, Q, *]] = 103 | new ToJson[EitherK[P, Q, *]] { 104 | override val encoder = new (EitherK[P, Q, *] ~> Encoder) { 105 | def apply[A](p: EitherK[P, Q, A]): Encoder[A] = 106 | p.run.fold(ToJson[P].encoder(_), ToJson[Q].encoder(_)) 107 | } 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /circe/src/main/scala-3/morphling/circe/FromJson.scala: -------------------------------------------------------------------------------- 1 | package morphling.circe 2 | 3 | import cats.* 4 | import cats.data.EitherK 5 | import cats.free.* 6 | import cats.syntax.either.* 7 | import io.circe.{Decoder, DecodingFailure, HCursor} 8 | import morphling.* 9 | import morphling.Schema.* 10 | import morphling.annotated.Schema.AnnotatedSchema 11 | import morphling.given 12 | import mouse.boolean.* 13 | 14 | trait FromJson[S[_]] extends Serializable { 15 | def decoder: S ~> Decoder 16 | 17 | extension [F[_], A](fa: F[A])(using FJ: FromJson[F]) { 18 | def decoder: 
Decoder[A] = FJ.decoder(fa) 19 | } 20 | } 21 | 22 | object FromJson { 23 | def apply[P[_]](using fj: FromJson[P]): FromJson[P] = fj 24 | 25 | given [P[_]: FromJson]: FromJson[Schema[P, _]] = 26 | new FromJson[Schema[P, _]] { 27 | override val decoder: Schema[P, _] ~> Decoder = new (Schema[P, _] ~> Decoder) { 28 | override def apply[I](schema: Schema[P, I]): Decoder[I] = 29 | HFix.cataNT[[Y[_], Z] =>> SchemaF[P, Y, Z], Decoder](decoderAlg[P]).apply(schema) 30 | } 31 | } 32 | 33 | given [P[_]: FromJson, A[_]: [Y[_]] =>> Y ~> ([T] =>> Endo[Decoder[T]])]: FromJson[AnnotatedSchema[P, A, *]] = 34 | new FromJson[AnnotatedSchema[P, A, _]] { 35 | override val decoder: AnnotatedSchema[P, A, _] ~> Decoder = new (AnnotatedSchema[P, A, _] ~> Decoder) { 36 | override def apply[I](schema: AnnotatedSchema[P, A, I]): Decoder[I] = 37 | HFix 38 | .cataNT[[Y1[_], Z1] =>> HEnvT[A, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], Decoder](annDecoderAlg[P, A]) 39 | .apply(schema) 40 | } 41 | } 42 | 43 | def decoderAlg[P[_]: FromJson]: HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], Decoder] = 44 | new HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], Decoder] { 45 | def apply[I](s: SchemaF[P, Decoder, I]): Decoder[I] = s match { 46 | case PrimSchema(p) => FromJson[P].decoder(p) 47 | 48 | case OneOfSchema(alts, None) => 49 | Decoder.instance { (c: HCursor) => 50 | val results = for { 51 | fields <- c.keys.toList.map(_.toList) 52 | altResult <- alts.toList flatMap { case Alt(id, base, prism) => 53 | fields 54 | .contains(id) 55 | .option( 56 | c.downField(id).as(base).map(prism.upcast) 57 | ) 58 | .toList 59 | } 60 | } yield altResult 61 | 62 | val altIds = alts.map(_.id) 63 | results match { 64 | case x :: Nil => x 65 | case Nil => Left(DecodingFailure(s"No fields found matching any of $altIds", c.history)) 66 | case _ => Left(DecodingFailure(s"More than one matching field found among $altIds", c.history)) 67 | } 68 | } 69 | 70 | case OneOfSchema(alts, Some(discriminatorField)) => 71 | Decoder.instance { (c: 
HCursor) => 72 | for { 73 | altId <- c.downField(discriminatorField).as[String] 74 | alt <- alts 75 | .find(_.id == altId) 76 | .toRight(DecodingFailure(s"No '$discriminatorField' case of value '$altId'", c.history)) 77 | altResult <- c.as(alt.base).map(alt.subset.upcast) 78 | } yield altResult 79 | } 80 | 81 | case RecordSchema(rb) => 82 | decodeObj(rb) 83 | 84 | case IsoSchema(base, iso) => 85 | base.map(iso.get) 86 | } 87 | } 88 | 89 | def annDecoderAlg[P[_]: FromJson, Ann[_]](implicit 90 | interpret: Ann ~> ([T] =>> Endo[Decoder[T]]) 91 | ): HAlgebra[[Y1[_], Z1] =>> HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], Decoder] = 92 | new HAlgebra[[Y1[_], Z1] =>> HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], Decoder] { 93 | override def apply[I](s: HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Decoder, I]): Decoder[I] = 94 | interpret(s.ask).apply(decoderAlg[P].apply(s.fa)) 95 | } 96 | 97 | def decodeObj[I](rb: FreeApplicative[PropSchema[I, Decoder, _], I]): Decoder[I] = 98 | rb.foldMap( 99 | new (PropSchema[I, Decoder, _] ~> Decoder) { 100 | def apply[B](ps: PropSchema[I, Decoder, B]): Decoder[B] = ps match { 101 | case Required(field, base, _, None) => 102 | Decoder.instance(_.downField(field).as(base)) 103 | 104 | case Required(field, base, _, Some(default)) => 105 | Decoder.instance(_.downField(field).as(base)).handleErrorWith(_ => Decoder.const(default)) 106 | 107 | case opt: Optional[I, Decoder, i] @unchecked => 108 | Decoder.instance(_.downField(opt.fieldName).as[B](Decoder.decodeOption(opt.base))) 109 | 110 | case Constant(_, value, _) => Decoder.const(value) 111 | 112 | case abs: Absent[I, Decoder, i] @unchecked => 113 | Decoder.instance(_ => Option.empty[i].asRight[DecodingFailure]) 114 | } 115 | } 116 | ) 117 | 118 | given [P[_]: FromJson, Q[_]: FromJson]: FromJson[EitherK[P, Q, _]] = 119 | new FromJson[EitherK[P, Q, _]] { 120 | override val decoder: EitherK[P, Q, _] ~> Decoder = new (EitherK[P, Q, _] ~> Decoder) { 121 | override def apply[A](p: 
EitherK[P, Q, A]): Decoder[A] = 122 | p.run.fold( 123 | FromJson[P].decoder(_), 124 | FromJson[Q].decoder(_), 125 | ) 126 | } 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /circe/src/main/scala-3/morphling/circe/ToFilter.scala: -------------------------------------------------------------------------------- 1 | package morphling.circe 2 | 3 | import cats.* 4 | import cats.data.{Const, EitherK} 5 | import cats.free.* 6 | import cats.instances.function.* 7 | import cats.instances.option.* 8 | import cats.syntax.all.* 9 | import io.circe.{Json, JsonObject} 10 | import morphling.* 11 | import morphling.Schema.Schema 12 | import morphling.annotated.Schema.AnnotatedSchema 13 | import morphling.given 14 | import mouse.option.* 15 | 16 | trait ToFilter[S[_]] extends Serializable { 17 | def filter: S ~> Const[Json => Option[Json], *] 18 | 19 | extension [S[_], A](s: S[A])(using TF: ToFilter[S]) def jsonFilter: ToFilter.Subset[Json] = TF.filter(s).getConst 20 | } 21 | 22 | object ToFilter { 23 | type Subset[T] = T => Option[T] 24 | type JsonFilter[T] = Const[Subset[Json], T] 25 | 26 | def apply[P[_]](using tf: ToFilter[P]): ToFilter[P] = tf 27 | 28 | given [P[_]: ToFilter]: ToFilter[Schema[P, _]] = 29 | new ToFilter[Schema[P, _]] { 30 | override val filter: Schema[P, _] ~> JsonFilter = new (Schema[P, _] ~> JsonFilter) { 31 | override def apply[I](schema: Schema[P, I]): JsonFilter[I] = 32 | HFix.cataNT[[Y[_], Z] =>> SchemaF[P, Y, Z], JsonFilter](filterAlg[P]).apply(schema) 33 | } 34 | } 35 | 36 | given [P[_]: ToFilter, A[_]: [Y[_]] =>> Y ~> ([T] =>> Endo[JsonFilter[T]])]: ToFilter[AnnotatedSchema[P, A, *]] = 37 | new ToFilter[AnnotatedSchema[P, A, _]] { 38 | override val filter: AnnotatedSchema[P, A, _] ~> JsonFilter = new (AnnotatedSchema[P, A, _] ~> JsonFilter) { 39 | override def apply[I](schema: AnnotatedSchema[P, A, I]): JsonFilter[I] = 40 | HFix 41 | .cataNT[[Y1[_], Z1] =>> HEnvT[A, [Y[_], Z] =>> SchemaF[P, Y, Z], 
Y1, Z1], JsonFilter](annFilterAlg) 42 | .apply(schema) 43 | } 44 | } 45 | 46 | def filterAlg[P[_]: ToFilter]: HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], JsonFilter] = 47 | new HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], JsonFilter] { 48 | override def apply[I](schema: SchemaF[P, JsonFilter, I]): JsonFilter[I] = schema match { 49 | case s: PrimSchema[P, JsonFilter, I] => ToFilter[P].filter(s.prim) 50 | case s: OneOfSchema[P, JsonFilter, I] => 51 | Const.of { 52 | s.discriminator.cata( 53 | dField => 54 | s.alts.map { case Alt(_, f, _) => 55 | extractField(dField) |+| f.getConst 56 | }.fold, 57 | s.alts.map { case Alt(id, f, _) => 58 | extractFieldContentsStrict(id, f.getConst) 59 | }.fold 60 | ) 61 | } 62 | case s: RecordSchema[P, JsonFilter, I] => recordFilter[P, I](s.props) 63 | case s: IsoSchema[P, JsonFilter, i0, I] => s.base.retag[I] 64 | } 65 | } 66 | 67 | def annFilterAlg[P[_]: ToFilter, Ann[_]](implicit 68 | interpret: Ann ~> ([T] =>> Endo[JsonFilter[T]]) 69 | ): HAlgebra[[Y1[_], Z1] =>> HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], JsonFilter] = 70 | new HAlgebra[[Y1[_], Z1] =>> HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], JsonFilter] { 71 | override def apply[A](schema: HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], JsonFilter, A]): JsonFilter[A] = 72 | interpret.apply(schema.ask).apply(filterAlg[P].apply(schema.fa)) 73 | } 74 | 75 | def recordFilter[P[_]: ToFilter, I](rb: FreeApplicative[PropSchema[I, JsonFilter, _], I]): JsonFilter[I] = 76 | rb.foldMap[JsonFilter]( 77 | new (PropSchema[I, JsonFilter, _] ~> JsonFilter) { 78 | override def apply[B](ps: PropSchema[I, JsonFilter, B]): JsonFilter[B] = 79 | ps match { 80 | case req: Required[I, JsonFilter, i] => 81 | Const.of(extractFieldContentsStrict(req.fieldName, req.base.getConst)) 82 | case opt: Optional[I, JsonFilter, i] @unchecked => 83 | Const.of(extractFieldContents(opt.fieldName, opt.base.getConst)) 84 | case _ => Const.of(Monoid[Subset[Json]].empty) 85 | } 86 | } 87 | ) 88 | 89 | given [P[_]: 
ToFilter, Q[_]: ToFilter]: ToFilter[EitherK[P, Q, _]] = 90 | new ToFilter[EitherK[P, Q, _]] { 91 | override val filter = new (EitherK[P, Q, _] ~> JsonFilter) { 92 | def apply[A](p: EitherK[P, Q, A]): JsonFilter[A] = 93 | p.run.fold(ToFilter[P].filter(_), ToFilter[Q].filter(_)) 94 | } 95 | } 96 | 97 | private def extractField(name: String): Subset[Json] = { j => 98 | j.mapObject(_.filterKeys(_ == name)).asObject.filter(_.nonEmpty).map(Json.fromJsonObject) 99 | } 100 | 101 | private def extractFieldContents(name: String, inner: Subset[Json]): Subset[Json] = { j => 102 | j.mapObject(jo => 103 | JsonObject.fromIterable(jo.filterKeys(_ == name).toIterable.flatMap { case (k, v) => inner(v).map(k -> _) }) 104 | ).asObject 105 | .map(Json.fromJsonObject) 106 | } 107 | 108 | private def extractFieldContentsStrict(name: String, inner: Subset[Json]): Subset[Json] = { j => 109 | j.mapObject(jo => 110 | JsonObject.fromIterable(jo.filterKeys(_ == name).toIterable.flatMap { case (k, v) => inner(v).map(k -> _) }) 111 | ).asObject 112 | .filter(_.nonEmpty) 113 | .map(Json.fromJsonObject) 114 | } 115 | 116 | private given Semigroup[Json] = _ deepMerge _ 117 | 118 | private given Monoid[Subset[Json]] = new Monoid[Subset[Json]] { 119 | override val empty: Subset[Json] = _ => None 120 | 121 | override def combine(x: Subset[Json], y: Subset[Json]): Subset[Json] = 122 | x &&& y andThen { case (lhs, rhs) => lhs |+| rhs } 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /circe/src/main/scala-3/morphling/circe/ToJson.scala: -------------------------------------------------------------------------------- 1 | package morphling.circe 2 | 3 | import cats.* 4 | import cats.data.State.* 5 | import cats.data.{EitherK, State} 6 | import cats.free.* 7 | import io.circe.syntax.* 8 | import io.circe.{Encoder, Json, JsonObject} 9 | import morphling.* 10 | import morphling.Schema.* 11 | import morphling.annotated.Schema.AnnotatedSchema 12 | import 
mouse.option.* 13 | 14 | trait ToJson[S[_]] extends Serializable { 15 | def encoder: S ~> Encoder 16 | 17 | extension [F[_], A](fa: F[A])(using TJ: ToJson[F]) def encoder: Encoder[A] = TJ.encoder(fa) 18 | } 19 | 20 | object ToJson { 21 | def apply[P[_]](using tj: ToJson[P]): ToJson[P] = tj 22 | 23 | given [P[_]: ToJson]: ToJson[Schema[P, _]] = 24 | new ToJson[Schema[P, _]] { 25 | override val encoder: Schema[P, _] ~> Encoder = new (Schema[P, _] ~> Encoder) { 26 | override def apply[I](schema: Schema[P, I]): Encoder[I] = 27 | HFix.cataNT[[Y[_], Z] =>> SchemaF[P, Y, Z], Encoder](serializeAlg).apply(schema) 28 | } 29 | } 30 | 31 | given [P[_]: ToJson, A[_]]: ToJson[AnnotatedSchema[P, A, _]] = 32 | new ToJson[AnnotatedSchema[P, A, _]] { 33 | override val encoder: AnnotatedSchema[P, A, _] ~> Encoder = new (AnnotatedSchema[P, A, _] ~> Encoder) { 34 | override def apply[I](schema: AnnotatedSchema[P, A, I]): Encoder[I] = 35 | HFix 36 | .cataNT[[Y[_], Z] =>> SchemaF[P, Y, Z], Encoder](serializeAlg) 37 | .apply( 38 | HFix.forget[[Y[_], Z] =>> SchemaF[P, Y, Z], A].apply(schema) 39 | ) 40 | } 41 | } 42 | 43 | def serializeAlg[P[_]: ToJson]: HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], Encoder] = 44 | new HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], Encoder] { 45 | def apply[I](schema: SchemaF[P, Encoder, I]): Encoder[I] = 46 | schema match { 47 | case s: PrimSchema[P, Encoder, I] => ToJson[P].encoder(s.prim) 48 | 49 | case s: OneOfSchema[P, Encoder, I] => 50 | (value: I) => 51 | s.discriminator.cata( 52 | discriminator => 53 | s.alts 54 | .map { case alt @ Alt(id, base, prism) => 55 | prism.getOption(value).map(alt.base(_).mapObject((discriminator := alt.id) +: _)) 56 | } 57 | .collect { case Some(json) => json } 58 | .head, 59 | s.alts 60 | .map { case Alt(id, base, prism) => 61 | prism.getOption(value).map(base(_)).map(json => Json.obj(id -> json)) 62 | } 63 | .collect { case Some(json) => json } 64 | .head 65 | ) 66 | 67 | case s: RecordSchema[P, Encoder, I] => 68 | 
serializeObjF[P, I](s.props) 69 | 70 | case s: IsoSchema[P, Encoder, i0, I] => 71 | s.base.contramap(s.eqv.upcast(_)) 72 | } 73 | } 74 | 75 | def serializeObjF[P[_]: ToJson, I](rb: FreeApplicative[PropSchema[I, Encoder, _], I]): Encoder[I] = { (value: I) => 76 | Json.fromJsonObject( 77 | rb.foldMap[State[JsonObject, _]]( 78 | new (PropSchema[I, Encoder, _] ~> State[JsonObject, _]) { 79 | def apply[B](ps: PropSchema[I, Encoder, B]): State[JsonObject, B] = 80 | for { 81 | _ <- modify { (obj: JsonObject) => 82 | ps match { 83 | case req: Required[I, Encoder, i] => 84 | (req.fieldName, req.base(req.extract.extract(value))) +: obj 85 | 86 | case opt: Optional[I, Encoder, i] @unchecked => 87 | opt.extract.extract(value).cata(v => (opt.fieldName, opt.base(v)) +: obj, obj) 88 | 89 | case Constant(_, _, _) => obj 90 | 91 | case Absent(_, _) => obj 92 | } 93 | } 94 | } yield ps.extract.extract(value) 95 | } 96 | ).runS(JsonObject.empty) 97 | .value 98 | ) 99 | } 100 | 101 | given [P[_]: ToJson, Q[_]: ToJson]: ToJson[EitherK[P, Q, _]] = 102 | new ToJson[EitherK[P, Q, _]] { 103 | override val encoder = new (EitherK[P, Q, _] ~> Encoder) { 104 | def apply[A](p: EitherK[P, Q, A]): Encoder[A] = 105 | p.run.fold(ToJson[P].encoder(_), ToJson[Q].encoder(_)) 106 | } 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /circe/src/test/scala/morphling/circe/CircePack.scala: -------------------------------------------------------------------------------- 1 | package morphling.circe 2 | 3 | import cats.data.Const 4 | import cats.~> 5 | import io.circe.{Decoder, Encoder, Json} 6 | import morphling.protocol.* 7 | 8 | trait CircePack { 9 | def sTypeEncoder[F[_]: ToJson]: SType[F, *] ~> Encoder = 10 | new (SType[F, *] ~> Encoder) { 11 | import ToJson.* 12 | 13 | override def apply[A](st: SType[F, A]): Encoder[A] = st match { 14 | case SNullT() => Encoder.encodeUnit 15 | case SBoolT() => Encoder.encodeBoolean 16 | case SIntT() => 
Encoder.encodeInt 17 | case SLongT() => Encoder.encodeLong 18 | case SFloatT() => Encoder.encodeFloat 19 | case SDoubleT() => Encoder.encodeDouble 20 | case SCharT() => Encoder.encodeChar 21 | case SStrT() => Encoder.encodeString 22 | case sa: SArrayT[F, i] => Encoder.encodeVector(sa.elem.encoder) 23 | } 24 | } 25 | 26 | def sTypeDecoder[F[_]: FromJson]: SType[F, *] ~> Decoder = 27 | new (SType[F, *] ~> Decoder) { 28 | import FromJson.* 29 | 30 | override def apply[A](st: SType[F, A]): Decoder[A] = st match { 31 | case SNullT() => Decoder.decodeUnit 32 | case SBoolT() => Decoder.decodeBoolean 33 | case SIntT() => Decoder.decodeInt 34 | case SLongT() => Decoder.decodeLong 35 | case SFloatT() => Decoder.decodeFloat 36 | case SDoubleT() => Decoder.decodeDouble 37 | case SCharT() => Decoder.decodeChar 38 | case SStrT() => Decoder.decodeString 39 | case sa: SArrayT[F, i] => Decoder.decodeVector(sa.elem.decoder) 40 | } 41 | } 42 | 43 | def sTypeFilter[F[_]: ToFilter]: SType[F, *] ~> Const[Json => Option[Json], *] = 44 | new (SType[F, *] ~> Const[Json => Option[Json], *]) { 45 | import ToFilter.* 46 | 47 | override def apply[A](st: SType[F, A]): Const[Json => Option[Json], A] = Const.of(st match { 48 | case SNullT() => _.asNull.map(_ => Json.Null) 49 | case SBoolT() => _.asBoolean.map(Json.fromBoolean) 50 | case SIntT() => _.asNumber.map(Json.fromJsonNumber) 51 | case SLongT() => _.asNumber.map(Json.fromJsonNumber) 52 | case SFloatT() => _.asNumber.map(Json.fromJsonNumber) 53 | case SDoubleT() => _.asNumber.map(Json.fromJsonNumber) 54 | case SCharT() => _.asString.map(Json.fromString) 55 | case SStrT() => _.asString.map(Json.fromString) 56 | case sa: SArrayT[F, i] => _.asArray.map(_.flatMap(sa.elem.jsonFilter.apply)).map(Json.fromValues) 57 | }) 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /circe/src/test/scala/morphling/circe/CirceSpec.scala: 
package morphling.circe

import cats.scalatest.{EitherValues, ValidatedValues}
import io.circe.syntax.*
import io.circe.{Encoder, Json}
import morphling.circe.FromJson.*
import morphling.circe.Implicits.*
import morphling.circe.ToJson.*
import morphling.samples.*
import morphling.scalacheck.Implicits.*
import morphling.scalacheck.ToGen.*
import org.scalacheck.Arbitrary
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.Checkers

/** Round-trip and fixture tests for the circe interpreters of the `Person` schemas. */
class CirceSpec extends AnyFunSuite with Matchers with EitherValues with ValidatedValues with Checkers {

  /** Expected JSON for `person` under the nested (wrapper-object) sum-type encoding. */
  private def nestedPersonJson: Json =
    Json.obj(
      "updateCounter" := 42,
      "roles" := Seq(
        Json.obj(
          "administrator" -> Json.obj(
            "subordinateCount" := 0,
            "department" := "windmill-tilting"
          )
        )
      ),
      "birthDate" := 20147028000L,
      "name" := "Kris Nuttycombe"
    )

  /** Expected JSON for `person` under the flat (discriminator-field) sum-type encoding. */
  private def flatPersonJson: Json =
    Json.obj(
      "updateCounter" := 42,
      "roles" := Seq(
        Json.obj(
          "type" := "administrator",
          "subordinateCount" := 0,
          "department" := "windmill-tilting"
        )
      ),
      "birthDate" := 20147028000L,
      "name" := "Kris Nuttycombe"
    )

  /** Drops the `updateCounter` field so default-value handling can be exercised. */
  private def withoutUpdateCounter(json: Json): Json =
    json.mapObject(_.filterKeys(_ != "updateCounter"))

  test("A value should serialise to JSON") {
    implicit val encoder: Encoder[Person] = Person.schema.encoder

    person.asJson shouldBe nestedPersonJson
  }

  test("A value should serialise to JSON [deannotated]") {
    implicit val encoder: Encoder[Person] = Person.deannotatedSchema.encoder

    person.asJson shouldBe nestedPersonJson
  }

  test("A value should be deserialised from JSON") {
    implicit val encoder: Encoder[Person] = Person.schema.encoder
    val personDecoder                     = Person.schema.decoder

    personDecoder.decodeJson(person.asJson).value shouldBe person.copy(stamp = 101)
    personDecoder.decodeAccumulating(person.asJson.hcursor).value shouldBe person.copy(stamp = 101)
  }

  test("A value should be deserialised from JSON [deannotated]") {
    implicit val encoder: Encoder[Person] = Person.deannotatedSchema.encoder
    val personDecoder                     = Person.deannotatedSchema.decoder

    personDecoder.decodeJson(person.asJson).value shouldBe person.copy(stamp = 101)
    personDecoder.decodeAccumulating(person.asJson.hcursor).value shouldBe person.copy(stamp = 101)
  }

  test("A default value should be applied during deserialization") {
    implicit val encoder: Encoder[Person] = Person.schema.encoder
    val personDecoder                     = Person.schema.decoder
    val expected                          = person.copy(updateCounter = 0, stamp = 101)

    personDecoder.decodeJson(withoutUpdateCounter(person.asJson)).value shouldBe expected
    personDecoder.decodeAccumulating(withoutUpdateCounter(person.asJson).hcursor).value shouldBe expected
  }

  test("A default value should be applied during deserialization [deannotated]") {
    implicit val encoder: Encoder[Person] = Person.deannotatedSchema.encoder
    val personDecoder                     = Person.deannotatedSchema.decoder
    val expected                          = person.copy(updateCounter = 0, stamp = 101)

    personDecoder.decodeJson(withoutUpdateCounter(person.asJson)).value shouldBe expected
    personDecoder.decodeAccumulating(withoutUpdateCounter(person.asJson).hcursor).value shouldBe expected
  }

  test("Serialization should round-trip values produced by a generator") {
    implicit val arbPerson: Arbitrary[Person] = Arbitrary(Person.schema.gen)
    implicit val encoder: Encoder[Person]     = Person.schema.encoder
    val personDecoder                         = Person.schema.decoder
    check((p: Person) => personDecoder.decodeJson(p.asJson).toOption.contains(p))
    check((p: Person) => personDecoder.decodeAccumulating(p.asJson.hcursor).toOption.contains(p))
  }

  test("Serialization should round-trip values produced by a generator [deannotated]") {
    implicit val arbPerson: Arbitrary[Person] = Arbitrary(Person.deannotatedSchema.gen)
    implicit val encoder: Encoder[Person]     = Person.deannotatedSchema.encoder
    val personDecoder                         = Person.deannotatedSchema.decoder
    check((p: Person) => personDecoder.decodeJson(p.asJson).toOption.contains(p))
    check((p: Person) => personDecoder.decodeAccumulating(p.asJson.hcursor).toOption.contains(p))
  }

  test("A value should serialize to JSON flat") {
    implicit val encoder: Encoder[Person] = Person.flatSchema.encoder

    person.asJson shouldBe flatPersonJson
  }

  test("A value should be deserialized from JSON flat") {
    implicit val encoder: Encoder[Person] = Person.flatSchema.encoder
    val personDecoder                     = Person.flatSchema.decoder

    personDecoder.decodeJson(person.asJson).value shouldBe person.copy(stamp = 101)
  }

  test("Flat serialization should round-trip values produced by a generator") {
    implicit val arbPerson: Arbitrary[Person] = Arbitrary(Person.flatSchema.gen)
    implicit val encoder: Encoder[Person]     = Person.flatSchema.encoder
    val personDecoder                         = Person.flatSchema.decoder
    check((p: Person) => personDecoder.decodeJson(p.asJson).toOption.contains(p))
  }
}
package morphling.circe

import cats.*
import io.circe.{Decoder, Encoder}
import morphling.circe.ToFilter.JsonFilter
import morphling.protocol.SType.SSchema

/**
 * Interpreters for the test primitive algebra `SSchema`, each delegating to the
 * generic `sType*` interpreters inherited from [[CircePack]].
 */
object Implicits extends CircePack {

  /** Circe encoding of the `SSchema` primitives. */
  implicit val primToJson: ToJson[SSchema] = new ToJson[SSchema] {
    val encoder: SSchema ~> Encoder = new (SSchema ~> Encoder) {
      override def apply[I](schema: SSchema[I]): Encoder[I] =
        sTypeEncoder[SSchema[I]#Inner].apply(schema.unmutu)
    }
  }

  /** Circe decoding of the `SSchema` primitives. */
  implicit val primFromJson: FromJson[SSchema] = new FromJson[SSchema] {
    val decoder: SSchema ~> Decoder = new (SSchema ~> Decoder) {
      override def apply[I](schema: SSchema[I]): Decoder[I] =
        sTypeDecoder[SSchema[I]#Inner].apply(schema.unmutu)
    }
  }

  /** JSON filtering of the `SSchema` primitives. */
  implicit val primToFilter: ToFilter[SSchema] = new ToFilter[SSchema] {
    val filter: SSchema ~> JsonFilter = new (SSchema ~> JsonFilter) {
      override def apply[I](schema: SSchema[I]): JsonFilter[I] =
        sTypeFilter[SSchema[I]#Inner].apply(schema.unmutu)
    }
  }
}
package morphling.circe

import io.circe.syntax.*
import io.circe.{Encoder, Json}
import morphling.circe.Implicits.*
import morphling.circe.ToFilter.*
import morphling.circe.ToJson.*
import morphling.samples.*
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

/** Verifies that schema-derived JSON filters strip fields the schema does not describe. */
class JsonFilterSpec extends AnyFunSuite with Matchers {

  test("Filter should keep correct values as-is") {
    implicit val encoder: Encoder[Person] = Person.schema.encoder
    val personFilter                      = Person.schema.jsonFilter

    personFilter(person.asJson) shouldBe Some(person.asJson)
  }

  test("Filter should discard all unrelated data") {
    implicit val encoder: Encoder[Person] = Person.schema.encoder
    val personFilter                      = Person.schema.jsonFilter

    // `person` serialised with junk fields injected at several nesting levels.
    val noisyJson = Json.obj(
      "updateCounter" := 42,
      "roles" := Seq(
        Json.obj(
          "administrator" -> Json.obj(
            "subordinateCount" := 0,
            "department" := "windmill-tilting",
            "foo" := "bar"
          ),
          "val" := 42
        )
      ),
      "birthDate" := 20147028000L,
      "name" := "Kris Nuttycombe",
      "peka" := "yoba"
    )

    personFilter(noisyJson) shouldBe Some(person.asJson)
  }

  test("Filter should discard all unrelated data with flat schema") {
    implicit val encoder: Encoder[Person] = Person.flatSchema.encoder
    val personFilter                      = Person.flatSchema.jsonFilter

    val noisyJson = Json.obj(
      "updateCounter" := 42,
      "roles" := Seq(
        Json.obj(
          "type" := "administrator",
          "subordinateCount" := 0,
          "department" := "windmill-tilting",
          "val" := 42
        )
      ),
      "birthDate" := 20147028000L,
      "name" := "Kris Nuttycombe",
      "peka" := "yoba"
    )

    personFilter(noisyJson) shouldBe Some(person.asJson)
  }
}
package morphling.circe.annotated

import cats.scalatest.{EitherValues, ValidatedValues}
import io.circe.syntax.*
import io.circe.{Encoder, Json}
import morphling.circe.FromJson.*
import morphling.circe.ToJson.*
import morphling.circe.annotated.Implicits.*
import morphling.samples.annotated.{AnnPerson, Server}
import morphling.samples.{Person, person}
import morphling.scalacheck.ToGen.*
import morphling.scalacheck.annotated.Implicits.*
import org.scalacheck.Arbitrary
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.Checkers

/** Tests for the circe interpreters of the annotated `AnnPerson`/`Server` schemas. */
class CirceAnnotatedSpec extends AnyFunSuite with Matchers with EitherValues with ValidatedValues with Checkers {
  // Symbolic matcher: `result shouldBe left` asserts `result.isLeft`.
  private val left = Symbol("left")

  test("A value should serialise to JSON") {
    implicit val encoder: Encoder[Person] = AnnPerson.schema.encoder

    person.asJson shouldBe Json.obj(
      "updateCounter" := 42,
      "roles" := Seq(
        Json.obj(
          "administrator" -> Json.obj(
            "subordinateCount" := 0,
            "department" := "windmill-tilting"
          )
        )
      ),
      "birthDate" := 20147028000L,
      "name" := "Kris Nuttycombe"
    )
  }

  test("A value should be deserialised from JSON") {
    implicit val encoder: Encoder[Person] = AnnPerson.schema.encoder
    val decoder                           = AnnPerson.schema.decoder

    decoder.decodeJson(person.asJson).value shouldBe person.copy(stamp = 101)
    decoder.decodeAccumulating(person.asJson.hcursor).value shouldBe person.copy(stamp = 101)
  }

  test("Serialization should round-trip values produced by a generator") {
    implicit val arbPerson: Arbitrary[Person] = Arbitrary(AnnPerson.schema.gen)
    implicit val encoder: Encoder[Person]     = AnnPerson.schema.encoder
    val decoder                               = AnnPerson.schema.decoder
    check { (p: Person) =>
      // `.contains` rather than `== Some(p)`: same result, idiomatic, and
      // consistent with the equivalent round-trip checks in CirceSpec.
      decoder.decodeJson(p.asJson).toOption.contains(p)
    }
    check { (p: Person) =>
      decoder.decodeAccumulating(p.asJson.hcursor).toOption.contains(p)
    }
  }

  test("Deserialization should fail if some value does not fit limitations") {
    val decoder = Server.schema.decoder

    // Port restriction is annotated as an exclusive range; both bounds must be rejected.
    decoder.decodeJson(Json.obj("host" := "peka.com", "port" := 0)) shouldBe left
    decoder.decodeJson(Json.obj("host" := "peka.com", "port" := 70000)) shouldBe left
  }
}
package morphling.circe.annotated

import cats.*
import cats.data.{Const, Kleisli}
import cats.instances.option.*
import io.circe.{Decoder, Encoder, Json}
import morphling.circe.{CircePack, FromJson, ToFilter, ToJson}
import morphling.protocol.annotated.STypeAnn.ASchema
import morphling.protocol.annotated.{Non, Range, Restriction}

/**
 * Circe interpreters for the annotated primitive algebra `ASchema`, plus the
 * natural transformations that apply [[Restriction]] annotations to decoders
 * and JSON filters.
 */
object Implicits extends CircePack {

  /** Interprets a [[Restriction]] as an endomorphism on a decoder, narrowing
    * the accepted values (a `Range` becomes two `ensure` checks).
    */
  implicit val decoderRestriction: (Restriction ~> λ[T => Endo[Decoder[T]]]) =
    new (Restriction ~> λ[T => Endo[Decoder[T]]]) {
      override def apply[A](restriction: Restriction[A]): Endo[Decoder[A]] = restriction match {
        case Non() => identity
        case Range(from, to) =>
          (intDecoder: Decoder[Int]) =>
            intDecoder
              .ensure(_ > from, s"Value should be greater than $from")
              .ensure(_ < to, s"Value should be less than $to")
      }
    }

  /** Circe encoding of the `ASchema` primitives. */
  implicit val primToJson: ToJson[ASchema] = new ToJson[ASchema] {
    val encoder: ASchema ~> Encoder = new (ASchema ~> Encoder) {
      override def apply[I](schema: ASchema[I]): Encoder[I] =
        sTypeEncoder[ASchema[I]#Inner].apply(schema.unmutu)
    }
  }

  /** Circe decoding of the `ASchema` primitives. */
  implicit val primFromJson: FromJson[ASchema] = new FromJson[ASchema] {
    val decoder: ASchema ~> Decoder = new (ASchema ~> Decoder) {
      override def apply[I](schema: ASchema[I]): Decoder[I] =
        sTypeDecoder[ASchema[I]#Inner].apply(schema.unmutu)
    }
  }

  /** JSON filtering of the `ASchema` primitives. */
  implicit val primToFilter: ToFilter[ASchema] = new ToFilter[ASchema] {
    val filter: ASchema ~> ToFilter.JsonFilter = new (ASchema ~> ToFilter.JsonFilter) {
      override def apply[I](schema: ASchema[I]): ToFilter.JsonFilter[I] =
        sTypeFilter[ASchema[I]#Inner].apply(schema.unmutu)
    }
  }

  /** Interprets a [[Restriction]] as an endomorphism on a JSON filter: numbers
    * outside the annotated range are filtered out entirely.
    */
  implicit val filterRestriction: (Restriction ~> λ[T => Endo[ToFilter.JsonFilter[T]]]) =
    new (Restriction ~> λ[T => Endo[ToFilter.JsonFilter[T]]]) {
      override def apply[A](restriction: Restriction[A]): Endo[ToFilter.JsonFilter[A]] = restriction match {
        case Non() => identity
        case Range(from, to) =>
          (jsonFilter: ToFilter.JsonFilter[Int]) =>
            Const.of[Int](
              Kleisli(jsonFilter.getConst)
                .andThen((json: Json) =>
                  json.asNumber
                    .filter(_.toInt.exists(n => n > from && n < to))
                    .map(Json.fromJsonNumber)
                )
                .run
            )
      }
    }
}
package morphling.circe.annotated

import io.circe.syntax.*
import io.circe.{Encoder, Json}
import morphling.circe.ToFilter.*
import morphling.circe.ToJson.*
import morphling.circe.annotated.Implicits.*
import morphling.samples.annotated.AnnPerson
import morphling.samples.{Person, person}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

/** Verifies that filters derived from the annotated schema strip undescribed fields. */
class JsonFilterAnnotatedSpec extends AnyFunSuite with Matchers {

  test("Filter should keep correct values as-is") {
    implicit val encoder: Encoder[Person] = AnnPerson.schema.encoder
    val personFilter                      = AnnPerson.schema.jsonFilter

    personFilter(person.asJson) shouldBe Some(person.asJson)
  }

  test("Filter should discard all unrelated data") {
    implicit val encoder: Encoder[Person] = AnnPerson.schema.encoder
    val personFilter                      = AnnPerson.schema.jsonFilter

    // `person` serialised with junk fields injected at several nesting levels.
    val noisyJson = Json.obj(
      "updateCounter" := 42,
      "roles" := Seq(
        Json.obj(
          "administrator" -> Json.obj(
            "subordinateCount" := 0,
            "department" := "windmill-tilting",
            "foo" := "bar"
          ),
          "val" := 42
        )
      ),
      "birthDate" := 20147028000L,
      "name" := "Kris Nuttycombe",
      "peka" := "yoba"
    )

    personFilter(noisyJson) shouldBe Some(person.asJson)
  }
}
package morphling

import cats.data.NonEmptyList
import cats.syntax.list.*
import shapeless.ops.coproduct.ToHList
import shapeless.ops.hlist.{Align, Comapped, ToTraversable}
import shapeless.{Prism as _, *}

import scala.annotation.implicitNotFound

/**
 * Implicit proof type
 */
@implicitNotFound(msg = "Cannot prove the completeness of your oneOf definition; you may have not provided an alternative for each constructor of your sum type ${I}")
sealed trait Constructors[I, F[_], H <: HList] {
  def toNel(h: H): NonEmptyList[F[?]]
}

object Constructors {

  /** The proof holds when the HList `H` of `F`-wrapped alternatives aligns
    * one-to-one with the constructors of the sum type `I`:
    * `Generic` + `ToHList` enumerate `I`'s constructors, `Comapped` strips the
    * `F` wrapper from `H`, and `Align` matches the two element-for-element.
    */
  implicit def evidence[I, F[_], C <: Coproduct, H0 <: HList, H1 <: HList, H <: HList](implicit
    G: Generic.Aux[I, C],
    L: ToHList.Aux[C, H1],
    M: Comapped.Aux[H, F, H0],
    A: Align[H0, H1],
    T: ToTraversable.Aux[H, List, F[?]]
  ): Constructors[I, F, H] = new Constructors[I, F, H] {
    def toNel(alternatives: H): NonEmptyList[F[?]] =
      // Safe: a sum type has at least one constructor, so the aligned HList
      // is provably non-empty by the time this evidence is summoned.
      alternatives.toList.toNel.get
  }
}
/**
 * Fixpoint data type that can preserve a type index through its recursive step.
 */
final case class HFix[F[_[_], _], I](unfix: Eval[F[HFix[F, *], I]])

object HFix {
  import HFunctor.*

  /** Lazily-evaluating smart constructor for [[HFix]] values. */
  def hfix[F[_[_], _], I](fa: => F[HFix[F, *], I]): HFix[F, I] =
    HFix[F, I](Later(fa))

  /** Catamorphism: folds an [[HFix]] bottom-up with the supplied algebra. */
  def cataNT[F[_[_], _]: HFunctor, G[_]](alg: HAlgebra[F, G]): HFix[F, *] ~> G =
    new (HFix[F, *] ~> G) { self =>
      def apply[I](fixed: HFix[F, I]): G[I] =
        alg.apply[I](fixed.unfix.value.hfmap[G](self))
    }

  /** Anamorphism: unfolds a `G` top-down into an [[HFix]] with the supplied coalgebra. */
  def anaNT[F[_[_], _]: HFunctor, G[_]](coalg: HCoAlgebra[F, G]): G ~> HFix[F, *] =
    new (G ~> HFix[F, *]) { self =>
      override def apply[I](ga: G[I]): HFix[F, I] =
        hfix(coalg.apply[I](ga).hfmap(self))
    }

  /** Smart constructor for HCofree values. */
  def hcofree[F[_[_], _], A[_], I](ask: A[I], fga: => F[HCofree[F, A, *], I]): HCofree[F, A, I] =
    hfix[HEnvT[A, F, *[_], *], I](HEnvT(ask, fga))

  /**
   * Algebra to discard the annotations from an HCofree structure.
   */
  def forgetAlg[F[_[_], _], A[_]]: HEnvT[A, F, HFix[F, *], *] ~> HFix[F, *] =
    new HAlgebra[HEnvT[A, F, *[_], *], HFix[F, *]] {
      def apply[I](env: HEnvT[A, F, HFix[F, *], I]): HFix[F, I] = hfix(env.fa)
    }

  /** Strips every annotation from an HCofree value, leaving the bare fixpoint. */
  def forget[F[_[_], _]: HFunctor, A[_]]: HCofree[F, A, *] ~> HFix[F, *] = cataNT(forgetAlg)

  /**
   * Algebra to annotate the whole HCofree with a same annotation
   */
  def annotateAlg[F[_[_], _], A[_]](ann: A[Nothing]): HFix[F, *] ~> HEnvT[A, F, HFix[F, *], *] =
    new HCoAlgebra[HEnvT[A, F, *[_], *], HFix[F, *]] {
      override def apply[T](fixed: HFix[F, T]): HEnvT[A, F, HFix[F, *], T] =
        // NOTE(review): the cast widens `A[Nothing]` to `A[T]`; presumably `A` is
        // only used covariantly by callers — confirm before changing annotation types.
        HEnvT[A, F, HFix[F, *], T](ann.asInstanceOf[A[T]], fixed.unfix.value)
    }

  /** Attaches the same annotation to every layer of the fixpoint. */
  def annotate[F[_[_], _]: HFunctor, A[_]](ann: A[Nothing]): HFix[F, *] ~> HCofree[F, A, *] = anaNT(annotateAlg(ann))

  /** HFunctor over the annotation type of an HCofree value */
  implicit def hCoFreeHFunctor[F[_[_], _]](implicit HF: HFunctor[F]): HFunctor[HCofree[F, *[_], *]] =
    new HFunctor[HCofree[F, *[_], *]] {
      override def hlift[M[_], N[_]](nt: M ~> N): HCofree[F, M, *] ~> HCofree[F, N, *] =
        new (HCofree[F, M, *] ~> HCofree[F, N, *]) {
          override def apply[I](hc: HCofree[F, M, I]): HCofree[F, N, I] = {
            // Transform the annotation at this layer, then recurse through `F`.
            val step = hc.unfix.value
            hcofree(nt.apply(step.ask), HF.hlift(hCoFreeHFunctor[F].hlift(nt)).apply(step.fa))
          }
        }
    }
}

/** Mutual recursion between two higher-kinded functors `F` and `G`. */
final case class HMutu[F[_[_], _], G[_[_], _], I](unmutu: F[HMutu.Inner[F, G]#IAux, I]) {
  type Inner[T] = G[HMutu.Aux[F, G]#Aux, T]

  def transformInner[H[_[_], _]](f: Inner ~> H[HMutu[F, H, *], *])(implicit hfg: HFunctor[F]): HMutu[F, H, I] =
    HMutu(hfg.hlift(f)(unmutu))
}

object HMutu {
  // Structural type aliases standing in for type lambdas (Scala 2 encoding).
  type Aux[F[_[_], _], G[_[_], _]] = {
    type Aux[I] = HMutu[F, G, I]
  }

  type Inner[F[_[_], _], G[_[_], _]] = {
    type IAux[I] = G[Aux[F, G]#Aux, I]
  }
}

/** A functor layer `F[G, I]` paired with an annotation `E[I]` for that layer. */
final case class HEnvT[E[_], F[_[_], _], G[_], I](ask: E[I], fa: F[G, I])

object HEnvT {
  import HFunctor.*

  implicit def hEnvTHFunctor[E[_], F[_[_], _]: HFunctor]: HFunctor[HEnvT[E, F, *[_], *]] =
    new HFunctor[HEnvT[E, F, *[_], *]] {
      def hlift[M[_], N[_]](nt: M ~> N): HEnvT[E, F, M, *] ~> HEnvT[E, F, N, *] =
        new (HEnvT[E, F, M, *] ~> HEnvT[E, F, N, *]) {
          def apply[I](fm: HEnvT[E, F, M, I]): HEnvT[E, F, N, I] = HEnvT(fm.ask, fm.fa.hfmap[N](nt))
        }
    }
}
/**
 * The base trait for the schema GADT.
 *
 * @tparam P
 *   the GADT type constructor defining the set of primitive types used in the schema
 * @tparam F
 *   the functor through which the schema structure is interpreted — almost always a
 *   fixpoint type such as [[morphling.HFix.HCofree]], which introduces the ability
 *   to create recursive (tree-structured) schemas
 * @tparam I
 *   the "index" type: the Scala value produced (or consumed) by interpreters of the schema
 */
sealed trait SchemaF[P[_], F[_], I] {

  /**
   * HFunctor operation transforming the interpretation functor. Defined directly on
   * the trait (rather than only via [[SchemaF.schemaFHFunctor]]) to simplify type
   * inference.
   */
  def hfmap[G[_]](nt: F ~> G): SchemaF[P, G, I]

  /**
   * HFunctor operation transforming the primitive algebra of the schema. Defined
   * directly on the trait to simplify type inference.
   */
  def pmap[Q[_]](nt: P ~> Q): SchemaF[Q, F, I]
}

object SchemaF {
  implicit def schemaFHFunctor[P[_]]: HFunctor[SchemaF[P, *[_], *]] = new HFunctor[SchemaF[P, *[_], *]] {
    def hlift[M[_], N[_]](nt: M ~> N): SchemaF[P, M, *] ~> SchemaF[P, N, *] =
      new (SchemaF[P, M, *] ~> SchemaF[P, N, *]) {
        def apply[I](fa: SchemaF[P, M, I]): SchemaF[P, N, I] = fa.hfmap(nt)
      }
  }
}

/**
 * Schema constructor wrapping a value of the underlying GADT of allowed primitive
 * types (see [[morphling.protocol.SType]] for an example). Parsing/serialization of
 * these values is deferred to an external handler by interpreters.
 *
 * @param prim
 *   value identifying a primitive type.
 */
final case class PrimSchema[P[_], F[_], I](prim: P[I]) extends SchemaF[P, F, I] {
  def hfmap[G[_]](nt: F ~> G): PrimSchema[P, G, I] = PrimSchema[P, G, I](prim)
  def pmap[Q[_]](nt: P ~> Q): PrimSchema[Q, F, I]  = PrimSchema[Q, F, I](nt(prim))
}

/**
 * Schema constructor for sum types. Each constructor of the sum type `I` is
 * represented as one member of the list of alternatives; each alternative defines
 * a prism between a single constructor of `I` and the underlying type describing
 * that constructor's arguments.
 *
 * @param alts
 *   the non-empty list of alternatives
 * @param discriminator
 *   optional discriminator field name for flat encodings of the sum type
 */
final case class OneOfSchema[P[_], F[_], I](alts: NonEmptyList[Alt[F, I, ?]], discriminator: Option[String] = None)
    extends SchemaF[P, F, I] {
  def hfmap[G[_]](nt: F ~> G): OneOfSchema[P, G, I] = OneOfSchema[P, G, I](alts.map(_.hfmap(nt)), discriminator)
  def pmap[Q[_]](nt: P ~> Q): OneOfSchema[Q, F, I]  = OneOfSchema[Q, F, I](alts, discriminator)
}

/**
 * A single alternative of a sum type: a subset (prism) between the sum type `I` and
 * the argument type `I0` of one of its constructors, together with the schema used
 * to describe those constructor arguments. The identifier distinguishes which
 * constructor is represented in the serialized form.
 *
 * @param id
 *   The unique identifier of the constructor
 * @param base
 *   The schema for the `I0` type
 * @param subset
 *   Subset between the sum type and the selected constructor.
 */
final case class Alt[F[_], I, I0](id: String, base: F[I0], subset: Subset[I, I0]) {
  def hfmap[G[_]](nt: F ~> G): Alt[G, I, I0] = Alt(id, nt(base), subset)
}

/**
 * Schema constructor for product types: wraps the free applicative structure used
 * to construct and disassemble values of record types.
 *
 * @param props
 *   the free applicative value composed of zero or more PropSchema instances
 */
final case class RecordSchema[P[_], F[_], I](props: FreeApplicative[PropSchema[I, F, *], I]) extends SchemaF[P, F, I] {
  def hfmap[G[_]](nt: F ~> G): RecordSchema[P, G, I] =
    RecordSchema[P, G, I](props.compile[PropSchema[I, G, *]](PropSchema.propSchemaHFunctor[I].hlift[F, G](nt)))
  def pmap[Q[_]](nt: P ~> Q): RecordSchema[Q, F, I] = RecordSchema[Q, F, I](props)
}

/**
 * Base trait for values which describe record properties.
 *
 * @tparam O
 *   The record type.
 * @tparam F
 *   The functor through which the schema structure is interpreted.
 * @tparam I
 *   The type of the property value.
 */
sealed trait PropSchema[O, F[_], I] {
  def fieldName: String
  def extract: Extract[O, I]

  def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, I]
}

/**
 * A required property of a record.
 *
 * @param fieldName
 *   The name of the property.
 * @param base
 *   Schema for the property's value type.
 * @param extract
 *   Extract lens from the record type to the property.
 * @param default
 *   Optional default value, used when a serialized form is missing the property.
 */
final case class Required[O, F[_], I](
  fieldName: String,
  base: F[I],
  extract: Extract[O, I],
  default: Option[I]
) extends PropSchema[O, F, I] {
  def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, I] =
    Required(fieldName, nt(base), extract, default)
}

/**
 * An optional property of a record. A distinct type (rather than `Required` over an
 * `Option`) is needed because many serialized forms omit optional properties
 * entirely, and interpreters must be able to represent that absence.
 *
 * @param fieldName
 *   The name of the property.
 * @param base
 *   Schema for the property's value type.
 * @param extract
 *   Extract lens from the record type to the property.
 */
final case class Optional[O, F[_], I](
  fieldName: String,
  base: F[I],
  extract: Extract[O, Option[I]]
) extends PropSchema[O, F, Option[I]] {
  def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, Option[I]] =
    Optional(fieldName, nt(base), extract)
}

/**
 * An optional property of a record that is always absent.
 *
 * @param fieldName
 *   The name of the property.
 * @param extract
 *   Extract lens from the record type to the property.
 */
final case class Absent[O, F[_], I](
  fieldName: String,
  extract: Extract[O, Option[I]]
) extends PropSchema[O, F, Option[I]] {
  def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, Option[I]] =
    Absent(fieldName, extract)
}

/**
 * A constant (non-serializable) property of a record.
 *
 * @param fieldName
 *   The name of the property.
 * @param value
 *   The value of the property.
 * @param extract
 *   Extract lens from the record type to the property.
 */
final case class Constant[O, F[_], I](
  fieldName: String,
  value: I,
  extract: Extract[O, I]
) extends PropSchema[O, F, I] {
  override def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, I] =
    // Contains no `F` values, so re-interpreting the functor is an identity; the
    // cast avoids an allocation.
    this.asInstanceOf[PropSchema[O, G, I]]
}

object PropSchema {
  implicit def propSchemaHFunctor[O]: HFunctor[PropSchema[O, *[_], *]] =
    new HFunctor[PropSchema[O, *[_], *]] {
      def hlift[M[_], N[_]](nt: M ~> N): PropSchema[O, M, *] ~> PropSchema[O, N, *] =
        new (PropSchema[O, M, *] ~> PropSchema[O, N, *]) {
          def apply[I](ps: PropSchema[O, M, I]): PropSchema[O, N, I] = ps.hfmap(nt)
        }
    }

  private def extract[A, B](f: A => B): Extract[A, B] = (s: A) => f(s)

  /** Contravariantly re-targets property schemas from record type `O` to `N`. */
  def contraNT[O, N, F[_]](f: N => O): PropSchema[O, F, *] ~> PropSchema[N, F, *] =
    new (PropSchema[O, F, *] ~> PropSchema[N, F, *]) {
      def apply[I](pso: PropSchema[O, F, I]): PropSchema[N, F, I] =
        pso match {
          case Required(n, s, g, d)   => Required(n, s, extract(f) >> g, d)
          case opt: Optional[O, F, i] => Optional(opt.fieldName, opt.base, extract(f) >> opt.extract)
          case Constant(fn, v, g)     => Constant(fn, v, extract(f) >> g)
          case abs: Absent[O, F, i]   => Absent(abs.fieldName, extract(f) >> abs.extract)
        }
    }
}

/** Schema for a type `J` represented via an equivalent underlying type `I`. */
case class IsoSchema[P[_], F[_], I, J](base: F[I], eqv: Equivalent[I, J]) extends SchemaF[P, F, J] {
  def hfmap[G[_]](nt: F ~> G): IsoSchema[P, G, I, J] = IsoSchema(nt(base), eqv)
  def pmap[Q[_]](nt: P ~> Q): IsoSchema[Q, F, I, J]  = IsoSchema(base, eqv)
}
type HCofree[F[_[_], _], A[_], I] = HFix[HEnvT[A, F, *[_], *], I] 8 | } 9 | -------------------------------------------------------------------------------- /core/src/main/scala-3/morphling/HFunctor.scala: -------------------------------------------------------------------------------- 1 | package morphling 2 | 3 | import cats.* 4 | 5 | type HAlgebra[F[_[_], _], G[_]] = F[G, _] ~> G 6 | type HCoAlgebra[F[_[_], _], G[_]] = G ~> F[G, _] 7 | 8 | type HCofree[F[_[_], _], A[_], I] = HFix[[Y[_], Z] =>> HEnvT[A, F, Y, Z], I] 9 | 10 | trait HFunctor[F[_[_], _]] { 11 | def hlift[M[_], N[_]](nt: M ~> N): F[M, _] ~> F[N, _] 12 | 13 | extension [M[_], I](fa: F[M, I])(using HF: HFunctor[F]) def hfmap[N[_]](nt: M ~> N): F[N, I] = HF.hlift(nt)(fa) 14 | } 15 | 16 | object HFunctor { 17 | def apply[F[_[_], _]](using hf: HFunctor[F]): HFunctor[F] = hf 18 | } 19 | 20 | /** 21 | * Fixpoint data type that can preserve a type index through its recursive step. 22 | */ 23 | final case class HFix[F[_[_], _], I](unfix: Eval[F[HFix[F, _], I]]) 24 | 25 | object HFix { 26 | import HFunctor.* 27 | 28 | def hfix[F[_[_], _], I](fa: => F[HFix[F, _], I]): HFix[F, I] = 29 | HFix[F, I](Later(fa)) 30 | 31 | def cataNT[F[_[_], _]: HFunctor, G[_]](alg: HAlgebra[F, G]): HFix[F, _] ~> G = 32 | new ((HFix[F, _]) ~> G) { self => 33 | def apply[I](f: HFix[F, I]): G[I] = 34 | alg.apply[I](f.unfix.value.hfmap[G](self)) 35 | } 36 | 37 | def anaNT[F[_[_], _]: HFunctor, G[_]](alg: HCoAlgebra[F, G]): G ~> HFix[F, _] = 38 | new (G ~> (HFix[F, _])) { self => 39 | override def apply[I](fa: G[I]): HFix[F, I] = 40 | hfix(alg.apply[I](fa).hfmap(self)) 41 | } 42 | 43 | /** Smart constructor for HCofree values. */ 44 | def hcofree[F[_[_], _], A[_], I](ask: A[I], fga: => F[HCofree[F, A, _], I]): HCofree[F, A, I] = 45 | hfix[[Y[_], Z] =>> HEnvT[A, F, Y, Z], I](HEnvT(ask, fga)) 46 | 47 | /** 48 | * Algebra to discard the annotations from an HCofree structure. 
49 | */ 50 | def forgetAlg[F[_[_], _], A[_]]: HEnvT[A, F, HFix[F, _], _] ~> HFix[F, _] = 51 | new HAlgebra[[Y[_], Z] =>> HEnvT[A, F, Y, Z], HFix[F, _]] { 52 | def apply[I](env: HEnvT[A, F, HFix[F, _], I]): HFix[F, I] = hfix(env.fa) 53 | } 54 | 55 | def forget[F[_[_], _]: HFunctor, A[_]]: HCofree[F, A, _] ~> HFix[F, _] = cataNT(forgetAlg) 56 | 57 | /** 58 | * Algebra to annotate the whole HCofree with a same annotation 59 | */ 60 | def annotateAlg[F[_[_], _], A[_]](ann: A[Nothing]): HFix[F, _] ~> HEnvT[A, F, HFix[F, _], _] = 61 | new HCoAlgebra[[Y[_], Z] =>> HEnvT[A, F, Y, Z], HFix[F, _]] { 62 | override def apply[T](fa: HFix[F, T]): HEnvT[A, F, HFix[F, _], T] = 63 | HEnvT[A, F, HFix[F, _], T](ann.asInstanceOf[A[T]], fa.unfix.value) 64 | } 65 | 66 | def annotate[F[_[_], _]: HFunctor, A[_]](ann: A[Nothing]): HFix[F, _] ~> HCofree[F, A, _] = anaNT(annotateAlg(ann)) 67 | } 68 | 69 | given [F[_[_], _]: HFunctor]: HFunctor[[Y[_], Z] =>> HCofree[F, Y, Z]] with { 70 | override def hlift[M[_], N[_]](nt: M ~> N): HCofree[F, M, _] ~> HCofree[F, N, _] = 71 | new (HCofree[F, M, *] ~> HCofree[F, N, *]) { 72 | override def apply[I](hc: HCofree[F, M, I]): HCofree[F, N, I] = { 73 | val step = hc.unfix.value 74 | HFix.hcofree( 75 | nt.apply(step.ask), 76 | HFunctor[F].hlift(HFunctor[[Y[_], Z] =>> HCofree[F, Y, Z]].hlift(nt)).apply(step.fa) 77 | ) 78 | } 79 | } 80 | } 81 | 82 | final case class HMutu[F[_[_], _], G[_[_], _], I](unmutu: F[G[HMutu[F, G, _], _], I]) { 83 | type Inner[T] = G[HMutu[F, G, _], T] 84 | 85 | def transformInner[H[_[_], _]](f: Inner ~> H[HMutu[F, H, _], _])(using hfg: HFunctor[F]): HMutu[F, H, I] = 86 | HMutu(unmutu.hfmap(f)) 87 | } 88 | 89 | final case class HEnvT[E[_], F[_[_], _], G[_], I](ask: E[I], fa: F[G, I]) 90 | 91 | given [E[_], F[_[_], _]: HFunctor]: HFunctor[[Y[_], Z] =>> HEnvT[E, F, Y, Z]] with { 92 | override def hlift[M[_], N[_]](nt: M ~> N): HEnvT[E, F, M, _] ~> HEnvT[E, F, N, _] = 93 | new (HEnvT[E, F, M, _] ~> HEnvT[E, F, N, _]) { 94 | override 
def apply[I](fm: HEnvT[E, F, M, I]): HEnvT[E, F, N, I] = HEnvT(fm.ask, fm.fa.hfmap[N](nt)) 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /core/src/main/scala-3/morphling/SchemaF.scala: -------------------------------------------------------------------------------- 1 | package morphling 2 | 3 | import cats.* 4 | import cats.data.NonEmptyList 5 | import cats.free.* 6 | import glass.* 7 | 8 | /** 9 | * The base trait for the schema GADT. 10 | * 11 | * @define PDefn 12 | * The GADT type constructor for a sum type which defines the set of primitive types used in the schema. 13 | * @define IDefn 14 | * The type of the Scala value to be produced (or consumed) by an interpreter of the schema. Also known as the "index" 15 | * type of the schema. 16 | * @define FDefn 17 | * The functor through which the structure of the schema will be interpreted. This will almost always be a fixpoint 18 | * type such as [[morphling.HFix.HCofree]], which is used to introduce the ability to create recursive 19 | * (tree-structured) schema. 20 | * 21 | * @tparam P 22 | * $PDefn 23 | * @tparam F 24 | * $FDefn 25 | * @tparam I 26 | * $IDefn 27 | */ 28 | sealed trait SchemaF[P[_], F[_], I] { 29 | 30 | /** 31 | * HFunctor operation which allows transformation of the functor through which the structure of the schema will be 32 | * interpreted. 33 | * 34 | * Defining this operation directly on the SchemaF type rather than in [[morphling.SchemaF.schemaFHFunctor]] 35 | * simplifies type inference. 36 | */ 37 | def hfmap[G[_]](nt: F ~> G): SchemaF[P, G, I] 38 | 39 | /** 40 | * HFunctor operation which allows transformation of the primitive algebra of the schema. 41 | * 42 | * Defining this operation directly on the SchemaF type rather than in [[morphling.SchemaF.schemaFHFunctor]] 43 | * simplifies type inference. 
44 | */ 45 | def pmap[Q[_]](nt: P ~> Q): SchemaF[Q, F, I] 46 | } 47 | 48 | object SchemaF { 49 | given [P[_]]: HFunctor[[Y[_], Z] =>> SchemaF[P, Y, Z]] = new HFunctor[[Y[_], Z] =>> SchemaF[P, Y, Z]] { 50 | override def hlift[M[_], N[_]](nt: M ~> N): SchemaF[P, M, _] ~> SchemaF[P, N, _] = 51 | new (SchemaF[P, M, _] ~> SchemaF[P, N, _]) { 52 | def apply[I](fa: SchemaF[P, M, I]): SchemaF[P, N, I] = fa.hfmap(nt) 53 | } 54 | } 55 | } 56 | 57 | /** 58 | * Schema constructor that wraps a value of an underlying GADT of allowed primitive types. 59 | * 60 | * The underlying GADT defines a set of types via GADT constructors; see [[morphling.protocol.SType]] for an example. 61 | * This set of types defines what types may be treated as primitive (and have parsing/ serialization/etc deferred to an 62 | * external handler) when interpreting a schema value. For example, one might want to construct a GADT for for the Scala 63 | * primitive types as such: 64 | * 65 | * {{{ 66 | * sealed trait SType[I] 67 | * 68 | * case object SNullT extends SType[Unit] 69 | * case object SBoolT extends SType[Boolean] 70 | * 71 | * case object SByteT extends SType[Byte] 72 | * case object SShortT extends SType[Short] 73 | * case object SIntT extends SType[Int] 74 | * case object SLongT extends SType[Long] 75 | * 76 | * case object SFloatT extends SType[Float] 77 | * case object SDoubleT extends SType[Double] 78 | * 79 | * case object SCharT extends SType[Char] 80 | * case object SStrT extends SType[String] 81 | * }}} 82 | * 83 | * This example treats String values as primitive as well, even though strictly speaking they're reference types, just 84 | * because virtually any interpreter for a schema algebra will not want to represent strings in terms of sum or product 85 | * types. The same might hold true for, for example, [[scala.Array]] but for the purposes of this example issues related 86 | * to `ClassManifest` instances would introduce excessive complexity. 
87 | * 88 | * @tparam P 89 | * $PDefn 90 | * @tparam F 91 | * $FDefn 92 | * @tparam I 93 | * $IDefn 94 | * @param prim 95 | * value identifying a primitive type. 96 | */ 97 | final case class PrimSchema[P[_], F[_], I](prim: P[I]) extends SchemaF[P, F, I] { 98 | def hfmap[G[_]](nt: F ~> G): PrimSchema[P, G, I] = PrimSchema[P, G, I](prim) 99 | def pmap[Q[_]](nt: P ~> Q): PrimSchema[Q, F, I] = PrimSchema[Q, F, I](nt(prim)) 100 | } 101 | 102 | /** 103 | * Constructor that enables creation of schema for sum types. 104 | * 105 | * Each constructor of the sum type `I` is represented as a member of the list of alternatives. Each alternative defines 106 | * a prism between a single constructor of the sum type, and an underlying type describing the arguments demanded by 107 | * that constructor. 108 | * 109 | * Consider the following sum type. The first constructor takes no arguments; the second takes two. 110 | * 111 | * {{{ 112 | * sealed trait Role 113 | * 114 | * case object User extends Role 115 | * case class Administrator(department: String, subordinateCount: Int) extends Role 116 | * }}} 117 | * 118 | * A schema value for this type looks like: 119 | * 120 | * {{{ 121 | * val roleSchema = oneOf( 122 | * alt[Unit, Prim, Role, Unit]( 123 | * "user", 124 | * Schema.empty, 125 | * (_: Unit) => User, 126 | * { 127 | * case User => Some(Unit) 128 | * case _ => None 129 | * } 130 | * ) :: 131 | * alt[Unit, Prim, Role, Administrator]( 132 | * "administrator", 133 | * rec[Prim, Administrator]( 134 | * ( 135 | * required("department", Prim.str, (_: Administrator).department), 136 | * required("subordinateCount", Prim.int, (_: Administrator).subordinateCount) 137 | * ).mapN(Administrator.apply) 138 | * ), 139 | * identity, 140 | * { 141 | * case a @ Administrator(_, _) => Some(a) 142 | * case _ => None 143 | * } 144 | * ) :: Nil 145 | * ) 146 | * }}} 147 | * 148 | * @tparam P 149 | * $PDefn 150 | * @tparam F 151 | * $FDefn 152 | * @tparam I 153 | * $IDefn 154 | */ 155 | final 
case class OneOfSchema[P[_], F[_], I](alts: NonEmptyList[Alt[F, I, ?]], discriminator: Option[String] = None) 156 | extends SchemaF[P, F, I] { 157 | def hfmap[G[_]](nt: F ~> G): OneOfSchema[P, G, I] = OneOfSchema[P, G, I](alts.map(_.hfmap(nt)), discriminator) 158 | def pmap[Q[_]](nt: P ~> Q): OneOfSchema[Q, F, I] = OneOfSchema[Q, F, I](alts, discriminator) 159 | } 160 | 161 | /** 162 | * A prism between a base type containing the arguments required by a single constructor of a sum type, and that sum 163 | * type, along with the schema for the base type is used to describe those constructor arguments. The identifier is used 164 | * to distinguish which constructor is being represented in the serialized form. 165 | * 166 | * @define IDefn 167 | * The type of the Scala value to be produced (or consumed) by an interpreter of the schema. Also known as the "index" 168 | * type of the schema. 169 | * 170 | * @define FDefn 171 | * The functor through which the structure of the schema will be interpreted. This will almost always be a fixpoint 172 | * type such as [[morphling.HFix.HCofree]], which is used to introduce the ability to create recursive 173 | * (tree-structured) schema. 174 | * 175 | * @tparam F 176 | * $FDefn 177 | * @tparam I 178 | * $IDefn 179 | * @tparam I0 180 | * The base type which corresponds to the arguments to the selected constructor. 181 | * @param id 182 | * The unique identifier of the constructor 183 | * @param base 184 | * The schema for the `I0` type 185 | * @param subset 186 | * Subset between the sum type and the selected constructor. 187 | */ 188 | final case class Alt[F[_], I, I0](id: String, base: F[I0], subset: Subset[I, I0]) { 189 | def hfmap[G[_]](nt: F ~> G): Alt[G, I, I0] = Alt(id, nt(base), subset) 190 | } 191 | 192 | /** 193 | * Wrapper for the free applicative structure which is used to construct and disassemble values of product types. 
194 | * 195 | * @tparam P 196 | * $PDefn 197 | * @tparam F 198 | * $FDefn 199 | * @tparam I 200 | * $IDefn 201 | * @param props 202 | * the free applicative value composed of zero or more PropSchema instances 203 | */ 204 | final case class RecordSchema[P[_], F[_], I](props: FreeApplicative[PropSchema[I, F, *], I]) extends SchemaF[P, F, I] { 205 | def hfmap[G[_]](nt: F ~> G): RecordSchema[P, G, I] = RecordSchema[P, G, I]( 206 | props.compile[PropSchema[I, G, *]](HFunctor[[Y[_], Z] =>> PropSchema[I, Y, Z]].hlift[F, G](nt)) 207 | ) 208 | def pmap[Q[_]](nt: P ~> Q): RecordSchema[Q, F, I] = RecordSchema[Q, F, I](props) 209 | } 210 | 211 | /** 212 | * Base trait for values which describe record properties. 213 | * 214 | * @define FDefn 215 | * The functor through which the structure of the schema will be interpreted. This will almost always be a fixpoint 216 | * type such as [[morphling.HFix.HCofree]], which is used to introduce the ability to create recursive 217 | * (tree-structured) schema. 218 | * 219 | * @tparam O 220 | * The record type. 221 | * @tparam F 222 | * $FDefn 223 | * @tparam I 224 | * The type of the property value. 225 | */ 226 | sealed trait PropSchema[O, F[_], I] { 227 | def fieldName: String 228 | def extract: Extract[O, I] 229 | 230 | def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, I] 231 | } 232 | 233 | /** 234 | * Class describing a required property of a record. 235 | * 236 | * @param fieldName 237 | * The name of the property. 238 | * @param base 239 | * Schema for the property's value type. 240 | * @param extract 241 | * Extract lens from the record type to the property. 242 | * @param default 243 | * Optional default value, for use in the case that a serialized form is missing the property. 
244 | */ 245 | final case class Required[O, F[_], I]( 246 | fieldName: String, 247 | base: F[I], 248 | extract: Extract[O, I], 249 | default: Option[I] 250 | ) extends PropSchema[O, F, I] { 251 | def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, I] = 252 | Required(fieldName, nt(base), extract, default) 253 | } 254 | 255 | /** 256 | * Class describing an optional property of a record. Since in many serialized forms optional properties may be omitted 257 | * entirely from the serialized form, a distinct type is needed in order to be able to correctly interpret the absence 258 | * of a field. 259 | * 260 | * @param fieldName 261 | * The name of the property. 262 | * @param base 263 | * Schema for the property's value type. 264 | * @param extract 265 | * Extract lens from the record type to the property. 266 | */ 267 | final case class Optional[O, F[_], I]( 268 | fieldName: String, 269 | base: F[I], 270 | extract: Extract[O, Option[I]] 271 | ) extends PropSchema[O, F, Option[I]] { 272 | def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, Option[I]] = 273 | Optional(fieldName, nt(base), extract) 274 | } 275 | 276 | /** 277 | * Class describing an optional property of a record that is always absent. 278 | * 279 | * @param fieldName 280 | * The name of the property. 281 | * @param extract 282 | * Extract lens from the record type to the property. 283 | */ 284 | final case class Absent[O, F[_], I]( 285 | fieldName: String, 286 | extract: Extract[O, Option[I]] 287 | ) extends PropSchema[O, F, Option[I]] { 288 | def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, Option[I]] = 289 | Absent(fieldName, extract) 290 | } 291 | 292 | /** 293 | * Class describing a constant (non-serializable) property of a record. 294 | * @param fieldName 295 | * The name of the property. 296 | * @param value 297 | * The value of the property. 298 | * @param extract 299 | * Extract lens from the record type to the property. 
300 | */ 301 | final case class Constant[O, F[_], I]( 302 | fieldName: String, 303 | value: I, 304 | extract: Extract[O, I] 305 | ) extends PropSchema[O, F, I] { 306 | override def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, I] = 307 | this.asInstanceOf[PropSchema[O, G, I]] 308 | } 309 | 310 | object PropSchema { 311 | given [O]: HFunctor[[Y[_], Z] =>> PropSchema[O, Y, Z]] = 312 | new HFunctor[[Y[_], Z] =>> PropSchema[O, Y, Z]] { 313 | def hlift[M[_], N[_]](nt: M ~> N): PropSchema[O, M, _] ~> PropSchema[O, N, _] = 314 | new (PropSchema[O, M, _] ~> PropSchema[O, N, _]) { 315 | def apply[I](ps: PropSchema[O, M, I]): PropSchema[O, N, I] = ps.hfmap(nt) 316 | } 317 | } 318 | 319 | private def extract[A, B](f: A => B): Extract[A, B] = (s: A) => f(s) 320 | 321 | def contraNT[O, N, F[_]](f: N => O): PropSchema[O, F, _] ~> PropSchema[N, F, _] = 322 | new (PropSchema[O, F, _] ~> PropSchema[N, F, _]) { 323 | def apply[I](pso: PropSchema[O, F, I]): PropSchema[N, F, I] = 324 | pso match { 325 | case Required(n, s, g, d) => Required(n, s, extract(f) >> g, d) 326 | case opt: Optional[O, F, i] @unchecked => Optional(opt.fieldName, opt.base, extract(f) >> opt.extract) 327 | case Constant(fn, v, g) => Constant(fn, v, extract(f) >> g) 328 | case abs: Absent[O, F, i] @unchecked => Absent(abs.fieldName, extract(f) >> abs.extract) 329 | } 330 | } 331 | } 332 | 333 | case class IsoSchema[P[_], F[_], I, J](base: F[I], eqv: Equivalent[I, J]) extends SchemaF[P, F, J] { 334 | def hfmap[G[_]](nt: F ~> G): IsoSchema[P, G, I, J] = IsoSchema(nt(base), eqv) 335 | def pmap[Q[_]](nt: P ~> Q): IsoSchema[Q, F, I, J] = IsoSchema(base, eqv) 336 | } 337 | -------------------------------------------------------------------------------- /core/src/test/scala-2/morphling/protocol/SType.scala: -------------------------------------------------------------------------------- 1 | package morphling.protocol 2 | 3 | import cats.~> 4 | import morphling.Schema.* 5 | import morphling.{HFunctor, HMutu} 6 | 7 | sealed 
trait SType[F[_], I]

// Primitive type tags for the test protocol. The functor parameter `F` is
// used only by `SArrayT`, which stores the element schema.
case class SNullT[F[_]]()   extends SType[F, Unit]
case class SBoolT[F[_]]()   extends SType[F, Boolean]

case class SIntT[F[_]]()    extends SType[F, Int]
case class SLongT[F[_]]()   extends SType[F, Long]

case class SFloatT[F[_]]()  extends SType[F, Float]
case class SDoubleT[F[_]]() extends SType[F, Double]

case class SCharT[F[_]]()   extends SType[F, Char]
case class SStrT[F[_]]()    extends SType[F, String]

case class SArrayT[F[_], I](elem: F[I]) extends SType[F, Vector[I]]

object SType {

  /** The primitive algebra tied back into `Schema` via mutual recursion. */
  type SSchema[I] = HMutu[SType, Schema, I]

  // One ready-made schema value per nullary primitive tag.
  val sNull   = prim(HMutu[SType, Schema, Unit](SNullT()))
  val sBool   = prim(HMutu[SType, Schema, Boolean](SBoolT()))
  val sInt    = prim(HMutu[SType, Schema, Int](SIntT()))
  val sLong   = prim(HMutu[SType, Schema, Long](SLongT()))
  val sFloat  = prim(HMutu[SType, Schema, Float](SFloatT()))
  val sDouble = prim(HMutu[SType, Schema, Double](SDoubleT()))
  val sChar   = prim(HMutu[SType, Schema, Char](SCharT()))
  val sStr    = prim(HMutu[SType, Schema, String](SStrT()))

  /** Schema for a homogeneous array whose elements are described by `elem`. */
  def sArray[I](elem: Schema[SSchema, I]): Schema[SSchema, Vector[I]] =
    prim(HMutu[SType, Schema, Vector[I]](SArrayT(elem)))

  implicit val sTypeHFunctor: HFunctor[SType] =
    new HFunctor[SType] {
      override def hlift[M[_], N[_]](nt: M ~> N): SType[M, *] ~> SType[N, *] =
        new (SType[M, *] ~> SType[N, *]) {
          override def apply[A](st: SType[M, A]): SType[N, A] =
            st match {
              // Nullary tags carry no `F` values; rebuild them at the new functor.
              case SNullT()      => SNullT()
              case SBoolT()      => SBoolT()
              case SIntT()       => SIntT()
              case SLongT()      => SLongT()
              case SFloatT()     => SFloatT()
              case SDoubleT()    => SDoubleT()
              case SCharT()      => SCharT()
              case SStrT()       => SStrT()
              // Only the array tag actually transports an `F` value.
              case SArrayT(elem) => SArrayT(nt(elem))
            }
        }
    }
}

// -----------------------------------------------------------------------------
// core/src/test/scala-2/morphling/protocol/annotated/Restriction.scala
// -----------------------------------------------------------------------------
package morphling.protocol.annotated

import alleycats.EmptyK

/**
 * Annotation ADT used by the annotated test schemas: either no restriction,
 * or an inclusive integer range.
 */
sealed trait Restriction[T]

case class Non[A]() extends Restriction[A]
object Non {
  // One shared instance; the cast is safe because `Non` has no `A`-typed members.
  private val nonOfNothing = Non[Nothing]()

  def of[A]: Non[A] = nonOfNothing.asInstanceOf[Non[A]]
}

case class Range(from: Int, to: Int) extends Restriction[Int]

object Restriction {

  /** `EmptyK` instance producing the "no restriction" annotation for any type. */
  implicit val restrictionEmptyK: EmptyK[Restriction] = new EmptyK[Restriction] {
    override def empty[A]: Restriction[A] = Non.of[A]
  }
}

// -----------------------------------------------------------------------------
// core/src/test/scala-2/morphling/protocol/annotated/STypeAnn.scala
// -----------------------------------------------------------------------------
package morphling.protocol.annotated

import alleycats.EmptyK
import morphling.HMutu
import morphling.annotated.Schema.{AnnotatedSchema, prim}
import morphling.protocol.*

object STypeAnn {

  /** A schema whose every node carries a `Restriction` annotation. */
  type Schema[P[_], I] = AnnotatedSchema[P, Restriction, I]

  type ASchema[I] = HMutu[SType, Schema, I]

  // Default annotation: no restriction.
  private def non[T]: Restriction[T] = EmptyK[Restriction].empty[T]

  // One smart constructor per primitive, each taking an optional annotation.
  def sNull(ann: Restriction[Unit] = non): Schema[ASchema, Unit] =
    prim(HMutu[SType, Schema, Unit](SNullT()), ann)
  def sBool(ann: Restriction[Boolean] = non): Schema[ASchema, Boolean] =
    prim(HMutu[SType, Schema, Boolean](SBoolT()), ann)
  def sInt(ann: Restriction[Int] = non): Schema[ASchema, Int] =
    prim(HMutu[SType, Schema, Int](SIntT()), ann)
  def sLong(ann: Restriction[Long] = non): Schema[ASchema, Long] =
    prim(HMutu[SType, Schema, Long](SLongT()), ann)
  def sFloat(ann: Restriction[Float] = non): Schema[ASchema, Float] =
    prim(HMutu[SType, Schema, Float](SFloatT()), ann)
  def sDouble(ann:
Restriction[Double] = non): Schema[ASchema, Double] = 26 | prim(HMutu[SType, Schema, Double](SDoubleT()), ann) 27 | def sChar(ann: Restriction[Char] = non): Schema[ASchema, Char] = 28 | prim(HMutu[SType, Schema, Char](SCharT()), ann) 29 | def sStr(ann: Restriction[String] = non): Schema[ASchema, String] = 30 | prim(HMutu[SType, Schema, String](SStrT()), ann) 31 | 32 | def sArray[I](elem: Schema[ASchema, I], ann: Restriction[Vector[I]] = non[Vector[I]]): Schema[ASchema, Vector[I]] = 33 | prim(HMutu[SType, Schema, Vector[I]](SArrayT(elem)), ann) 34 | } 35 | -------------------------------------------------------------------------------- /core/src/test/scala-2/morphling/samples/Deannotator.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples 2 | 3 | import cats.* 4 | import morphling.HFix 5 | import morphling.Schema.Schema 6 | import morphling.annotated.Schema.AnnotatedSchema 7 | import morphling.protocol.SType.SSchema 8 | import morphling.protocol.annotated.Restriction 9 | import morphling.protocol.annotated.STypeAnn.ASchema 10 | 11 | object Deannotator extends (AnnotatedSchema[ASchema, Restriction, *] ~> Schema[SSchema, *]) { 12 | override def apply[T](as: AnnotatedSchema[ASchema, Restriction, T]): Schema[SSchema, T] = 13 | HFix.hfix( 14 | as.unfix.value.fa.pmap[SSchema](λ[ASchema ~> SSchema](_.transformInner[Schema](Deannotator))).hfmap(Deannotator) 15 | ) 16 | } 17 | -------------------------------------------------------------------------------- /core/src/test/scala-2/morphling/samples/Person.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples 2 | 3 | import cats.syntax.apply.* 4 | import glass.* 5 | import glass.macros.* 6 | import morphling.Schema.* 7 | import morphling.protocol.SType.* 8 | import morphling.samples.annotated.AnnPerson 9 | 10 | import java.time.Instant 11 | 12 | @Optics 13 | case class Person( 14 | name: String, 15 | 
birthDate: Instant, 16 | roles: Vector[Role], 17 | updateCounter: Int, 18 | stamp: Int, 19 | ignored: Option[Any] 20 | ) 21 | 22 | object Person { 23 | private val instantIso = Equivalent[Long](Instant.ofEpochMilli _)(_.toEpochMilli) 24 | 25 | val schema: Schema[SSchema, Person] = rec( 26 | ( 27 | required("name", sStr, Person.name), 28 | required( 29 | "birthDate", 30 | sLong.composeIso(instantIso), 31 | Person.birthDate 32 | ), 33 | required("roles", sArray(Role.schema), Person.roles), 34 | property("updateCounter", sInt, 0, Person.updateCounter), 35 | constant[SSchema]("stamp", 101, Person.stamp), 36 | absent[SSchema]("ignored", Person.ignored) 37 | ).mapN(Person.apply) 38 | ) 39 | 40 | val flatSchema: Schema[SSchema, Person] = rec( 41 | ( 42 | required("name", sStr, Person.name), 43 | required( 44 | "birthDate", 45 | sLong.composeIso(instantIso), 46 | Person.birthDate 47 | ), 48 | required("roles", sArray(Role.flatSchema), Person.roles), 49 | property("updateCounter", sInt, 0, Person.updateCounter), 50 | constant[SSchema]("stamp", 101, Person.stamp), 51 | absent[SSchema]("ignored", Person.ignored) 52 | ).mapN(Person.apply) 53 | ) 54 | 55 | lazy val deannotatedSchema: Schema[SSchema, Person] = 56 | Deannotator(AnnPerson.schema) 57 | } 58 | -------------------------------------------------------------------------------- /core/src/test/scala-2/morphling/samples/Role.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples 2 | 3 | import cats.syntax.apply.* 4 | import glass.macros.* 5 | import morphling.Schema 6 | import morphling.Schema.* 7 | import morphling.protocol.SType.* 8 | import shapeless.HNil 9 | 10 | sealed trait Role 11 | 12 | object Role { 13 | val schema: Schema[SSchema, Role] = Schema.oneOf( 14 | alt[SSchema, Role, User.type]( 15 | "user", 16 | Schema.const(User), 17 | User.prism 18 | ) :: 19 | alt[SSchema, Role, Administrator]( 20 | "administrator", 21 | rec( 22 | ( 23 | 
required("department", sStr, Administrator.department), 24 | required("subordinateCount", sInt, Administrator.subordinateCount) 25 | ).mapN(Administrator.apply) 26 | ), 27 | Administrator.prism 28 | ) :: HNil 29 | ) 30 | 31 | val flatSchema: Schema[SSchema, Role] = Schema.oneOfDiscr("type")( 32 | alt[SSchema, Role, User.type]( 33 | "user", 34 | Schema.const(User), 35 | User.prism 36 | ) :: 37 | alt[SSchema, Role, Administrator]( 38 | "administrator", 39 | rec( 40 | ( 41 | required("department", sStr, Administrator.department), 42 | required("subordinateCount", sInt, Administrator.subordinateCount) 43 | ).mapN(Administrator.apply) 44 | ), 45 | Administrator.prism 46 | ) :: HNil 47 | ) 48 | } 49 | 50 | case object User extends Role { 51 | val prism = GenSubset[Role, User.type] 52 | } 53 | 54 | @Optics 55 | case class Administrator(department: String, subordinateCount: Int) extends Role 56 | object Administrator { 57 | val prism = GenSubset[Role, Administrator] 58 | } 59 | -------------------------------------------------------------------------------- /core/src/test/scala-2/morphling/samples/annotated/AnnPerson.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples.annotated 2 | 3 | import cats.syntax.apply.* 4 | import glass.Equivalent 5 | import morphling.annotated.Schema.* 6 | import morphling.protocol.annotated.Restriction 7 | import morphling.protocol.annotated.STypeAnn.* 8 | import morphling.samples.Person 9 | 10 | import java.time.Instant 11 | 12 | object AnnPerson { 13 | private val instantIso = Equivalent[Long](Instant.ofEpochMilli _)(_.toEpochMilli) 14 | 15 | val schema: Schema[ASchema, Person] = rec( 16 | ( 17 | required("name", sStr(), Person.name), 18 | required( 19 | "birthDate", 20 | sLong().composeIso(instantIso, _.asInstanceOf[Restriction[Instant]]), 21 | Person.birthDate 22 | ), 23 | required("roles", sArray(AnnRole.schema), Person.roles), 24 | property("updateCounter", sInt(), 0, 
Person.updateCounter), 25 | constant[ASchema, Restriction]("stamp", 101, Person.stamp), 26 | absent[ASchema, Restriction]("ignored", Person.ignored) 27 | ).mapN(Person.apply) 28 | ) 29 | } 30 | -------------------------------------------------------------------------------- /core/src/test/scala-2/morphling/samples/annotated/AnnRole.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples.annotated 2 | 3 | import cats.syntax.apply.* 4 | import morphling.annotated.Schema 5 | import morphling.annotated.Schema.* 6 | import morphling.protocol.annotated.Restriction 7 | import morphling.protocol.annotated.STypeAnn.* 8 | import morphling.samples.{Administrator, Role, User} 9 | import shapeless.HNil 10 | 11 | object AnnRole { 12 | val schema: Schema[ASchema, Role] = Schema.oneOf( 13 | alt[ASchema, Restriction, Role, User.type]( 14 | "user", 15 | Schema.const(User), 16 | User.prism 17 | ) :: 18 | alt[ASchema, Restriction, Role, Administrator]( 19 | "administrator", 20 | rec( 21 | ( 22 | required("department", sStr(), Administrator.department), 23 | required("subordinateCount", sInt(), Administrator.subordinateCount) 24 | ).mapN(Administrator.apply) 25 | ), 26 | Administrator.prism 27 | ) :: HNil 28 | ) 29 | } 30 | -------------------------------------------------------------------------------- /core/src/test/scala-2/morphling/samples/annotated/Server.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples.annotated 2 | 3 | import cats.syntax.apply.* 4 | import glass.macros.* 5 | import morphling.annotated.Schema.* 6 | import morphling.protocol.annotated.Range 7 | import morphling.protocol.annotated.STypeAnn.* 8 | 9 | @Optics 10 | case class Server(host: String, port: Int) 11 | object Server { 12 | val schema: Schema[ASchema, Server] = rec( 13 | ( 14 | required("host", sStr(), host), 15 | required("port", sInt(Range(1, 65535)), port) 16 | 
).mapN(Server.apply) 17 | ) 18 | } 19 | -------------------------------------------------------------------------------- /core/src/test/scala-2/morphling/samples/package.scala: -------------------------------------------------------------------------------- 1 | package morphling 2 | 3 | import java.time.Instant 4 | 5 | package object samples { 6 | val person = Person( 7 | "Kris Nuttycombe", 8 | Instant.ofEpochMilli(20147028000L), 9 | Vector(Administrator("windmill-tilting", 0)), 10 | 42, 11 | 42, 12 | None 13 | ) 14 | } 15 | -------------------------------------------------------------------------------- /core/src/test/scala-3/morphling/protocol/SType.scala: -------------------------------------------------------------------------------- 1 | package morphling.protocol 2 | 3 | import cats.~> 4 | import morphling.Schema.* 5 | import morphling.{HFunctor, HMutu} 6 | 7 | sealed trait SType[F[_], I] 8 | 9 | case class SNullT[F[_]]() extends SType[F, Unit] 10 | case class SBoolT[F[_]]() extends SType[F, Boolean] 11 | 12 | case class SIntT[F[_]]() extends SType[F, Int] 13 | case class SLongT[F[_]]() extends SType[F, Long] 14 | 15 | case class SFloatT[F[_]]() extends SType[F, Float] 16 | case class SDoubleT[F[_]]() extends SType[F, Double] 17 | 18 | case class SCharT[F[_]]() extends SType[F, Char] 19 | case class SStrT[F[_]]() extends SType[F, String] 20 | 21 | case class SArrayT[F[_], I](elem: F[I]) extends SType[F, Vector[I]] 22 | 23 | object SType { 24 | type SSchema[I] = HMutu[SType, Schema, I] 25 | 26 | val sNull = prim(HMutu[SType, Schema, Unit](SNullT())) 27 | val sBool = prim(HMutu[SType, Schema, Boolean](SBoolT())) 28 | val sInt = prim(HMutu[SType, Schema, Int](SIntT())) 29 | val sLong = prim(HMutu[SType, Schema, Long](SLongT())) 30 | val sFloat = prim(HMutu[SType, Schema, Float](SFloatT())) 31 | val sDouble = prim(HMutu[SType, Schema, Double](SDoubleT())) 32 | val sChar = prim(HMutu[SType, Schema, Char](SCharT())) 33 | val sStr = prim(HMutu[SType, Schema, 
String](SStrT())) 34 | 35 | def sArray[I](elem: Schema[SSchema, I]): Schema[SSchema, Vector[I]] = 36 | prim(HMutu[SType, Schema, Vector[I]](SArrayT(elem))) 37 | 38 | given HFunctor[SType] = 39 | new HFunctor[SType] { 40 | override def hlift[M[_], N[_]](nt: M ~> N): SType[M, _] ~> SType[N, _] = 41 | new (SType[M, _] ~> SType[N, _]) { 42 | override def apply[A](stm: SType[M, A]): SType[N, A] = 43 | stm match { 44 | case SNullT() => SNullT() 45 | case SBoolT() => SBoolT() 46 | case SIntT() => SIntT() 47 | case SLongT() => SLongT() 48 | case SFloatT() => SFloatT() 49 | case SDoubleT() => SDoubleT() 50 | case SCharT() => SCharT() 51 | case SStrT() => SStrT() 52 | case SArrayT(elem) => SArrayT(nt(elem)) 53 | } 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /core/src/test/scala-3/morphling/protocol/annotated/Restriction.scala: -------------------------------------------------------------------------------- 1 | package morphling.protocol.annotated 2 | 3 | import alleycats.EmptyK 4 | 5 | sealed trait Restriction[T] 6 | case class Non[A]() extends Restriction[A] 7 | object Non { 8 | private val nonOfNothing = Non[Nothing]() 9 | 10 | def of[A]: Non[A] = nonOfNothing.asInstanceOf[Non[A]] 11 | } 12 | 13 | case class Range(from: Int, to: Int) extends Restriction[Int] 14 | object Restriction { 15 | given EmptyK[Restriction] = new EmptyK[Restriction] { 16 | override def empty[A]: Restriction[A] = Non.of[A] 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /core/src/test/scala-3/morphling/protocol/annotated/STypeAnn.scala: -------------------------------------------------------------------------------- 1 | package morphling.protocol.annotated 2 | 3 | import alleycats.EmptyK 4 | import morphling.HMutu 5 | import morphling.annotated.Schema.{AnnotatedSchema, prim} 6 | import morphling.protocol.* 7 | 8 | object STypeAnn { 9 | type Schema[P[_], I] = AnnotatedSchema[P, 
Restriction, I] 10 | 11 | type ASchema[I] = HMutu[SType, Schema, I] 12 | 13 | private def non[T]: Restriction[T] = EmptyK[Restriction].empty[T] 14 | 15 | def sNull(ann: Restriction[Unit] = non): Schema[ASchema, Unit] = 16 | prim(HMutu[SType, Schema, Unit](SNullT()), ann) 17 | def sBool(ann: Restriction[Boolean] = non): Schema[ASchema, Boolean] = 18 | prim(HMutu[SType, Schema, Boolean](SBoolT()), ann) 19 | def sInt(ann: Restriction[Int] = non): Schema[ASchema, Int] = 20 | prim(HMutu[SType, Schema, Int](SIntT()), ann) 21 | def sLong(ann: Restriction[Long] = non): Schema[ASchema, Long] = 22 | prim(HMutu[SType, Schema, Long](SLongT()), ann) 23 | def sFloat(ann: Restriction[Float] = non): Schema[ASchema, Float] = 24 | prim(HMutu[SType, Schema, Float](SFloatT()), ann) 25 | def sDouble(ann: Restriction[Double] = non): Schema[ASchema, Double] = 26 | prim(HMutu[SType, Schema, Double](SDoubleT()), ann) 27 | def sChar(ann: Restriction[Char] = non): Schema[ASchema, Char] = 28 | prim(HMutu[SType, Schema, Char](SCharT()), ann) 29 | def sStr(ann: Restriction[String] = non): Schema[ASchema, String] = 30 | prim(HMutu[SType, Schema, String](SStrT()), ann) 31 | 32 | def sArray[I](elem: Schema[ASchema, I], ann: Restriction[Vector[I]] = non[Vector[I]]): Schema[ASchema, Vector[I]] = 33 | prim(HMutu[SType, Schema, Vector[I]](SArrayT(elem)), ann) 34 | } 35 | -------------------------------------------------------------------------------- /core/src/test/scala-3/morphling/samples/Deannotator.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples 2 | 3 | import cats.* 4 | import cats.arrow.* 5 | import morphling.HFix 6 | import morphling.Schema.Schema 7 | import morphling.annotated.Schema.AnnotatedSchema 8 | import morphling.protocol.SType.SSchema 9 | import morphling.protocol.annotated.Restriction 10 | import morphling.protocol.annotated.STypeAnn.ASchema 11 | 12 | object Deannotator extends (AnnotatedSchema[ASchema, Restriction, 
_] ~> Schema[SSchema, _]) { 13 | override def apply[T](as: AnnotatedSchema[ASchema, Restriction, T]): Schema[SSchema, T] = 14 | HFix.hfix( 15 | as.unfix.value.fa 16 | .pmap[SSchema]( 17 | FunctionK.lift[ASchema, SSchema]([T] => (_: ASchema[T]).transformInner[Schema](Deannotator)) 18 | ) 19 | .hfmap(Deannotator) 20 | ) 21 | } 22 | -------------------------------------------------------------------------------- /core/src/test/scala-3/morphling/samples/Person.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples 2 | 3 | import cats.syntax.apply.* 4 | import glass.* 5 | import glass.macros.* 6 | import morphling.Schema.* 7 | import morphling.protocol.SType.* 8 | import morphling.samples.annotated.AnnPerson 9 | 10 | import java.time.Instant 11 | 12 | case class Person( 13 | name: String, 14 | birthDate: Instant, 15 | roles: Vector[Role], 16 | updateCounter: Int, 17 | stamp: Int, 18 | ignored: Option[Any] 19 | ) 20 | 21 | object Person extends DeriveContains { 22 | private val instantIso = Equivalent[Long](Instant.ofEpochMilli(_))(_.toEpochMilli) 23 | 24 | val schema: Schema[SSchema, Person] = rec( 25 | ( 26 | required("name", sStr, Person.name), 27 | required( 28 | "birthDate", 29 | sLong.composeIso(instantIso), 30 | Person.birthDate 31 | ), 32 | required("roles", sArray(Role.schema), Person.roles), 33 | property("updateCounter", sInt, 0, Person.updateCounter), 34 | constant[SSchema]("stamp", 101, Person.stamp), 35 | absent[SSchema]("ignored", Person.ignored) 36 | ).mapN(Person.apply) 37 | ) 38 | 39 | val flatSchema: Schema[SSchema, Person] = rec( 40 | ( 41 | required("name", sStr, Person.name), 42 | required( 43 | "birthDate", 44 | sLong.composeIso(instantIso), 45 | Person.birthDate 46 | ), 47 | required("roles", sArray(Role.flatSchema), Person.roles), 48 | property("updateCounter", sInt, 0, Person.updateCounter), 49 | constant[SSchema]("stamp", 101, Person.stamp), 50 | absent[SSchema]("ignored", 
Person.ignored) 51 | ).mapN(Person.apply) 52 | ) 53 | 54 | lazy val deannotatedSchema: Schema[SSchema, Person] = 55 | Deannotator(AnnPerson.schema) 56 | } 57 | -------------------------------------------------------------------------------- /core/src/test/scala-3/morphling/samples/Role.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples 2 | 3 | import cats.data.NonEmptyList 4 | import cats.syntax.apply.* 5 | import glass.macros.* 6 | import morphling.Schema 7 | import morphling.Schema.* 8 | import morphling.protocol.SType.* 9 | 10 | sealed trait Role 11 | 12 | object Role { 13 | val schema: Schema[SSchema, Role] = Schema.unsafeOneOf( 14 | NonEmptyList.of( 15 | alt[SSchema, Role, User.type]( 16 | "user", 17 | Schema.const(User), 18 | User.prism 19 | ), 20 | alt[SSchema, Role, Administrator]( 21 | "administrator", 22 | rec( 23 | ( 24 | required("department", sStr, Administrator.department), 25 | required("subordinateCount", sInt, Administrator.subordinateCount) 26 | ).mapN(Administrator.apply) 27 | ), 28 | Administrator.prism 29 | ) 30 | ) 31 | ) 32 | 33 | val flatSchema: Schema[SSchema, Role] = Schema.unsafeOneOfDiscr("type")( 34 | NonEmptyList.of( 35 | alt[SSchema, Role, User.type]( 36 | "user", 37 | Schema.const(User), 38 | User.prism 39 | ), 40 | alt[SSchema, Role, Administrator]( 41 | "administrator", 42 | rec( 43 | ( 44 | required("department", sStr, Administrator.department), 45 | required("subordinateCount", sInt, Administrator.subordinateCount) 46 | ).mapN(Administrator.apply) 47 | ), 48 | Administrator.prism 49 | ) 50 | ) 51 | ) 52 | } 53 | 54 | case object User extends Role { 55 | val prism = GenSubset[Role, User.type] 56 | } 57 | 58 | case class Administrator(department: String, subordinateCount: Int) extends Role 59 | object Administrator extends DeriveContains { 60 | val prism = GenSubset[Role, Administrator] 61 | } 62 | 
-------------------------------------------------------------------------------- /core/src/test/scala-3/morphling/samples/Samples.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples 2 | 3 | import java.time.Instant 4 | 5 | val person = Person( 6 | "Kris Nuttycombe", 7 | Instant.ofEpochMilli(20147028000L), 8 | Vector(Administrator("windmill-tilting", 0)), 9 | 42, 10 | 42, 11 | None 12 | ) 13 | -------------------------------------------------------------------------------- /core/src/test/scala-3/morphling/samples/annotated/AnnPerson.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples.annotated 2 | 3 | import cats.syntax.apply.* 4 | import glass.Equivalent 5 | import morphling.annotated.Schema.* 6 | import morphling.protocol.annotated.Restriction 7 | import morphling.protocol.annotated.STypeAnn.* 8 | import morphling.samples.Person 9 | 10 | import java.time.Instant 11 | 12 | object AnnPerson { 13 | private val instantIso = Equivalent[Long](Instant.ofEpochMilli(_))(_.toEpochMilli) 14 | 15 | val schema: Schema[ASchema, Person] = rec( 16 | ( 17 | required("name", sStr(), Person.name), 18 | required( 19 | "birthDate", 20 | sLong().composeIso(instantIso, _.asInstanceOf[Restriction[Instant]]), 21 | Person.birthDate 22 | ), 23 | required("roles", sArray(AnnRole.schema), Person.roles), 24 | property("updateCounter", sInt(), 0, Person.updateCounter), 25 | constant[ASchema, Restriction]("stamp", 101, Person.stamp), 26 | absent[ASchema, Restriction]("ignored", Person.ignored) 27 | ).mapN(Person.apply) 28 | ) 29 | } 30 | -------------------------------------------------------------------------------- /core/src/test/scala-3/morphling/samples/annotated/AnnRole.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples.annotated 2 | 3 | import cats.data.NonEmptyList 4 | import 
cats.syntax.apply.* 5 | import morphling.annotated.Schema 6 | import morphling.annotated.Schema.* 7 | import morphling.protocol.annotated.Restriction 8 | import morphling.protocol.annotated.STypeAnn.* 9 | import morphling.samples.{Administrator, Role, User} 10 | 11 | object AnnRole { 12 | val schema: Schema[ASchema, Role] = Schema.unsafeOneOf( 13 | NonEmptyList.of( 14 | alt[ASchema, Restriction, Role, User.type]( 15 | "user", 16 | Schema.const(User), 17 | User.prism 18 | ), 19 | alt[ASchema, Restriction, Role, Administrator]( 20 | "administrator", 21 | rec( 22 | ( 23 | required("department", sStr(), Administrator.department), 24 | required("subordinateCount", sInt(), Administrator.subordinateCount) 25 | ).mapN(Administrator.apply) 26 | ), 27 | Administrator.prism 28 | ) 29 | ) 30 | ) 31 | } 32 | -------------------------------------------------------------------------------- /core/src/test/scala-3/morphling/samples/annotated/Server.scala: -------------------------------------------------------------------------------- 1 | package morphling.samples.annotated 2 | 3 | import cats.syntax.apply.* 4 | import glass.macros.* 5 | import morphling.annotated.Schema.* 6 | import morphling.protocol.annotated.Range 7 | import morphling.protocol.annotated.STypeAnn.* 8 | 9 | case class Server(host: String, port: Int) 10 | object Server extends DeriveContains { 11 | val schema: Schema[ASchema, Server] = rec( 12 | ( 13 | required("host", sStr(), this.host), 14 | required("port", sInt(Range(1, 65535)), this.port) 15 | ).mapN(Server.apply) 16 | ) 17 | } 18 | -------------------------------------------------------------------------------- /project/Settings.scala: -------------------------------------------------------------------------------- 1 | import scalafix.sbt.ScalafixPlugin.autoImport._ 2 | 3 | import sbt.Keys._ 4 | import sbt._ 5 | import sbtprojectmatrix.ProjectMatrixKeys._ 6 | 7 | object Settings { 8 | val common = Seq( 9 | organization := "com.github.danslapman", 10 | 
organizationName := "danslapman", 11 | organizationHomepage := Some(url("https://github.com/danslapman")), 12 | scalacOptions ++= { 13 | (CrossVersion.partialVersion(scalaVersion.value): @unchecked) match { 14 | case Some((2, 12)) => 15 | Seq( 16 | "-Ywarn-unused-import", 17 | "-language:higherKinds,implicitConversions", 18 | "-deprecation", 19 | "-Ypartial-unification", 20 | "-Xsource:3", 21 | "-P:kind-projector:underscore-placeholders", 22 | "-Yrangepos" 23 | ) 24 | case Some((2, 13)) => 25 | Seq( 26 | "-Wunused:imports", 27 | "-language:higherKinds,implicitConversions", 28 | "-deprecation", 29 | "-Ymacro-annotations", 30 | "-Xsource:3", 31 | "-P:kind-projector:underscore-placeholders", 32 | "-Yrangepos" 33 | ) 34 | case Some((3, _)) => 35 | Seq( 36 | "-Ykind-projector:underscores", 37 | "-source:future" 38 | ) 39 | } 40 | }, 41 | semanticdbEnabled := true, 42 | semanticdbVersion := scalafixSemanticdb.revision, 43 | scalafixConfig := { 44 | (CrossVersion.partialVersion(scalaVersion.value): @unchecked) match { 45 | case Some((2, _)) => scalafixConfig.value 46 | case Some((3, _)) => Some(projectMatrixBaseDirectory.value.getParentFile / ".scalafix3.conf") 47 | } 48 | }, 49 | libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match { 50 | case Some((2, y)) if y < 13 => 51 | Seq(compilerPlugin("org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full)) 52 | case _ => Seq.empty[ModuleID] 53 | }), 54 | libraryDependencies ++= { 55 | CrossVersion.partialVersion(scalaVersion.value) match { 56 | case Some((2, _)) => 57 | Seq( 58 | compilerPlugin("org.typelevel" %% "kind-projector" % "0.13.3" cross CrossVersion.full), 59 | compilerPlugin(scalafixSemanticdb), 60 | compilerPlugin("com.olegpy" %% "better-monadic-for" % "0.3.1") 61 | ) 62 | case _ => Seq.empty[ModuleID] 63 | } 64 | }, 65 | scmInfo := Some( 66 | ScmInfo( 67 | url("https://github.com/danslapman/morphling"), 68 | "scm:git@github.com:danslapman/morphling.git" 69 | ) 70 | ), 71 | 
developers := List( 72 | Developer( 73 | id = "danslapman", 74 | name = "Daniil Smirnov", 75 | email = "danslapman@gmail.com", 76 | url = url("https://github.com/danslapman") 77 | ) 78 | ), 79 | licenses += ("WTFPL", url("http://www.wtfpl.net")), 80 | homepage := Some(url("https://github.com/danslapman/morphling")) 81 | ) 82 | } 83 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version = 1.9.9 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.13.0") 2 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.2") 3 | addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.10.0") 4 | addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.6.1") -------------------------------------------------------------------------------- /reactivemongo/src/main/scala-2/morphling/reactivemongo/FromBson.scala: -------------------------------------------------------------------------------- 1 | package morphling.reactivemongo 2 | 3 | import cats.* 4 | import cats.data.EitherK 5 | import cats.free.* 6 | import morphling.Schema.Schema 7 | import morphling.annotated.Schema.AnnotatedSchema 8 | import morphling.{Absent, Alt, Constant, HAlgebra, HEnvT, HFix, IsoSchema, OneOfSchema, Optional, PrimSchema, PropSchema, RecordSchema, Required, SchemaF} 9 | import mouse.boolean.* 10 | import mouse.option.* 11 | import reactivemongo.api.bson.* 12 | import simulacrum_.typeclass 13 | 14 | @typeclass 15 | trait FromBson[S[_]] { 16 | def reader: S ~> BSONReader 17 | } 18 | 19 | object FromBson { 20 | implicit class FromBsonOps[F[_], A](fa: F[A]) { 21 | def reader(implicit FB: FromBson[F]): BSONReader[A] = FB.reader(fa) 22 | } 23 | 24 | implicit def 
schemaFromBson[P[_]: FromBson]: FromBson[Schema[P, *]] = new FromBson[Schema[P, *]] { 25 | override val reader: Schema[P, *] ~> BSONReader = new (Schema[P, *] ~> BSONReader) { 26 | override def apply[I](schema: Schema[P, I]): BSONReader[I] = 27 | HFix.cataNT[SchemaF[P, *[_], *], BSONReader](decoderAlg[P]).apply(schema) 28 | } 29 | } 30 | 31 | implicit def annSchemaFromBson[P[_]: FromBson, A[_]: *[_] ~> λ[T => Endo[BSONReader[T]]]] 32 | : FromBson[AnnotatedSchema[P, A, *]] = 33 | new FromBson[AnnotatedSchema[P, A, *]] { 34 | override val reader: AnnotatedSchema[P, A, *] ~> BSONReader = new (AnnotatedSchema[P, A, *] ~> BSONReader) { 35 | override def apply[I](schema: AnnotatedSchema[P, A, I]): BSONReader[I] = 36 | HFix.cataNT[HEnvT[A, SchemaF[P, *[_], *], *[_], *], BSONReader](annDecoderAlg[P, A]).apply(schema) 37 | } 38 | } 39 | 40 | def decoderAlg[P[_]: FromBson]: HAlgebra[SchemaF[P, *[_], *], BSONReader] = 41 | new HAlgebra[SchemaF[P, *[_], *], BSONReader] { 42 | def apply[I](s: SchemaF[P, BSONReader, I]): BSONReader[I] = s match { 43 | case PrimSchema(p) => 44 | FromBson[P].reader(p) 45 | 46 | case OneOfSchema(alts, None) => 47 | BSONDocumentReader[I] { doc => 48 | val results = for { 49 | fields <- doc.elements.map(_.name).toList 50 | altResult <- alts.toList flatMap { case Alt(id, base, prism) => 51 | fields 52 | .contains(id) 53 | .option( 54 | doc.getAsOpt(id)(base).map(prism.upcast) 55 | ) 56 | .toList 57 | } 58 | } yield altResult 59 | 60 | val altIds = alts.map(_.id) 61 | results match { 62 | case Some(x) :: Nil => x 63 | case None :: Nil => throw TypeDoesNotMatch(s"Could not deserialize ${alts.head.id}") 64 | case Nil => throw DocumentKeyNotFound(s"No fields found matching any of $altIds") 65 | case _ => throw MultipleKeysFound(s"More than one matching field found among $altIds}") 66 | } 67 | } 68 | 69 | case OneOfSchema(alts, Some(discriminatorField)) => 70 | BSONDocumentReader.from[I] { doc => 71 | for { 72 | altId <- 
doc.getAsTry[String](discriminatorField) 73 | Alt(_, base, prism) <- alts 74 | .find(_.id == altId) 75 | .toTry(DocumentKeyNotFound(s"No '$discriminatorField' case of value '$altId'")) 76 | altResult <- doc.asTry(base).map(prism.upcast) 77 | } yield altResult 78 | } 79 | 80 | case RecordSchema(rb) => 81 | decodeObj(rb) 82 | 83 | case IsoSchema(base, iso) => 84 | base.afterRead(iso.get) 85 | } 86 | } 87 | 88 | def annDecoderAlg[P[_]: FromBson, Ann[_]](implicit 89 | interpret: Ann ~> λ[T => Endo[BSONReader[T]]] 90 | ): HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], BSONReader] = 91 | new HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], BSONReader] { 92 | override def apply[I](s: HEnvT[Ann, SchemaF[P, *[_], *], BSONReader, I]): BSONReader[I] = 93 | interpret(s.ask).apply(decoderAlg[P].apply(s.fa)) 94 | } 95 | 96 | def decodeObj[I](rb: FreeApplicative[PropSchema[I, BSONReader, *], I]): BSONReader[I] = { 97 | implicit val djap: Applicative[BSONReader[*]] = new Applicative[BSONReader] { 98 | override def pure[T](a: T): BSONReader[T] = BSONReader[T](_ => a) 99 | 100 | override def ap[T, U](ff: BSONReader[T => U])(fa: BSONReader[T]): BSONReader[U] = 101 | (v: BSONValue) => ff.readTry(v).flatMap(fa.readTry(v).map(_)) 102 | } 103 | 104 | rb.foldMap( 105 | new (PropSchema[I, BSONReader, *] ~> BSONReader) { 106 | def apply[B](ps: PropSchema[I, BSONReader, B]): BSONReader[B] = ps match { 107 | case Required(field, base, _, None) => 108 | BSONDocumentReader[B](doc => doc.getAsOpt[B](field)(base).getOrElse(throw DocumentKeyNotFound(field))) 109 | 110 | case Required(field, base, _, Some(default)) => 111 | BSONDocumentReader[B](doc => doc.getAsOpt[B](field)(base).getOrElse(default)) 112 | 113 | case opt: Optional[I, BSONReader, i] => 114 | BSONDocumentReader[B](doc => doc.getAsOpt[i](opt.fieldName)(opt.base)) 115 | 116 | case Constant(_, value, _) => 117 | BSONReader[B](_ => value) 118 | 119 | case abs: Absent[I, BSONReader, i] => 120 | BSONReader(_ => Option.empty[i]) 121 | 
} 122 | } 123 | ) 124 | } 125 | 126 | implicit def eitherKFromBson[P[_]: FromBson, Q[_]: FromBson]: FromBson[EitherK[P, Q, *]] = 127 | new FromBson[EitherK[P, Q, *]] { 128 | override val reader = new (EitherK[P, Q, *] ~> BSONReader) { 129 | def apply[A](p: EitherK[P, Q, A]): BSONReader[A] = 130 | p.run.fold( 131 | FromBson[P].reader(_), 132 | FromBson[Q].reader(_), 133 | ) 134 | } 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /reactivemongo/src/main/scala-2/morphling/reactivemongo/ToBson.scala: -------------------------------------------------------------------------------- 1 | package morphling.reactivemongo 2 | 3 | import cats.* 4 | import cats.data.State.* 5 | import cats.data.{EitherK, State} 6 | import cats.free.* 7 | import morphling.Schema.* 8 | import morphling.annotated.Schema.AnnotatedSchema 9 | import morphling.{Absent, Alt, Constant, HAlgebra, HFix, IsoSchema, OneOfSchema, Optional, PrimSchema, PropSchema, RecordSchema, Required, SchemaF} 10 | import mouse.option.* 11 | import reactivemongo.api.bson.* 12 | import simulacrum_.typeclass 13 | 14 | import scala.util.{Success, Try} 15 | 16 | @typeclass 17 | trait ToBson[S[_]] { 18 | def writer: S ~> BSONWriter 19 | } 20 | 21 | object ToBson { 22 | implicit class ToBsonOps[F[_], A](fa: F[A]) { 23 | def writer(implicit TB: ToBson[F]): BSONWriter[A] = TB.writer(fa) 24 | } 25 | 26 | implicit def schemaToBson[P[_]: ToBson]: ToBson[Schema[P, *]] = new ToBson[Schema[P, *]] { 27 | override val writer: Schema[P, *] ~> BSONWriter = new (Schema[P, *] ~> BSONWriter) { 28 | override def apply[I](schema: Schema[P, I]): BSONWriter[I] = 29 | HFix.cataNT[SchemaF[P, *[_], *], BSONWriter](serializeAlg).apply(schema) 30 | } 31 | } 32 | 33 | implicit def annSchemaToBson[P[_]: ToBson, A[_]]: ToBson[AnnotatedSchema[P, A, *]] = 34 | new ToBson[AnnotatedSchema[P, A, *]] { 35 | override val writer: AnnotatedSchema[P, A, *] ~> BSONWriter = new (AnnotatedSchema[P, A, *] ~> 
BSONWriter) { 36 | override def apply[I](schema: AnnotatedSchema[P, A, I]): BSONWriter[I] = 37 | HFix 38 | .cataNT[SchemaF[P, *[_], *], BSONWriter](serializeAlg) 39 | .apply( 40 | HFix.forget[SchemaF[P, *[_], *], A].apply(schema) 41 | ) 42 | } 43 | } 44 | 45 | def serializeAlg[P[_]: ToBson]: HAlgebra[SchemaF[P, *[_], *], BSONWriter] = 46 | new HAlgebra[SchemaF[P, *[_], *], BSONWriter] { 47 | def apply[I](schema: SchemaF[P, BSONWriter, I]): BSONWriter[I] = 48 | schema match { 49 | case s: PrimSchema[P, BSONWriter, I] => 50 | ToBson[P].writer(s.prim) 51 | 52 | case s: OneOfSchema[P, BSONWriter, I] => 53 | (value: I) => 54 | s.discriminator.cata( 55 | dField => 56 | s.alts 57 | .map { case alt: Alt[BSONWriter, I, i] => 58 | alt.subset 59 | .getOption(value) 60 | .map(v => 61 | alt.base.writeTry(v).map { 62 | case BSONDocument(elems) => 63 | BSONDocument((BSONElement(dField, BSONString(alt.id)) +: elems): _*) 64 | case other => other 65 | } 66 | ) 67 | } 68 | .collect { case Some(doc) => doc } 69 | .head, 70 | Success( 71 | s.alts 72 | .map { case alt: Alt[BSONWriter, I, i] => 73 | alt.subset.getOption(value).flatMap(alt.base.writeOpt(_)).map(bson => document(alt.id -> bson)) 74 | } 75 | .collect { case Some(bson) => bson } 76 | .head 77 | ) 78 | ) 79 | 80 | case s: RecordSchema[P, BSONWriter, I] => 81 | serializeObjF[P, I](s.props).asInstanceOf[BSONWriter[I]] 82 | 83 | case s: IsoSchema[P, BSONWriter, i0, I] => 84 | s.base.beforeWrite(s.eqv.upcast(_)) 85 | } 86 | } 87 | 88 | def serializeObjF[P[_]: ToBson, I](rb: FreeApplicative[PropSchema[I, BSONWriter, *], I]): BSONDocumentWriter[I] = { 89 | (value: I) => 90 | rb.foldMap[State[Try[BSONDocument], *]]( 91 | new (PropSchema[I, BSONWriter, *] ~> State[Try[BSONDocument], *]) { 92 | def apply[B](ps: PropSchema[I, BSONWriter, B]): State[Try[BSONDocument], B] = 93 | for { 94 | _ <- modify { (tryDoc: Try[BSONDocument]) => 95 | tryDoc.flatMap { (doc: BSONDocument) => 96 | ps match { 97 | case req: Required[I, BSONWriter, i] 
=> 98 | req.base.writeTry(req.extract.extract(value)).map(doc ++ BSONElement(req.fieldName, _)) 99 | 100 | case opt: Optional[I, BSONWriter, i] => 101 | opt.extract 102 | .extract(value) 103 | .cata( 104 | v => 105 | opt.base 106 | .writeTry(v) 107 | .map(doc ++ BSONElement(opt.fieldName, _)), 108 | Success(doc) 109 | ) 110 | 111 | case const: Constant[I, BSONWriter, i] => Success(doc) 112 | 113 | case abs: Absent[I, BSONWriter, i] => Success(doc) 114 | } 115 | } 116 | } 117 | } yield ps.extract.extract(value) 118 | } 119 | ).runS(Success(document)) 120 | .value 121 | } 122 | 123 | implicit def eitherKToBson[P[_]: ToBson, Q[_]: ToBson]: ToBson[EitherK[P, Q, *]] = 124 | new ToBson[EitherK[P, Q, *]] { 125 | override val writer = new (EitherK[P, Q, *] ~> BSONWriter) { 126 | def apply[A](p: EitherK[P, Q, A]): BSONWriter[A] = 127 | p.run.fold( 128 | ToBson[P].writer(_), 129 | ToBson[Q].writer(_) 130 | ) 131 | } 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /reactivemongo/src/main/scala-3/morphling/reactivemongo/FromBson.scala: -------------------------------------------------------------------------------- 1 | package morphling.reactivemongo 2 | 3 | import cats.* 4 | import cats.data.EitherK 5 | import cats.free.* 6 | import morphling.Schema.Schema 7 | import morphling.annotated.Schema.AnnotatedSchema 8 | import morphling.given 9 | import morphling.{Absent, Alt, Constant, HAlgebra, HEnvT, HFix, IsoSchema, OneOfSchema, Optional, PrimSchema, PropSchema, RecordSchema, Required, SchemaF} 10 | import mouse.boolean.* 11 | import mouse.option.* 12 | import reactivemongo.api.bson.* 13 | 14 | trait FromBson[S[_]] extends Serializable { 15 | def reader: S ~> BSONReader 16 | 17 | extension [F[_], A](fa: F[A])(using FB: FromBson[F]) { 18 | def reader: BSONReader[A] = FB.reader(fa) 19 | } 20 | } 21 | 22 | object FromBson { 23 | def apply[P[_]](using fb: FromBson[P]): FromBson[P] = fb 24 | 25 | given [P[_]: FromBson]: 
FromBson[Schema[P, _]] = 26 | new FromBson[Schema[P, _]] { 27 | override val reader: Schema[P, _] ~> BSONReader = new (Schema[P, _] ~> BSONReader) { 28 | override def apply[I](schema: Schema[P, I]): BSONReader[I] = 29 | HFix.cataNT[[Y[_], Z] =>> SchemaF[P, Y, Z], BSONReader](decoderAlg[P]).apply(schema) 30 | } 31 | } 32 | 33 | given [P[_]: FromBson, A[_]: [Y[_]] =>> Y ~> ([T] =>> Endo[BSONReader[T]])]: FromBson[AnnotatedSchema[P, A, _]] = 34 | new FromBson[AnnotatedSchema[P, A, _]] { 35 | override val reader: AnnotatedSchema[P, A, *] ~> BSONReader = new (AnnotatedSchema[P, A, _] ~> BSONReader) { 36 | override def apply[I](schema: AnnotatedSchema[P, A, I]): BSONReader[I] = 37 | HFix 38 | .cataNT[[Y1[_], Z1] =>> HEnvT[A, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], BSONReader](annDecoderAlg[P, A]) 39 | .apply(schema) 40 | } 41 | } 42 | 43 | def decoderAlg[P[_]: FromBson]: HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], BSONReader] = 44 | new HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], BSONReader] { 45 | def apply[I](s: SchemaF[P, BSONReader, I]): BSONReader[I] = s match { 46 | case PrimSchema(p) => 47 | FromBson[P].reader(p) 48 | 49 | case OneOfSchema(alts, None) => 50 | BSONDocumentReader[I] { doc => 51 | val results = for { 52 | fields <- doc.elements.map(_.name).toList 53 | altResult <- alts.toList flatMap { case Alt(id, base, prism) => 54 | fields 55 | .contains(id) 56 | .option( 57 | doc.getAsOpt(id)(base).map(prism.upcast) 58 | ) 59 | .toList 60 | } 61 | } yield altResult 62 | 63 | val altIds = alts.map(_.id) 64 | results match { 65 | case Some(x) :: Nil => x 66 | case None :: Nil => throw TypeDoesNotMatch(s"Could not deserialize ${alts.head.id}") 67 | case Nil => throw DocumentKeyNotFound(s"No fields found matching any of $altIds") 68 | case _ => throw MultipleKeysFound(s"More than one matching field found among $altIds}") 69 | } 70 | } 71 | 72 | case OneOfSchema(alts, Some(discriminatorField)) => 73 | BSONDocumentReader.from[I] { doc => 74 | for { 75 | altId <- 
doc.getAsTry[String](discriminatorField) 76 | alt <- alts 77 | .find(_.id == altId) 78 | .toTry(DocumentKeyNotFound(s"No '$discriminatorField' case of value '$altId'")) 79 | altResult <- doc.asTry(alt.base).map(alt.subset.upcast) 80 | } yield altResult 81 | } 82 | 83 | case RecordSchema(rb) => 84 | decodeObj(rb) 85 | 86 | case IsoSchema(base, iso) => 87 | base.afterRead(iso.get) 88 | } 89 | } 90 | 91 | def annDecoderAlg[P[_]: FromBson, Ann[_]](implicit 92 | interpret: Ann ~> ([T] =>> Endo[BSONReader[T]]) 93 | ): HAlgebra[[Y1[_], Z1] =>> HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], BSONReader] = 94 | new HAlgebra[[Y1[_], Z1] =>> HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], BSONReader] { 95 | override def apply[I](s: HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], BSONReader, I]): BSONReader[I] = 96 | interpret(s.ask).apply(decoderAlg[P].apply(s.fa)) 97 | } 98 | 99 | def decodeObj[I](rb: FreeApplicative[PropSchema[I, BSONReader, _], I]): BSONReader[I] = { 100 | given Applicative[BSONReader[_]] = new Applicative[BSONReader] { 101 | override def pure[T](a: T): BSONReader[T] = BSONReader[T](_ => a) 102 | 103 | override def ap[T, U](ff: BSONReader[T => U])(fa: BSONReader[T]): BSONReader[U] = 104 | (v: BSONValue) => ff.readTry(v).flatMap(fa.readTry(v).map(_)) 105 | } 106 | 107 | rb.foldMap( 108 | new (PropSchema[I, BSONReader, _] ~> BSONReader) { 109 | def apply[B](ps: PropSchema[I, BSONReader, B]): BSONReader[B] = ps match { 110 | case Required(field, base, _, None) => 111 | BSONDocumentReader[B](doc => doc.getAsOpt[B](field)(base).getOrElse(throw DocumentKeyNotFound(field))) 112 | 113 | case Required(field, base, _, Some(default)) => 114 | BSONDocumentReader[B](doc => doc.getAsOpt[B](field)(base).getOrElse(default)) 115 | 116 | case opt: Optional[I, BSONReader, i] @unchecked => 117 | BSONDocumentReader[B](doc => doc.getAsOpt[i](opt.fieldName)(opt.base)) 118 | 119 | case Constant(_, value, _) => 120 | BSONReader[B](_ => value) 121 | 122 | case abs: Absent[I, 
BSONReader, i] @unchecked => 123 | BSONReader(_ => Option.empty[i]) 124 | } 125 | } 126 | ) 127 | } 128 | 129 | given [P[_]: FromBson, Q[_]: FromBson]: FromBson[EitherK[P, Q, _]] = 130 | new FromBson[EitherK[P, Q, _]] { 131 | override val reader = new (EitherK[P, Q, _] ~> BSONReader) { 132 | def apply[A](p: EitherK[P, Q, A]): BSONReader[A] = 133 | p.run.fold( 134 | FromBson[P].reader(_), 135 | FromBson[Q].reader(_), 136 | ) 137 | } 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /reactivemongo/src/main/scala-3/morphling/reactivemongo/ToBson.scala: -------------------------------------------------------------------------------- 1 | package morphling.reactivemongo 2 | 3 | import cats.* 4 | import cats.data.State.* 5 | import cats.data.{EitherK, State} 6 | import cats.free.* 7 | import morphling.Schema.* 8 | import morphling.annotated.Schema.AnnotatedSchema 9 | import morphling.{Absent, Alt, Constant, HAlgebra, HFix, IsoSchema, OneOfSchema, Optional, PrimSchema, PropSchema, RecordSchema, Required, SchemaF} 10 | import mouse.option.* 11 | import reactivemongo.api.bson.* 12 | 13 | import scala.util.{Success, Try} 14 | 15 | trait ToBson[S[_]] extends Serializable { 16 | def writer: S ~> BSONWriter 17 | 18 | extension [F[_], A](fa: F[A])(using TB: ToBson[F]) { 19 | def writer: BSONWriter[A] = TB.writer(fa) 20 | } 21 | } 22 | 23 | object ToBson { 24 | def apply[P[_]](using tb: ToBson[P]): ToBson[P] = tb 25 | 26 | given [P[_]: ToBson]: ToBson[Schema[P, _]] = 27 | new ToBson[Schema[P, _]] { 28 | override val writer: Schema[P, _] ~> BSONWriter = new (Schema[P, _] ~> BSONWriter) { 29 | override def apply[I](schema: Schema[P, I]): BSONWriter[I] = 30 | HFix.cataNT[[Y[_], Z] =>> SchemaF[P, Y, Z], BSONWriter](serializeAlg).apply(schema) 31 | } 32 | } 33 | 34 | given [P[_]: ToBson, A[_]]: ToBson[AnnotatedSchema[P, A, _]] = 35 | new ToBson[AnnotatedSchema[P, A, _]] { 36 | override val writer: AnnotatedSchema[P, A, _] ~> 
BSONWriter = new (AnnotatedSchema[P, A, _] ~> BSONWriter) { 37 | override def apply[I](schema: AnnotatedSchema[P, A, I]): BSONWriter[I] = 38 | HFix 39 | .cataNT[[Y[_], Z] =>> SchemaF[P, Y, Z], BSONWriter](serializeAlg) 40 | .apply( 41 | HFix.forget[[Y[_], Z] =>> SchemaF[P, Y, Z], A].apply(schema) 42 | ) 43 | } 44 | } 45 | 46 | def serializeAlg[P[_]: ToBson]: HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], BSONWriter] = 47 | new HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], BSONWriter] { 48 | def apply[I](schema: SchemaF[P, BSONWriter, I]): BSONWriter[I] = 49 | schema match { 50 | case s: PrimSchema[P, BSONWriter, I] => 51 | ToBson[P].writer(s.prim) 52 | 53 | case s: OneOfSchema[P, BSONWriter, I] => 54 | (value: I) => 55 | s.discriminator.cata( 56 | dField => 57 | s.alts 58 | .map { case alt: Alt[BSONWriter, I, i] => 59 | alt.subset 60 | .getOption(value) 61 | .map(v => 62 | alt.base.writeTry(v).map { 63 | case BSONDocument(elems) => 64 | BSONDocument((BSONElement(dField, BSONString(alt.id)) +: elems)*) 65 | case other => other 66 | } 67 | ) 68 | } 69 | .collect { case Some(doc) => doc } 70 | .head, 71 | Success( 72 | s.alts 73 | .map { case alt: Alt[BSONWriter, I, i] => 74 | alt.subset.getOption(value).flatMap(alt.base.writeOpt(_)).map(bson => document(alt.id -> bson)) 75 | } 76 | .collect { case Some(bson) => bson } 77 | .head 78 | ) 79 | ) 80 | 81 | case s: RecordSchema[P, BSONWriter, I] => 82 | serializeObjF[P, I](s.props).asInstanceOf[BSONWriter[I]] 83 | 84 | case s: IsoSchema[P, BSONWriter, i0, I] => 85 | s.base.beforeWrite(s.eqv.upcast(_)) 86 | } 87 | } 88 | 89 | def serializeObjF[P[_]: ToBson, I](rb: FreeApplicative[PropSchema[I, BSONWriter, _], I]): BSONDocumentWriter[I] = { 90 | (value: I) => 91 | rb.foldMap[State[Try[BSONDocument], _]]( 92 | new (PropSchema[I, BSONWriter, _] ~> State[Try[BSONDocument], _]) { 93 | def apply[B](ps: PropSchema[I, BSONWriter, B]): State[Try[BSONDocument], B] = 94 | for { 95 | _ <- modify { (tryDoc: Try[BSONDocument]) => 96 | 
tryDoc.flatMap { (doc: BSONDocument) => 97 | ps match { 98 | case req: Required[I, BSONWriter, i] => 99 | req.base.writeTry(req.extract.extract(value)).map(doc ++ BSONElement(req.fieldName, _)) 100 | 101 | case opt: Optional[I, BSONWriter, i] @unchecked => 102 | opt.extract 103 | .extract(value) 104 | .cata( 105 | v => 106 | opt.base 107 | .writeTry(v) 108 | .map(doc ++ BSONElement(opt.fieldName, _)), 109 | Success(doc) 110 | ) 111 | 112 | case const: Constant[I, BSONWriter, i] => Success(doc) 113 | 114 | case abs: Absent[I, BSONWriter, i] @unchecked => Success(doc) 115 | } 116 | } 117 | } 118 | } yield ps.extract.extract(value) 119 | } 120 | ).runS(Success(document)) 121 | .value 122 | } 123 | 124 | given [P[_]: ToBson, Q[_]: ToBson]: ToBson[EitherK[P, Q, _]] = 125 | new ToBson[EitherK[P, Q, _]] { 126 | override val writer = new (EitherK[P, Q, _] ~> BSONWriter) { 127 | def apply[A](p: EitherK[P, Q, A]): BSONWriter[A] = 128 | p.run.fold( 129 | ToBson[P].writer(_), 130 | ToBson[Q].writer(_) 131 | ) 132 | } 133 | } 134 | } 135 | -------------------------------------------------------------------------------- /reactivemongo/src/main/scala/morphling/reactivemongo/MultipleKeysFound.scala: -------------------------------------------------------------------------------- 1 | package morphling.reactivemongo 2 | 3 | case class MultipleKeysFound(message: String) extends Exception 4 | case class TypeDoesNotMatch(message: String) extends Exception 5 | case class DocumentKeyNotFound(message: String) extends Exception 6 | -------------------------------------------------------------------------------- /reactivemongo/src/test/scala/morphling/reactivemongo/Implicits.scala: -------------------------------------------------------------------------------- 1 | package morphling.reactivemongo 2 | 3 | import cats.~> 4 | import morphling.protocol.SType.SSchema 5 | import reactivemongo.api.bson.* 6 | 7 | object Implicits extends ReactivemongoPack { 8 | implicit val primToBson: 
ToBson[SSchema] = new ToBson[SSchema] { self => 9 | val writer: SSchema ~> BSONWriter = new (SSchema ~> BSONWriter) { 10 | override def apply[I](s: SSchema[I]): BSONWriter[I] = 11 | sTypeWriter[SSchema[I]#Inner].apply(s.unmutu) 12 | } 13 | } 14 | 15 | implicit val primFromBson: FromBson[SSchema] = new FromBson[SSchema] { self => 16 | val reader = new (SSchema ~> BSONReader) { 17 | def apply[I](s: SSchema[I]): BSONReader[I] = 18 | sTypeReader[SSchema[I]#Inner].apply(s.unmutu) 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /reactivemongo/src/test/scala/morphling/reactivemongo/ReactivemongoPack.scala: -------------------------------------------------------------------------------- 1 | package morphling.reactivemongo 2 | 3 | import cats.instances.try_.* 4 | import cats.instances.vector.* 5 | import cats.syntax.all.* 6 | import cats.~> 7 | import morphling.protocol.* 8 | import reactivemongo.api.bson.* 9 | 10 | import scala.Float.float2double // scalafix:ok 11 | import scala.util.Success 12 | 13 | trait ReactivemongoPack { 14 | def sTypeWriter[F[_]: ToBson]: (SType[F, *] ~> BSONWriter) = 15 | new (SType[F, *] ~> BSONWriter) { 16 | import ToBson.* 17 | 18 | override def apply[I](st: SType[F, I]): BSONWriter[I] = 19 | st match { 20 | case SNullT() => (_: I) => Success(BSONNull) 21 | case SBoolT() => b => Success(BSONBoolean(b)) 22 | case SIntT() => i => Success(BSONInteger(i)) 23 | case SLongT() => l => Success(BSONLong(l)) 24 | case SFloatT() => f => Success(BSONDouble(f)) 25 | case SDoubleT() => d => Success(BSONDouble(d)) 26 | case SCharT() => c => Success(BSONString(c.toString)) 27 | case SStrT() => s => Success(BSONString(s)) 28 | case sa: SArrayT[F, i] => 29 | (xs: Vector[i]) => xs.traverse(sa.elem.writer.writeTry).map(BSONArray(_)) 30 | } 31 | } 32 | 33 | def sTypeReader[F[_]: FromBson]: (SType[F, *] ~> BSONReader) = 34 | new (SType[F, *] ~> BSONReader) { 35 | import FromBson.* 36 | 37 | override def 
apply[I](st: SType[F, I]): BSONReader[I] = st match { 38 | case SNullT() => BSONReader[I](_ => ()) 39 | case SBoolT() => BSONBooleanLike.Handler.readTry(_).flatMap(_.toBoolean) 40 | case SIntT() => BSONNumberLike.Handler.readTry(_).flatMap(_.toInt) 41 | case SLongT() => BSONNumberLike.Handler.readTry(_).flatMap(_.toLong) 42 | case SFloatT() => BSONNumberLike.Handler.readTry(_).flatMap(_.toFloat) 43 | case SDoubleT() => BSONNumberLike.Handler.readTry(_).flatMap(_.toDouble) 44 | case SCharT() => BSONStringHandler.afterRead(_.head) 45 | case SStrT() => BSONStringHandler 46 | case sa: SArrayT[F, i] => 47 | BSONReader.collect { case arr: BSONArray => arr }.readTry(_).flatMap { arr => 48 | arr.values.toVector.traverse(sa.elem.reader.readTry) 49 | } 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /reactivemongo/src/test/scala/morphling/reactivemongo/ReactivemongoSpec.scala: -------------------------------------------------------------------------------- 1 | package morphling.reactivemongo 2 | 3 | import morphling.reactivemongo.FromBson.* 4 | import morphling.reactivemongo.Implicits.* 5 | import morphling.reactivemongo.ToBson.* 6 | import morphling.samples.* 7 | import morphling.scalacheck.Implicits.* 8 | import morphling.scalacheck.ToGen.* 9 | import org.scalacheck.Arbitrary 10 | import org.scalatest.TryValues 11 | import org.scalatest.funsuite.AnyFunSuite 12 | import org.scalatest.matchers.should.Matchers 13 | import org.scalatestplus.scalacheck.Checkers 14 | import reactivemongo.api.bson.* 15 | 16 | import scala.util.Success 17 | 18 | class ReactivemongoSpec extends AnyFunSuite with Matchers with TryValues with Checkers { 19 | test("A value should serialise to BSON") { 20 | val result = Person.schema.writer.writeTry(person).success.value 21 | result shouldBe document( 22 | "updateCounter" -> 42, 23 | "roles" -> array( 24 | document( 25 | "administrator" -> document( 26 | "subordinateCount" -> 0, 27 | 
"department" -> "windmill-tilting" 28 | ) 29 | ) 30 | ), 31 | "birthDate" -> 20147028000L, 32 | "name" -> "Kris Nuttycombe" 33 | ) 34 | } 35 | 36 | test("A value should be deserialised from BSON") { 37 | val result = Person.schema.writer.writeTry(person).success.value 38 | Person.schema.reader.readTry(result) shouldBe Success(person.copy(stamp = 101)) 39 | } 40 | 41 | test("A default value should be applied during deserialization") { 42 | val result = Person.schema.writer.writeTry(person).success.value.asInstanceOf[BSONDocument] 43 | Person.schema.reader.readTry(result -- "updateCounter") shouldBe Success(person.copy(updateCounter = 0, stamp = 101)) 44 | } 45 | 46 | test("Serialization should round-trip values produced by a generator") { 47 | implicit val arbPerson: Arbitrary[Person] = Arbitrary(Person.schema.gen) 48 | check { (p: Person) => 49 | Person.schema.reader.readOpt(Person.schema.writer.writeTry(p).get) == Some(p) 50 | } 51 | } 52 | 53 | test("A value should serialise to BSON flat") { 54 | val result = Person.flatSchema.writer.writeTry(person).success.value 55 | result shouldBe document( 56 | "updateCounter" -> 42, 57 | "roles" -> array( 58 | document( 59 | "type" -> "administrator", 60 | "subordinateCount" -> 0, 61 | "department" -> "windmill-tilting" 62 | ) 63 | ), 64 | "birthDate" -> 20147028000L, 65 | "name" -> "Kris Nuttycombe" 66 | ) 67 | } 68 | 69 | test("A value should be deserialised from BSON flat") { 70 | val result = Person.flatSchema.writer.writeTry(person).success.value 71 | Person.flatSchema.reader.readTry(result) shouldBe Success(person.copy(stamp = 101)) 72 | } 73 | 74 | test("Flat serialization should round-trip values produced by a generator") { 75 | implicit val arbPerson: Arbitrary[Person] = Arbitrary(Person.flatSchema.gen) 76 | check { (p: Person) => 77 | Person.flatSchema.reader.readOpt(Person.flatSchema.writer.writeTry(p).get) == Some(p) 78 | } 79 | } 80 | } 81 | 
-------------------------------------------------------------------------------- /reactivemongo/src/test/scala/morphling/reactivemongo/annotated/Implicits.scala: -------------------------------------------------------------------------------- 1 | package morphling.reactivemongo.annotated 2 | 3 | import cats.{Endo, ~>} 4 | import morphling.protocol.annotated.STypeAnn.ASchema 5 | import morphling.protocol.annotated.{Non, Range, Restriction} 6 | import morphling.reactivemongo.{FromBson, ReactivemongoPack, ToBson} 7 | import reactivemongo.api.bson.* 8 | 9 | object Implicits extends ReactivemongoPack { 10 | implicit val readerRestrictions: (Restriction ~> λ[T => Endo[BSONReader[T]]]) = 11 | new (Restriction ~> λ[T => Endo[BSONReader[T]]]) { 12 | override def apply[A](rs: Restriction[A]): Endo[BSONReader[A]] = rs match { 13 | case Non() => identity 14 | case Range(from, to) => 15 | (rdr: BSONReader[Int]) => 16 | rdr 17 | .afterRead(i => i.ensuring(i > from, s"Value should be greater than $from")) 18 | .afterRead(i => i.ensuring(i < to, s"Value should be less than $to")) 19 | } 20 | } 21 | 22 | implicit val primToBson: ToBson[ASchema] = new ToBson[ASchema] { self => 23 | val writer: ASchema ~> BSONWriter = new (ASchema ~> BSONWriter) { 24 | override def apply[I](s: ASchema[I]): BSONWriter[I] = 25 | sTypeWriter[ASchema[I]#Inner].apply(s.unmutu) 26 | } 27 | } 28 | 29 | implicit val primFromBson: FromBson[ASchema] = new FromBson[ASchema] { self => 30 | val reader = new (ASchema ~> BSONReader) { 31 | def apply[I](s: ASchema[I]): BSONReader[I] = 32 | sTypeReader[ASchema[I]#Inner].apply(s.unmutu) 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /reactivemongo/src/test/scala/morphling/reactivemongo/annotated/ReactivemongoAnnotatedSpec.scala: -------------------------------------------------------------------------------- 1 | package morphling.reactivemongo.annotated 2 | 3 | import morphling.reactivemongo.FromBson.* 4 | 
import morphling.reactivemongo.ToBson.* 5 | import morphling.reactivemongo.annotated.Implicits.* 6 | import morphling.samples.annotated.{AnnPerson, Server} 7 | import morphling.samples.{Person, person} 8 | import morphling.scalacheck.ToGen.* 9 | import morphling.scalacheck.annotated.Implicits.* 10 | import org.scalacheck.Arbitrary 11 | import org.scalatest.TryValues 12 | import org.scalatest.funsuite.AnyFunSuite 13 | import org.scalatest.matchers.should.Matchers 14 | import org.scalatestplus.scalacheck.Checkers 15 | import reactivemongo.api.bson.* 16 | 17 | import scala.util.Success 18 | 19 | class ReactivemongoAnnotatedSpec extends AnyFunSuite with Matchers with TryValues with Checkers { 20 | private val failure = Symbol("failure") 21 | 22 | test("A value should serialise to BSON") { 23 | val result = AnnPerson.schema.writer.writeTry(person).success.value 24 | result shouldBe document( 25 | "updateCounter" -> 42, 26 | "roles" -> array( 27 | document( 28 | "administrator" -> document( 29 | "subordinateCount" -> 0, 30 | "department" -> "windmill-tilting" 31 | ) 32 | ) 33 | ), 34 | "birthDate" -> 20147028000L, 35 | "name" -> "Kris Nuttycombe" 36 | ) 37 | } 38 | 39 | test("A value should be deserialised from BSON") { 40 | val result = AnnPerson.schema.writer.writeTry(person).success.value 41 | AnnPerson.schema.reader.readTry(result) shouldBe Success(person.copy(stamp = 101)) 42 | } 43 | 44 | test("A default value should be applied during deserialization") { 45 | val result = AnnPerson.schema.writer.writeTry(person).success.value.asInstanceOf[BSONDocument] 46 | AnnPerson.schema.reader.readTry(result -- "updateCounter") shouldBe Success( 47 | person.copy(updateCounter = 0, stamp = 101) 48 | ) 49 | } 50 | 51 | test("Serialization should round-trip values produced by a generator") { 52 | implicit val arbPerson: Arbitrary[Person] = Arbitrary(AnnPerson.schema.gen) 53 | check { (p: Person) => 54 | AnnPerson.schema.reader.readOpt(AnnPerson.schema.writer.writeTry(p).get) == 
Some(p) 55 | } 56 | } 57 | 58 | test("Deserialization should fail if some value does not fit limitations") { 59 | val decoder = Server.schema.reader 60 | 61 | decoder.readTry(document("host" -> "peka.com", "port" -> 0)) shouldBe failure 62 | decoder.readTry(document("host" -> "peka.com", "port" -> 70000)) shouldBe failure 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /scalacheck/src/main/scala-2/morphling/scalacheck/ToGen.scala: -------------------------------------------------------------------------------- 1 | package morphling.scalacheck 2 | 3 | import cats.* 4 | import cats.data.EitherK 5 | import cats.free.* 6 | import morphling.* 7 | import morphling.Schema.Schema 8 | import morphling.annotated.Schema.AnnotatedSchema 9 | import mouse.option.* 10 | import org.scalacheck.Gen 11 | import simulacrum_.typeclass 12 | 13 | @typeclass 14 | trait ToGen[S[_]] { 15 | def toGen: S ~> Gen 16 | } 17 | 18 | object ToGen { 19 | implicit class ToGenOps[S[_], A](s: S[A]) { 20 | def gen(implicit TG: ToGen[S]): Gen[A] = TG.toGen(s) 21 | } 22 | 23 | implicit def schemaToGen[P[_]: ToGen]: ToGen[Schema[P, *]] = new ToGen[Schema[P, *]] { 24 | override val toGen: Schema[P, *] ~> Gen = new (Schema[P, *] ~> Gen) { 25 | override def apply[I](schema: Schema[P, I]): Gen[I] = 26 | HFix.cataNT[SchemaF[P, *[_], *], Gen](genAlg).apply(schema) 27 | } 28 | } 29 | 30 | implicit def annSchemaToGen[P[_]: ToGen, A[_]: *[_] ~> λ[T => Endo[Gen[T]]]]: ToGen[AnnotatedSchema[P, A, *]] = 31 | new ToGen[AnnotatedSchema[P, A, *]] { 32 | override val toGen: AnnotatedSchema[P, A, *] ~> Gen = new (AnnotatedSchema[P, A, *] ~> Gen) { 33 | override def apply[I](schema: AnnotatedSchema[P, A, I]): Gen[I] = 34 | HFix.cataNT[HEnvT[A, SchemaF[P, *[_], *], *[_], *], Gen](annGenAlg).apply(schema) 35 | } 36 | } 37 | 38 | def genAlg[P[_]: ToGen]: HAlgebra[SchemaF[P, *[_], *], Gen] = 39 | new HAlgebra[SchemaF[P, *[_], *], Gen] { 40 | def apply[I](schema: SchemaF[P, 
Gen, I]): Gen[I] = schema match { 41 | case s: PrimSchema[P, Gen, I] => ToGen[P].toGen(s.prim) 42 | case s: OneOfSchema[P, Gen, I] => 43 | val altGens = s.alts.map { case Alt(_, b, p) => b.map(p.upcast) } 44 | altGens.tail.headOption.cata( 45 | th => Gen.oneOf(altGens.head, th, altGens.tail.tail*), 46 | altGens.head 47 | ) 48 | 49 | case s: RecordSchema[P, Gen, I] => recordGen[P, I](s.props) 50 | case s: IsoSchema[P, Gen, i0, I] => s.base.map(s.eqv.get(_)) 51 | } 52 | } 53 | 54 | def annGenAlg[P[_]: ToGen, Ann[_]](implicit 55 | interpret: Ann ~> λ[T => Endo[Gen[T]]] 56 | ): HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], Gen] = 57 | new HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], Gen] { 58 | override def apply[I](schema: HEnvT[Ann, SchemaF[P, *[_], *], Gen, I]): Gen[I] = 59 | interpret.apply(schema.ask).apply(genAlg[P].apply(schema.fa)) 60 | } 61 | 62 | def recordGen[P[_]: ToGen, I](rb: FreeApplicative[PropSchema[I, Gen, *], I]): Gen[I] = { 63 | implicit val djap: Applicative[Gen] = new Applicative[Gen] { 64 | override def pure[T](x: T): Gen[T] = Gen.const(x) 65 | 66 | override def ap[T, U](ff: Gen[T => U])(fa: Gen[T]): Gen[U] = 67 | fa.flatMap(a => ff.map(_(a))) 68 | } 69 | 70 | rb.foldMap( 71 | new (PropSchema[I, Gen, *] ~> Gen) { 72 | def apply[B](ps: PropSchema[I, Gen, B]): Gen[B] = ps match { 73 | case Required(_, base, _, _) => base 74 | case opt: Optional[I, Gen, i] => Gen.option(opt.base) 75 | case Constant(_, value, _) => Gen.const(value) 76 | case abs: Absent[I, Gen, i] => Gen.const(Option.empty[i]) 77 | } 78 | } 79 | ) 80 | } 81 | 82 | implicit def eitherKGen[P[_]: ToGen, Q[_]: ToGen]: ToGen[EitherK[P, Q, *]] = new ToGen[EitherK[P, Q, *]] { 83 | override val toGen: EitherK[P, Q, *] ~> Gen = new (EitherK[P, Q, *] ~> Gen) { 84 | override def apply[A](fa: EitherK[P, Q, A]): Gen[A] = fa.run.fold( 85 | ToGen[P].toGen(_), 86 | ToGen[Q].toGen(_), 87 | ) 88 | } 89 | } 90 | } 91 | 
-------------------------------------------------------------------------------- /scalacheck/src/main/scala-3/morphling/scalacheck/ToGen.scala: -------------------------------------------------------------------------------- 1 | package morphling.scalacheck 2 | 3 | import cats.* 4 | import cats.data.EitherK 5 | import cats.free.* 6 | import morphling.* 7 | import morphling.Schema.Schema 8 | import morphling.annotated.Schema.AnnotatedSchema 9 | import morphling.given 10 | import mouse.option.* 11 | import org.scalacheck.Gen 12 | 13 | trait ToGen[S[_]] extends Serializable { 14 | def toGen: S ~> Gen 15 | 16 | extension [S[_], A](s: S[A])(using TG: ToGen[S]) def gen: Gen[A] = TG.toGen(s) 17 | } 18 | 19 | object ToGen { 20 | def apply[S[_]](using toGen: ToGen[S]): ToGen[S] = toGen 21 | 22 | given [P[_]: ToGen]: ToGen[Schema[P, _]] = 23 | new ToGen[Schema[P, _]] { 24 | override val toGen: Schema[P, _] ~> Gen = new (Schema[P, _] ~> Gen) { 25 | override def apply[I](schema: Schema[P, I]): Gen[I] = 26 | HFix.cataNT[[Y[_], Z] =>> SchemaF[P, Y, Z], Gen](genAlg).apply(schema) 27 | } 28 | } 29 | 30 | given [P[_]: ToGen, A[_]: [Y[_]] =>> Y ~> ([T] =>> Endo[Gen[T]])]: ToGen[AnnotatedSchema[P, A, _]] = 31 | new ToGen[AnnotatedSchema[P, A, *]] { 32 | override val toGen: AnnotatedSchema[P, A, _] ~> Gen = new (AnnotatedSchema[P, A, _] ~> Gen) { 33 | override def apply[I](schema: AnnotatedSchema[P, A, I]): Gen[I] = 34 | HFix.cataNT[[Y1[_], Z1] =>> HEnvT[A, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], Gen](annGenAlg).apply(schema) 35 | } 36 | } 37 | 38 | def genAlg[P[_]: ToGen]: HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], Gen] = 39 | new HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], Gen] { 40 | def apply[I](schema: SchemaF[P, Gen, I]): Gen[I] = schema match { 41 | case s: PrimSchema[P, Gen, I] => ToGen[P].toGen(s.prim) 42 | case s: OneOfSchema[P, Gen, I] => 43 | val altGens = s.alts.map { case Alt(_, b, p) => b.map(p.upcast) } 44 | altGens.tail.headOption.cata( 45 | th => Gen.oneOf(altGens.head, 
th, altGens.tail.tail*), 46 | altGens.head 47 | ) 48 | 49 | case s: RecordSchema[P, Gen, I] => recordGen[P, I](s.props) 50 | case s: IsoSchema[P, Gen, i0, I] => s.base.map(s.eqv.get(_)) 51 | } 52 | } 53 | 54 | def annGenAlg[P[_]: ToGen, Ann[_]](using 55 | interpret: Ann ~> ([T] =>> Endo[Gen[T]]) 56 | ): HAlgebra[[Y1[_], Z1] =>> HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], Gen] = 57 | new HAlgebra[[Y1[_], Z1] =>> HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], Gen] { 58 | override def apply[I](schema: HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Gen, I]): Gen[I] = 59 | interpret.apply(schema.ask).apply(genAlg[P].apply(schema.fa)) 60 | } 61 | 62 | def recordGen[P[_]: ToGen, I](rb: FreeApplicative[PropSchema[I, Gen, _], I]): Gen[I] = { 63 | implicit val djap: Applicative[Gen] = new Applicative[Gen] { 64 | override def pure[T](x: T): Gen[T] = Gen.const(x) 65 | 66 | override def ap[T, U](ff: Gen[T => U])(fa: Gen[T]): Gen[U] = 67 | fa.flatMap(a => ff.map(_(a))) 68 | } 69 | 70 | rb.foldMap( 71 | new (PropSchema[I, Gen, _] ~> Gen) { 72 | def apply[B](ps: PropSchema[I, Gen, B]): Gen[B] = ps match { 73 | case Required(_, base, _, _) => base 74 | case opt: Optional[I, Gen, i] @unchecked => Gen.option(opt.base) 75 | case Constant(_, value, _) => Gen.const(value) 76 | case abs: Absent[I, Gen, i] @unchecked => Gen.const(Option.empty[i]) 77 | } 78 | } 79 | ) 80 | } 81 | 82 | given [P[_]: ToGen, Q[_]: ToGen]: ToGen[EitherK[P, Q, _]] = 83 | new ToGen[EitherK[P, Q, _]] { 84 | override val toGen: EitherK[P, Q, _] ~> Gen = new (EitherK[P, Q, _] ~> Gen) { 85 | override def apply[A](fa: EitherK[P, Q, A]): Gen[A] = fa.run.fold( 86 | ToGen[P].toGen(_), 87 | ToGen[Q].toGen(_), 88 | ) 89 | } 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/morphling/scalacheck/GenPack.scala: -------------------------------------------------------------------------------- 1 | package morphling.scalacheck 2 | 3 | import 
cats.~> 4 | import morphling.protocol.* 5 | import org.scalacheck.Arbitrary.* 6 | import org.scalacheck.Gen 7 | import org.scalacheck.Gen.* 8 | 9 | trait GenPack { 10 | def sTypeGen[F[_]: ToGen]: (SType[F, *] ~> Gen) = 11 | new (SType[F, *] ~> Gen) { 12 | import ToGen.* 13 | 14 | override def apply[A](st: SType[F, A]): Gen[A] = st match { 15 | case SNullT() => arbitrary[Unit] 16 | case SBoolT() => arbitrary[Boolean] 17 | case SIntT() => arbitrary[Int] 18 | case SLongT() => Gen.chooseNum(Long.MinValue + 808L, Long.MaxValue) // Magic number to circumvent Instant#toEpochMillis throwing exceptions 19 | case SFloatT() => arbitrary[Float] 20 | case SDoubleT() => arbitrary[Double] 21 | case SCharT() => arbitrary[Char] 22 | case SStrT() => arbitrary[String] 23 | case arr: SArrayT[F, i] => 24 | containerOf[Vector, i](arr.elem.gen) 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/morphling/scalacheck/Implicits.scala: -------------------------------------------------------------------------------- 1 | package morphling.scalacheck 2 | 3 | import cats.* 4 | import morphling.protocol.SType.SSchema 5 | import org.scalacheck.Gen 6 | 7 | object Implicits extends GenPack { 8 | implicit val primToGen: ToGen[SSchema] = new ToGen[SSchema] { 9 | val toGen = new (SSchema ~> Gen) { 10 | def apply[I](s: SSchema[I]): Gen[I] = sTypeGen[SSchema[I]#Inner].apply(s.unmutu) 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/morphling/scalacheck/annotated/Implicits.scala: -------------------------------------------------------------------------------- 1 | package morphling.scalacheck.annotated 2 | 3 | import cats.{Endo, ~>} 4 | import morphling.protocol.annotated.STypeAnn.ASchema 5 | import morphling.protocol.annotated.{Range, Restriction} 6 | import morphling.scalacheck.{GenPack, ToGen} 7 | import org.scalacheck.Gen 8 | 9 | object 
Implicits extends GenPack { 10 | implicit val genRestriction: (Restriction ~> λ[T => Endo[Gen[T]]]) = 11 | new (Restriction ~> λ[T => Endo[Gen[T]]]) { 12 | override def apply[A](rs: Restriction[A]): Endo[Gen[A]] = rs match { 13 | case Range(from, to) => 14 | (gen: Gen[Int]) => gen.filter(i => i > from && i < to) 15 | case _: Restriction[A] => identity 16 | } 17 | } 18 | 19 | implicit val primToGen: ToGen[ASchema] = new ToGen[ASchema] { self => 20 | val toGen = new (ASchema ~> Gen) { 21 | def apply[I](s: ASchema[I]): Gen[I] = sTypeGen[ASchema[I]#Inner].apply(s.unmutu) 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /tapir/src/main/scala-2/morphling/tapir/ToSchema.scala: -------------------------------------------------------------------------------- 1 | package morphling.tapir 2 | 3 | import cats.* 4 | import cats.data.{Const, EitherK} 5 | import cats.free.FreeApplicative 6 | import morphling.* 7 | import morphling.Schema.Schema 8 | import morphling.annotated.Schema.AnnotatedSchema 9 | import mouse.option.* 10 | import simulacrum_.typeclass 11 | import sttp.tapir.SchemaType.{SCoproduct, SDiscriminator, SProduct, SProductField} 12 | import sttp.tapir.{FieldName, Schema as TapirSchema, SchemaType, Validator} 13 | 14 | @typeclass 15 | trait ToSchema[S[_]] { 16 | def toSchema: S ~> TapirSchema 17 | } 18 | 19 | object ToSchema { 20 | implicit class ToSchemaOps[S[_], A](s: S[A]) { 21 | def schema(implicit TT: ToSchema[S]): TapirSchema[A] = TT.toSchema(s) 22 | } 23 | 24 | implicit def schemaToSchema[P[_]: ToSchema]: ToSchema[Schema[P, *]] = new ToSchema[Schema[P, *]] { 25 | override val toSchema: Schema[P, *] ~> TapirSchema = new (Schema[P, *] ~> TapirSchema) { 26 | override def apply[I](schema: Schema[P, I]): TapirSchema[I] = 27 | HFix.cataNT[SchemaF[P, *[_], *], TapirSchema](schemaAlg[P]).apply(schema) 28 | } 29 | } 30 | 31 | implicit def annSchemaToSchema[P[_]: ToSchema, A[_]: *[_] ~> λ[T => 
Endo[TapirSchema[T]]]] 32 | : ToSchema[AnnotatedSchema[P, A, *]] = 33 | new ToSchema[AnnotatedSchema[P, A, *]] { 34 | override val toSchema: AnnotatedSchema[P, A, *] ~> TapirSchema = 35 | new (AnnotatedSchema[P, A, *] ~> TapirSchema) { 36 | override def apply[I](schema: AnnotatedSchema[P, A, I]): TapirSchema[I] = 37 | HFix.cataNT[HEnvT[A, SchemaF[P, *[_], *], *[_], *], TapirSchema](annSchemaAlg).apply(schema) 38 | } 39 | } 40 | 41 | def schemaAlg[P[_]: ToSchema]: HAlgebra[SchemaF[P, *[_], *], TapirSchema] = 42 | new HAlgebra[SchemaF[P, *[_], *], TapirSchema] { 43 | def apply[I](schema: SchemaF[P, TapirSchema, I]): TapirSchema[I] = schema match { 44 | case s: PrimSchema[P, TapirSchema, I] => ToSchema[P].toSchema(s.prim) 45 | 46 | case s: OneOfSchema[P, TapirSchema, I] => 47 | s.discriminator.cata( 48 | dField => { 49 | val discriminator = SDiscriminator(FieldName(dField, dField), Map.empty) 50 | 51 | TapirSchema( 52 | SCoproduct( 53 | s.alts.map { case Alt(id, schema, subs) => 54 | val discriminatorField = SProductField[I, String]( 55 | FieldName(dField), 56 | TapirSchema.schemaForString, 57 | (t: I) => subs.getOption(t).map(_ => id) 58 | ) 59 | 60 | schema match { 61 | case ProductSchema(product) => 62 | schema.copy( 63 | schemaType = product 64 | .copy(fields = (discriminatorField :: product.fields).asInstanceOf[List[SProductField[Any]]]) 65 | .asInstanceOf[SchemaType[Any]], 66 | validator = schema.validator.asInstanceOf[Validator[Any]] 67 | ) 68 | case _ => schema 69 | } 70 | }.toList, 71 | Some(discriminator) 72 | )(_ => None) 73 | ) 74 | }, 75 | TapirSchema( 76 | SCoproduct( 77 | s.alts.map { case Alt(id, schema, subs) => 78 | TapirSchema(SProduct(SProductField(FieldName(id), schema, subs.getOption) :: Nil)) 79 | }.toList, 80 | None 81 | )(_ => None) 82 | ) 83 | ) 84 | 85 | case s: RecordSchema[P, TapirSchema, I] => recordSchema[P, I](s.props) 86 | case s: IsoSchema[P, TapirSchema, i0, I] => s.base.as[I] 87 | } 88 | } 89 | 90 | def annSchemaAlg[P[_]: ToSchema, 
Ann[_]](implicit 91 | interpret: Ann ~> λ[T => Endo[TapirSchema[T]]] 92 | ): HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], TapirSchema] = 93 | new HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], TapirSchema] { 94 | override def apply[A](schema: HEnvT[Ann, SchemaF[P, *[_], *], TapirSchema, A]): TapirSchema[A] = 95 | interpret.apply(schema.ask).apply(schemaAlg[P].apply(schema.fa)) 96 | } 97 | 98 | def recordSchema[P[_]: ToSchema, I](rb: FreeApplicative[PropSchema[I, TapirSchema, *], I]): TapirSchema[I] = { 99 | val fields = rb 100 | .foldMap[Const[List[SProductField[I]], *]]( 101 | new (PropSchema[I, TapirSchema, *] ~> Const[List[SProductField[I]], *]) { 102 | override def apply[A](ps: PropSchema[I, TapirSchema, A]): Const[List[SProductField[I]], A] = 103 | ps match { 104 | case req: Required[I, TapirSchema, i] => 105 | req.default.cata( 106 | (dv: i) => { 107 | val optionalField = SProductField( 108 | FieldName(req.fieldName), 109 | req.base.default(dv, None), 110 | (req.extract.getOption _).andThen(_.orElse(Option(dv))) 111 | ) 112 | Const.of(optionalField :: Nil) 113 | }, { 114 | val requiredField = SProductField( 115 | FieldName(req.fieldName), 116 | req.base, 117 | req.extract.getOption 118 | ) 119 | Const.of(requiredField :: Nil) 120 | } 121 | ) 122 | case opt: Optional[I, TapirSchema, i] => 123 | val optionalField = SProductField( 124 | FieldName(opt.fieldName), 125 | opt.base.copy(isOptional = true), 126 | opt.extract.extract 127 | ) 128 | Const.of(optionalField :: Nil) 129 | case const: Constant[I, TapirSchema, i] => 130 | Const.of(Nil) 131 | case abs: Absent[I, TapirSchema, i] => 132 | Const.of(Nil) 133 | } 134 | } 135 | ) 136 | .getConst 137 | 138 | TapirSchema(SProduct(fields)) 139 | } 140 | 141 | implicit def eitherKTSchema[P[_]: ToSchema, Q[_]: ToSchema]: ToSchema[EitherK[P, Q, *]] = 142 | new ToSchema[EitherK[P, Q, *]] { 143 | override val toSchema: EitherK[P, Q, *] ~> TapirSchema = 144 | new (EitherK[P, Q, *] ~> TapirSchema) { 145 | override 
def apply[A](fa: EitherK[P, Q, A]): TapirSchema[A] = fa.run.fold( 146 | ToSchema.apply[P].toSchema(_), 147 | ToSchema.apply[Q].toSchema(_), 148 | ) 149 | } 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /tapir/src/main/scala-3/morphling/tapir/ToSchema.scala: -------------------------------------------------------------------------------- 1 | package morphling.tapir 2 | 3 | import cats.* 4 | import cats.data.{Const, EitherK} 5 | import cats.free.* 6 | import morphling.* 7 | import morphling.Schema.* 8 | import morphling.annotated.Schema.AnnotatedSchema 9 | import morphling.given 10 | import mouse.option.* 11 | import sttp.tapir.SchemaType.{SCoproduct, SDiscriminator, SProduct, SProductField} 12 | import sttp.tapir.{FieldName, Schema as TapirSchema, SchemaType, Validator} 13 | 14 | trait ToSchema[S[_]] extends Serializable { 15 | def toSchema: S ~> TapirSchema 16 | 17 | extension [F[_], A](fa: F[A])(using TS: ToSchema[F]) def schema: TapirSchema[A] = TS.toSchema(fa) 18 | } 19 | 20 | object ToSchema { 21 | def apply[P[_]](using ts: ToSchema[P]): ToSchema[P] = ts 22 | 23 | given [P[_]: ToSchema]: ToSchema[Schema[P, _]] = 24 | new ToSchema[Schema[P, _]] { 25 | override val toSchema: Schema[P, _] ~> TapirSchema = new (Schema[P, _] ~> TapirSchema) { 26 | override def apply[I](schema: Schema[P, I]): TapirSchema[I] = 27 | HFix.cataNT[[Y[_], Z] =>> SchemaF[P, Y, Z], TapirSchema](schemaAlg).apply(schema) 28 | } 29 | } 30 | 31 | given [P[_]: ToSchema, A[_]: [Y[_]] =>> Y ~> ([T] =>> Endo[TapirSchema[T]])]: ToSchema[AnnotatedSchema[P, A, *]] = 32 | new ToSchema[AnnotatedSchema[P, A, _]] { 33 | override val toSchema: AnnotatedSchema[P, A, _] ~> TapirSchema = new (AnnotatedSchema[P, A, _] ~> TapirSchema) { 34 | override def apply[I](schema: AnnotatedSchema[P, A, I]): TapirSchema[I] = 35 | HFix 36 | .cataNT[[Y1[_], Z1] =>> HEnvT[A, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], TapirSchema](annSchemaAlg[P, A]) 37 | .apply(schema) 38 
| } 39 | } 40 | 41 | def schemaAlg[P[_]: ToSchema]: HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], TapirSchema] = 42 | new HAlgebra[[Y[_], Z] =>> SchemaF[P, Y, Z], TapirSchema] { 43 | def apply[I](schema: SchemaF[P, TapirSchema, I]): TapirSchema[I] = schema match { 44 | case s: PrimSchema[P, TapirSchema, I] => ToSchema[P].toSchema(s.prim) 45 | 46 | case s: OneOfSchema[P, TapirSchema, I] => 47 | s.discriminator.cata( 48 | dField => { 49 | val discriminator = SDiscriminator(FieldName(dField, dField), Map.empty) 50 | 51 | TapirSchema( 52 | SCoproduct( 53 | s.alts.map { case Alt(id, schema, subs) => 54 | val discriminatorField = SProductField[I, String]( 55 | FieldName(dField), 56 | TapirSchema.schemaForString, 57 | (t: I) => subs.getOption(t).map(_ => id) 58 | ) 59 | 60 | schema match { 61 | case ProductSchema(product) => 62 | schema.copy( 63 | schemaType = product 64 | .copy(fields = (discriminatorField :: product.fields).asInstanceOf[List[SProductField[Any]]]) 65 | .asInstanceOf[SchemaType[Any]], 66 | validator = schema.validator.asInstanceOf[Validator[Any]] 67 | ) 68 | case _ => schema 69 | } 70 | }.toList, 71 | Some(discriminator) 72 | )(_ => None) 73 | ) 74 | }, 75 | TapirSchema( 76 | SCoproduct( 77 | s.alts.map { case Alt(id, schema, subs) => 78 | TapirSchema(SProduct(SProductField(FieldName(id), schema, subs.getOption) :: Nil)) 79 | }.toList, 80 | None 81 | )(_ => None) 82 | ) 83 | ) 84 | 85 | case s: RecordSchema[P, TapirSchema, I] => recordSchema[P, I](s.props) 86 | case s: IsoSchema[P, TapirSchema, i0, I] => s.base.as[I] 87 | } 88 | } 89 | 90 | def annSchemaAlg[P[_]: ToSchema, Ann[_]](implicit 91 | interpret: Ann ~> ([T] =>> Endo[TapirSchema[T]]) 92 | ): HAlgebra[[Y1[_], Z1] =>> HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], TapirSchema] = 93 | new HAlgebra[[Y1[_], Z1] =>> HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], Y1, Z1], TapirSchema] { 94 | override def apply[I](s: HEnvT[Ann, [Y[_], Z] =>> SchemaF[P, Y, Z], TapirSchema, I]): TapirSchema[I] = 95 | 
interpret(s.ask).apply(schemaAlg[P].apply(s.fa)) 96 | } 97 | 98 | def recordSchema[P[_]: ToSchema, I](rb: FreeApplicative[PropSchema[I, TapirSchema, *], I]): TapirSchema[I] = { 99 | val fields = rb 100 | .foldMap[Const[List[SProductField[I]], *]]( 101 | new (PropSchema[I, TapirSchema, *] ~> Const[List[SProductField[I]], *]) { 102 | override def apply[A](ps: PropSchema[I, TapirSchema, A]): Const[List[SProductField[I]], A] = 103 | ps match { 104 | case req: Required[I, TapirSchema, i] => 105 | req.default.cata( 106 | (dv: i) => { 107 | val optionalField = SProductField( 108 | FieldName(req.fieldName), 109 | req.base.default(dv, None), 110 | req.extract.getOption.andThen(_.orElse(Option(dv))) 111 | ) 112 | Const.of(optionalField :: Nil) 113 | }, { 114 | val requiredField = SProductField( 115 | FieldName(req.fieldName), 116 | req.base, 117 | req.extract.getOption 118 | ) 119 | Const.of(requiredField :: Nil) 120 | } 121 | ) 122 | case opt: Optional[I, TapirSchema, i] => 123 | val optionalField = SProductField( 124 | FieldName(opt.fieldName), 125 | opt.base.copy(isOptional = true), 126 | opt.extract.extract 127 | ) 128 | Const.of(optionalField :: Nil) 129 | case const: Constant[I, TapirSchema, i] => 130 | Const.of(Nil) 131 | case abs: Absent[I, TapirSchema, i] => 132 | Const.of(Nil) 133 | } 134 | } 135 | ) 136 | .getConst 137 | 138 | TapirSchema(SProduct(fields)) 139 | } 140 | 141 | given [P[_]: ToSchema, Q[_]: ToSchema]: ToSchema[EitherK[P, Q, _]] = 142 | new ToSchema[EitherK[P, Q, _]] { 143 | override val toSchema = new (EitherK[P, Q, _] ~> TapirSchema) { 144 | def apply[A](p: EitherK[P, Q, A]): TapirSchema[A] = 145 | p.run.fold(ToSchema[P].toSchema(_), ToSchema[Q].toSchema(_)) 146 | } 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /tapir/src/main/scala/morphling/tapir/ProductSchema.scala: -------------------------------------------------------------------------------- 1 | package morphling.tapir 2 | 3 | import 
// NOTE(review): this span is a flattened multi-file listing; each original file is
// delimited below by a `// ===== path =====` header. Code tokens are unchanged;
// only formatting was restored and comments added.

// ===== tapir/.../ProductSchema.scala (head truncated in this listing) =====
// NOTE(review): the line below is truncated by the dump — presumably `import sttp.tapir.Schema`.
sttp.tapir.Schema
import sttp.tapir.SchemaType.SProduct

// Extractor matching tapir schemas whose schema type is a product (object-like).
object ProductSchema {
  // Yields the underlying SProduct when `schema` describes a product; None otherwise.
  def unapply[T](schema: Schema[T]): Option[SProduct[T]] =
    schema match {
      case Schema(sp @ SProduct(_), _, _, _, _, _, _, _, _, _, _) => Some(sp)
      case _                                                      => None
    }
}

// ===== tapir/src/test/scala/morphling/tapir/Implicits.scala =====
package morphling.tapir

import cats.free.Trampoline
import cats.instances.vector.*
import cats.syntax.traverse.*
import cats.~>
import io.circe.Json
import morphling.protocol.SType.SSchema
import sttp.tapir.Schema

// Test-side instances: interprets the sample protocol's primitive schema (SSchema)
// into tapir Schema via the generic SchemaPack interpreter.
object Implicits extends SchemaPack {
  implicit val primToSchema: ToSchema[SSchema] = new ToSchema[SSchema] { self =>
    val toSchema: SSchema ~> Schema = new (SSchema ~> Schema) {
      // Unwraps the mutually-recursive fixpoint (`unmutu`) and delegates to schemaGen.
      def apply[A](s: SSchema[A]): Schema[A] = schemaGen[SSchema[A]#Inner].apply(s.unmutu)
    }
  }

  implicit class JsonOps(private val json: Json) extends AnyVal {
    // Recursively removes null-valued object fields; trampolined to stay stack-safe
    // on deeply nested JSON. Arrays are traversed element-wise, scalars pass through.
    def dropNulls: Trampoline[Json] =
      json.arrayOrObject(
        Trampoline.done(json),
        arr => arr.traverse(j => Trampoline.defer(j.dropNulls)).map(Json.fromValues),
        _.filter { case (_, j) => !j.isNull }
          .traverse(obj => Trampoline.defer(obj.dropNulls))
          .map(Json.fromJsonObject)
      )
  }
}

// ===== tapir/src/test/scala/morphling/tapir/SchemaPack.scala =====
package morphling.tapir

import cats.~>
import morphling.protocol.*
import sttp.tapir.Schema

// Generic interpreter from the test protocol's SType GADT to tapir Schema,
// parameterized by a ToSchema instance for the recursive positions.
trait SchemaPack {
  def schemaGen[F[_]: ToSchema]: (SType[F, *] ~> Schema) =
    new (SType[F, *] ~> Schema) {

      import ToSchema.*

      override def apply[I](st: SType[F, I]): Schema[I] =
        st match {
          case SNullT()   => Schema.schemaForUnit
          case SBoolT()   => Schema.schemaForBoolean
          case SIntT()    => Schema.schemaForInt
          case SLongT()   => Schema.schemaForLong
          case SFloatT()  => Schema.schemaForFloat
          case SDoubleT() => Schema.schemaForDouble
          // tapir has no Char schema; reuse the String schema re-typed to Char.
          case SCharT()   => Schema.schemaForString.as[Char]
          case SStrT()    => Schema.schemaForString
          case arr: SArrayT[F, i] =>
            // Vectors are mandatory collections here, so drop tapir's default optionality.
            Schema.schemaForIterable[i, Vector](arr.elem.schema).copy(isOptional = false)
        }
    }
}

// ===== tapir/src/test/scala/morphling/tapir/TapirSchemaSpec.scala =====
package morphling.tapir

import com.stephenn.scalatest.circe.JsonMatchers
import io.circe.Json
import io.circe.syntax.*
import morphling.samples.Person
import morphling.tapir.Implicits.*
import morphling.tapir.ToSchema.*
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import sttp.apispec.openapi.circe.*
import sttp.tapir.docs.apispec.schema.SchemaRenderer

// Renders the Person sample schemas to OpenAPI JSON (nulls stripped) and compares
// against expected fixture documents.
class TapirSchemaSpec extends AnyFunSuite with Matchers with JsonMatchers {
  test("Typeable should be generated") {
    val personTypeableJson = SchemaRenderer.convert(Person.schema.schema).asJson.dropNulls.run

    personTypeableJson should matchJsonString(
      Json
        .obj(
          "required" := "name" :: "birthDate" :: "roles" :: Nil,
          "type" := "object",
          "properties" := Json.obj(
            "name" := Json.obj("type" := "string"),
            "birthDate" := Json.obj("format" := "int64", "type" := "integer"),
            "roles" := Json.obj(
              "type" := "array",
              // Non-flat encoding: each role alternative is wrapped in a single-field object.
              "items" := Json.obj(
                "oneOf" := Json.arr(
                  Json.obj(
                    "required" := "administrator" :: Nil,
                    "type" := "object",
                    "properties" := Json.obj(
                      "administrator" := Json.obj(
                        "type" := "object",
                        "required" := "department" :: "subordinateCount" :: Nil,
                        "properties" := Json.obj(
                          "department" := Json.obj("type" := "string"),
                          "subordinateCount" := Json.obj("format" := "int32", "type" := "integer")
                        )
                      )
                    )
                  ),
                  Json.obj(
                    "required" := "user" :: Nil,
                    "type" := "object",
                    "properties" := Json.obj(
                      "user" := Json.obj(
                        "type" := "object"
                      )
                    )
                  ),
                )
              )
            ),
            "updateCounter" := Json.obj("format" := "int32", "type" := "integer", "default" := 0)
          )
        )
        .spaces2
    )
  }

  test("Flat typeable should be generated") {
    val personTypeableJson = SchemaRenderer.convert(Person.flatSchema.schema).asJson.dropNulls.run

    personTypeableJson should matchJsonString(
      Json
        .obj(
          "required" := "name" :: "birthDate" :: "roles" :: Nil,
          "type" := "object",
          "properties" := Json.obj(
            "name" := Json.obj(
              "type" := "string"
            ),
            "birthDate" := Json.obj(
              "type" := "integer",
              "format" := "int64"
            ),
            "roles" := Json.obj(
              "type" := "array",
              // Flat encoding: alternatives are discriminated by a "type" field.
              "items" := Json.obj(
                "oneOf" := Json.obj(
                  "required" := "type" :: Nil,
                  "type" := "object",
                  "properties" := Json.obj(
                    "type" := Json.obj(
                      "type" := "string"
                    )
                  )
                ) :: Json.obj(
                  "required" := "type" :: "department" :: "subordinateCount" :: Nil,
                  "type" := "object",
                  "properties" := Json.obj(
                    "type" := Json.obj("type" := "string"),
                    "department" := Json.obj("type" := "string"),
                    "subordinateCount" := Json.obj("type" := "integer", "format" := "int32")
                  )
                ) :: Nil,
                "discriminator" := Json.obj(
                  "propertyName" := "type"
                )
              )
            ),
            "updateCounter" := Json.obj(
              "type" := "integer",
              "format" := "int32",
              "default" := 0
            )
          )
        )
        .spaces2
    )
  }
}
// NOTE(review): this span is a flattened multi-file listing; each original file is
// delimited below by a `// ===== path =====` header. Code tokens are unchanged;
// only formatting was restored and comments added.

// ===== tapir/src/test/scala/morphling/tapir/annotated/Implicits.scala =====
package morphling.tapir.annotated

import cats.{Endo, ~>}
import morphling.protocol.annotated.STypeAnn.ASchema
import morphling.protocol.annotated.{Non, Range, Restriction}
import morphling.tapir.{SchemaPack, ToSchema}
import sttp.tapir.{Schema, Validator}

// Instances for the annotated protocol: restrictions become tapir validators.
object Implicits extends SchemaPack {
  // Maps each Restriction annotation to an endomorphism on the generated Schema:
  // Non is a no-op; Range adds min/max validators.
  implicit val schemaRestriction: (Restriction ~> λ[T => Endo[Schema[T]]]) =
    new (Restriction ~> λ[T => Endo[Schema[T]]]) {
      override def apply[A](rs: Restriction[A]): Endo[Schema[A]] = rs match {
        case Non() => identity
        case Range(from, to) =>
          (sch: Schema[Int]) => sch.validate(Validator.min(from).and(Validator.max(to)))
      }
    }

  implicit val primToSchema: ToSchema[ASchema] = new ToSchema[ASchema] { self =>
    val toSchema: ASchema ~> Schema = new (ASchema ~> Schema) {
      // Unwraps the mutually-recursive fixpoint and delegates to the shared interpreter.
      def apply[A](s: ASchema[A]): Schema[A] = schemaGen[ASchema[A]#Inner].apply(s.unmutu)
    }
  }
}

// ===== tapir/src/test/scala/morphling/tapir/annotated/TapirSchemaAnnotatedSpec.scala =====
package morphling.tapir.annotated

import com.stephenn.scalatest.circe.JsonMatchers
import io.circe.Json
import io.circe.syntax.*
import morphling.samples.annotated.Server
import morphling.tapir.Implicits.JsonOps
import morphling.tapir.ToSchema.*
import morphling.tapir.annotated.Implicits.*
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import sttp.apispec.openapi.circe.*
import sttp.tapir.docs.apispec.schema.SchemaRenderer

// Verifies that Range annotations surface as minimum/maximum in the rendered OpenAPI JSON.
class TapirSchemaAnnotatedSpec extends AnyFunSuite with Matchers with JsonMatchers {
  test("Annotated typeable should contain restrictions") {
    val serverTypeableJson = SchemaRenderer.convert(Server.schema.schema).asJson.dropNulls.run

    serverTypeableJson should matchJsonString(
      Json
        .obj(
          "type" := "object",
          "required" := "host" :: "port" :: Nil,
          "properties" := Json.obj(
            "host" := Json.obj(
              "type" := "string"
            ),
            "port" := Json.obj(
              "format" := "int32",
              // Bounds come from the Range(1, 65535) restriction on the port field.
              "maximum" := 65535,
              "minimum" := 1,
              "type" := "integer"
            )
          )
        )
        .spaces2
    )
  }
}

// ===== tapir/src/test/scala/sttp/tapir/docs/apispec/schema/SchemaRenderer.scala =====
// Lives in tapir's own package to reach the package-private TSchemaToASchema converter.
package sttp.tapir.docs.apispec.schema

import sttp.apispec.{ReferenceOr, Schema as ASchema}
import sttp.tapir.Schema

object SchemaRenderer {
  // Empty name map: no schemas are referenced by name in these tests.
  private val nsr  = new NameToSchemaReference(Map.empty)
  private val sToS = new TSchemaToASchema(nsr, true)

  // Converts a tapir Schema into an apispec schema (or a reference to one).
  def convert[T](schema: Schema[T]): ReferenceOr[ASchema] = sToS(schema)
}

// ===== typedschema/src/main/scala/morphling/tschema/ToTypeable.scala =====
package morphling.tschema

import cats.*
import cats.data.Const.*
import cats.data.{Const, EitherK}
import cats.free.*
import cats.syntax.option.*
import morphling.*
import morphling.Schema.Schema
import morphling.annotated.Schema.AnnotatedSchema
import mouse.option.*
import ru.tinkoff.tschema.swagger.{SwaggerObject, SwaggerOneOf, SwaggerPrimitive, SwaggerProperty, SwaggerRef, SwaggerTypeable}
import simulacrum_.typeclass

// Typeclass of schema algebras that can be interpreted into typed-schema's SwaggerTypeable.
@typeclass
trait ToTypeable[S[_]] {
  def toTypeable: S ~> SwaggerTypeable
}

object ToTypeable {
  implicit class ToTypeableOps[S[_], A](s: S[A]) {
    def typeable(implicit TT: ToTypeable[S]): SwaggerTypeable[A] = TT.toTypeable(s)
  }

  // Interprets a whole morphling Schema by folding the fixpoint with typAlg.
  implicit def schemaToTypeable[P[_]: ToTypeable]: ToTypeable[Schema[P, *]] = new ToTypeable[Schema[P, *]] {
    override val toTypeable: Schema[P, *] ~> SwaggerTypeable = new (Schema[P, *] ~> SwaggerTypeable) {
      override def apply[I](schema: Schema[P, I]): SwaggerTypeable[I] =
        HFix.cataNT[SchemaF[P, *[_], *], SwaggerTypeable](typAlg[P]).apply(schema)
    }
  }

  // As above, but for annotated schemas: annotations are interpreted as
  // endomorphisms on SwaggerTypeable (via the context-bound natural transformation).
  implicit def annSchemaToTypeable[P[_]: ToTypeable, A[_]: *[_] ~> λ[T => Endo[SwaggerTypeable[T]]]]
      : ToTypeable[AnnotatedSchema[P, A, *]] =
    new ToTypeable[AnnotatedSchema[P, A, *]] {
      override val toTypeable: AnnotatedSchema[P, A, *] ~> SwaggerTypeable =
        new (AnnotatedSchema[P, A, *] ~> SwaggerTypeable) {
          override def apply[I](schema: AnnotatedSchema[P, A, I]): SwaggerTypeable[I] =
            HFix.cataNT[HEnvT[A, SchemaF[P, *[_], *], *[_], *], SwaggerTypeable](annTypAlg).apply(schema)
        }
    }

  // The core algebra: one SchemaF layer (children already interpreted) to SwaggerTypeable.
  def typAlg[P[_]: ToTypeable]: HAlgebra[SchemaF[P, *[_], *], SwaggerTypeable] =
    new HAlgebra[SchemaF[P, *[_], *], SwaggerTypeable] {
      def apply[I](schema: SchemaF[P, SwaggerTypeable, I]): SwaggerTypeable[I] = schema match {
        case s: PrimSchema[P, SwaggerTypeable, I] => ToTypeable[P].toTypeable(s.prim)
        case s: OneOfSchema[P, SwaggerTypeable, I] =>
          SwaggerTypeable.make(
            SwaggerOneOf(
              // Two encodings, chosen by the presence of a discriminator field.
              s.discriminator.cata(
                dField => {
                  // Discriminated (flat) encoding: each alternative becomes a named ref
                  // whose object gains the discriminator property (pattern-pinned to its id)
                  // and lists the discriminator as required.
                  def discriminatorProp(id: String) =
                    SwaggerProperty(dField, None, Eval.now(SwaggerPrimitive.string.mod(_.copy(pattern = id.some))))

                  s.alts
                    .map { case Alt(id, b, p) =>
                      Option(id) -> Eval.now(b.typ match {
                        case SwaggerObject(properties, required, discriminator) =>
                          SwaggerRef(
                            id,
                            None,
                            Eval.now(
                              SwaggerObject(
                                properties :+ discriminatorProp(id),
                                required.map(_ :+ dField),
                                discriminator
                              )
                            )
                          )
                        // Non-object alternatives are passed through unchanged.
                        case other => other
                      })
                    }
                    .toList
                    .toVector
                },
                // Nested encoding: each alternative is wrapped in a single-field
                // required object keyed by the alternative's field name.
                s.alts
                  .map { case Alt(field, b, p) =>
                    Option.empty[String] -> Eval.now(
                      SwaggerObject(
                        Vector(
                          SwaggerProperty(field, None, Eval.now(b.typ))
                        ),
                        Eval.now(Vector(field))
                      )
                    )
                  }
                  .toList
                  .toVector
              ),
              s.discriminator
            )
          )

        case s: RecordSchema[P, SwaggerTypeable, I]  => recordTypeable[P, I](s.props)
        case s: IsoSchema[P, SwaggerTypeable, i0, I] => s.base.as[I]
      }
    }

  // Algebra for annotated layers: interpret the inner SchemaF, then apply the
  // annotation's SwaggerTypeable endomorphism on top.
  def annTypAlg[P[_]: ToTypeable, Ann[_]](implicit
      interpret: Ann ~> λ[T => Endo[SwaggerTypeable[T]]]
  ): HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], SwaggerTypeable] =
    new HAlgebra[HEnvT[Ann, SchemaF[P, *[_], *], *[_], *], SwaggerTypeable] {
      override def apply[A](schema: HEnvT[Ann, SchemaF[P, *[_], *], SwaggerTypeable, A]): SwaggerTypeable[A] =
        interpret.apply(schema.ask).apply(typAlg[P].apply(schema.fa))
    }

  // Folds a record's property list (a FreeApplicative) into a single object schema.
  def recordTypeable[P[_]: ToTypeable, I](
      rb: FreeApplicative[PropSchema[I, SwaggerTypeable, *], I]
  ): SwaggerTypeable[I] = {
    // Local monoid so foldMap can merge per-property SwaggerObjects:
    // properties are concatenated (first occurrence of a name wins),
    // required lists are concatenated and de-duplicated.
    implicit val som: Monoid[SwaggerObject] = new Monoid[SwaggerObject] {
      override val empty: SwaggerObject = SwaggerObject()

      override def combine(x: SwaggerObject, y: SwaggerObject): SwaggerObject =
        SwaggerObject(
          (x.properties ++ y.properties).groupBy(_.name).map(_._2.head).toVector, // .distinctBy(_.name)
          (for {
            xr <- x.required
            yr <- y.required
          } yield (xr ++ yr).distinct).memoize
        )
    }

    SwaggerTypeable.make[I](
      rb.foldMap[Const[SwaggerObject, *]](
        new (PropSchema[I, SwaggerTypeable, *] ~> Const[SwaggerObject, *]) {
          def apply[B](ps: PropSchema[I, SwaggerTypeable, B]): Const[SwaggerObject, B] =
            ps match {
              case req: Required[I, SwaggerTypeable, i] =>
                // A Required prop with a default is rendered as optional;
                // without one it is listed in the object's required set.
                req.default.cata(
                  _ => {
                    val optionalField = SwaggerProperty(req.fieldName, None, Eval.now(req.base.typ))
                    Const.of(SwaggerObject(Vector(optionalField)))
                  }, {
                    val requiredField = SwaggerProperty(req.fieldName, None, Eval.now(req.base.typ))
                    Const.of(SwaggerObject(Vector(requiredField), Eval.now(Vector(req.fieldName))))
                  }
                )

              case opt: Optional[I, SwaggerTypeable, i] =>
                val optionalField = SwaggerProperty(opt.fieldName, None, Eval.now(opt.base.typ))
                Const.of(SwaggerObject(Vector(optionalField)))
              // Constants and absent props contribute nothing to the rendered object.
              case const: Constant[I, SwaggerTypeable, i] =>
                Const.of(SwaggerObject.withProps())
              case abs: Absent[I, SwaggerTypeable, i] =>
                Const.of(SwaggerObject())
            }
        }
      ).getConst
    )
  }

  // Coproduct instance: dispatch to whichever side of the EitherK is populated.
  implicit def eitherKTypeable[P[_]: ToTypeable, Q[_]: ToTypeable]: ToTypeable[EitherK[P, Q, *]] =
    new ToTypeable[EitherK[P, Q, *]] {
      override val toTypeable: EitherK[P, Q, *] ~> SwaggerTypeable =
        new (EitherK[P, Q, *] ~> SwaggerTypeable) {
          override def apply[A](fa: EitherK[P, Q, A]): SwaggerTypeable[A] = fa.run.fold(
            ToTypeable[P].toTypeable(_),
            ToTypeable[Q].toTypeable(_),
          )
        }
    }
}

// ===== typedschema/src/test/scala/morphling/tschema/Implicits.scala =====
package morphling.tschema

import cats.free.Trampoline
import cats.instances.vector.*
import cats.syntax.traverse.*
import cats.~>
import io.circe.Json
import morphling.protocol.SType.SSchema
import ru.tinkoff.tschema.swagger.SwaggerTypeable

// Test-side instances: interprets the sample protocol's SSchema into SwaggerTypeable.
object Implicits extends TypeablePack {
  implicit val primToTypeable: ToTypeable[SSchema] = new ToTypeable[SSchema] { self =>
    val toTypeable: SSchema ~> SwaggerTypeable = new (SSchema ~> SwaggerTypeable) {
      // Unwraps the mutually-recursive fixpoint and delegates to sTypeGen.
      def apply[A](s: SSchema[A]): SwaggerTypeable[A] = sTypeGen[SSchema[A]#Inner].apply(s.unmutu)
    }
  }

  implicit class JsonOps(private val json: Json) extends AnyVal {
    // Recursively removes null-valued object fields; trampolined for stack safety.
    def dropNulls: Trampoline[Json] =
      json.arrayOrObject(
        Trampoline.done(json),
        arr => arr.traverse(j => Trampoline.defer(j.dropNulls)).map(Json.fromValues),
        _.filter { case (_, j) => !j.isNull }
          .traverse(obj => Trampoline.defer(obj.dropNulls))
          .map(Json.fromJsonObject)
      )
  }
}

// ===== typedschema/src/test/scala/morphling/tschema/TSchemaSpec.scala =====
package morphling.tschema

import cats.Eq
import cats.instances.function.*
import com.stephenn.scalatest.circe.JsonMatchers
import io.circe.Json
import io.circe.syntax.*
import morphling.samples.Person
import morphling.tschema.Implicits.*
import morphling.tschema.ToTypeable.*
import org.scalactic.Equality
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.reflect.ClassTag

// Renders the Person sample schemas through typed-schema and compares the
// swagger JSON (nulls stripped) against expected fixtures.
class TSchemaSpec extends AnyFunSuite with Matchers with JsonMatchers {
  // Bridges cats.Eq into scalatest's Equality.
  // NOTE(review): the `case bt: T` match is erasure-unchecked for generic T.
  implicit def eqEquality[T: Eq: ClassTag]: Equality[T] =
    (a: T, b: Any) =>
      b match {
        case bt: T => Eq.eqv(a, bt)
        case _     => false
      }

  test("Typeable should be generated") {
    val personTypeableJson = Person.schema.typeable.typ.asJson.dropNulls.run

    personTypeableJson should matchJsonString(
      Json
        .obj(
          "type" := "object",
          "required" := "name" :: "birthDate" :: "roles" :: Nil,
          "properties" := Json.obj(
            "roles" := Json.obj(
              "type" := "array",
              // Non-flat encoding: each role alternative is a single-field wrapper object.
              "items" := Json.obj(
                "type" := "object",
                "oneOf" := Json.arr(
                  Json.obj(
                    "type" := "object",
                    "required" := "user" :: Nil,
                    "properties" := Json.obj(
                      "user" := Json.obj(
                        "type" := "object",
                        "properties" := Json.obj()
                      )
                    )
                  ),
                  Json.obj(
                    "type" := "object",
                    "required" := "administrator" :: Nil,
                    "properties" := Json.obj(
                      "administrator" := Json.obj(
                        "type" := "object",
                        "required" := "department" :: "subordinateCount" :: Nil,
                        "properties" := Json.obj(
                          "department" := Json.obj("type" := "string"),
                          "subordinateCount" := Json.obj("format" := "int32", "type" := "integer")
                        )
                      )
                    )
                  )
                )
              )
            ),
            "name" := Json.obj("type" := "string"),
            "birthDate" := Json.obj("format" := "int64", "type" := "integer"),
            "updateCounter" := Json.obj("format" := "int32", "type" := "integer")
          )
        )
        .spaces2
    )
  }

  test("Flat typeable should be generated") {
    val personTypeableJson = Person.flatSchema.typeable.typ.asJson.dropNulls.run

    personTypeableJson should matchJsonString(
      Json
        .obj(
          "type" := "object",
          "required" := "name" :: "birthDate" :: "roles" :: Nil,
          "properties" := Json.obj(
            "roles" := Json.obj(
              "type" := "array",
              // Flat encoding: alternatives become $refs with a discriminator mapping.
              "items" := Json.obj(
                "type" := "object",
                "oneOf" := Json.arr(
                  Json.obj(
                    "$ref" := "#/components/schemas/user"
                  ),
                  Json.obj(
                    "$ref" := "#/components/schemas/administrator"
                  )
                ),
                "discriminator" := Json.obj(
                  "propertyName" := "type",
                  "mapping" := Json.obj(
                    "user" := "#/components/schemas/user",
                    "administrator" := "#/components/schemas/administrator"
                  )
                )
              )
            ),
            "name" := Json.obj("type" := "string"),
            "birthDate" := Json.obj("format" := "int64", "type" := "integer"),
            "updateCounter" := Json.obj("format" := "int32", "type" := "integer")
          )
        )
        .spaces2
    )
  }
}
// NOTE(review): this span is a flattened multi-file listing; each original file is
// delimited below by a `// ===== path =====` header. Code tokens are unchanged;
// only formatting was restored and comments added.

// ===== typedschema/src/test/scala/morphling/tschema/TypeablePack.scala =====
package morphling.tschema

import cats.~>
import morphling.protocol.*
import ru.tinkoff.tschema.swagger.SwaggerTypeable

// Generic interpreter from the test protocol's SType GADT to SwaggerTypeable,
// parameterized by a ToTypeable instance for the recursive positions.
trait TypeablePack {
  def sTypeGen[F[_]: ToTypeable]: (SType[F, *] ~> SwaggerTypeable) =
    new (SType[F, *] ~> SwaggerTypeable) {
      import ToTypeable.*

      override def apply[I](st: SType[F, I]): SwaggerTypeable[I] = st match {
        case SNullT()   => SwaggerTypeable.swaggerTypeableUnit
        case SBoolT()   => SwaggerTypeable.swaggerTypeableBoolean
        case SIntT()    => SwaggerTypeable.swaggerTypeableInteger
        case SLongT()   => SwaggerTypeable.swaggerTypeableLong
        case SFloatT()  => SwaggerTypeable.swaggerTypeableFloat
        case SDoubleT() => SwaggerTypeable.swaggerTypeableDouble
        // No Char typeable in typed-schema; reuse the String typeable re-typed to Char.
        case SCharT()   => SwaggerTypeable.swaggerTypeableString.as[Char]
        case SStrT()    => SwaggerTypeable.swaggerTypeableString
        case arr: SArrayT[F, i] =>
          SwaggerTypeable.swaggerVectorTypeable(arr.elem.typeable)
      }
    }
}

// ===== typedschema/src/test/scala/morphling/tschema/annotated/Implicits.scala =====
package morphling.tschema.annotated

import cats.{Endo, ~>}
import morphling.protocol.annotated.STypeAnn.ASchema
import morphling.protocol.annotated.{Non, Range, Restriction}
import morphling.tschema.{ToTypeable, TypeablePack}
import ru.tinkoff.tschema.swagger.{SwaggerPrimitive, SwaggerTypeable}

// Instances for the annotated protocol: restrictions become swagger min/max bounds.
object Implicits extends TypeablePack {
  // Non is a no-op; Range rewrites the integer primitive's minimum/maximum.
  // Non-integer swagger types are left untouched by the updateTyp partial rewrite.
  implicit val typeableRestriction: (Restriction ~> λ[T => Endo[SwaggerTypeable[T]]]) =
    new (Restriction ~> λ[T => Endo[SwaggerTypeable[T]]]) {
      override def apply[A](rs: Restriction[A]): Endo[SwaggerTypeable[A]] = rs match {
        case Non() => identity
        case Range(from, to) =>
          (typ: SwaggerTypeable[Int]) =>
            typ.updateTyp {
              case SwaggerPrimitive.integer =>
                SwaggerPrimitive.integer.mod(_.copy(minimum = Some(from), maximum = Some(to)))
              case other => other
            }
      }
    }

  implicit val primToTypeable: ToTypeable[ASchema] = new ToTypeable[ASchema] { self =>
    val toTypeable: ASchema ~> SwaggerTypeable = new (ASchema ~> SwaggerTypeable) {
      // Unwraps the mutually-recursive fixpoint and delegates to the shared interpreter.
      def apply[A](s: ASchema[A]): SwaggerTypeable[A] = sTypeGen[ASchema[A]#Inner].apply(s.unmutu)
    }
  }
}

// ===== typedschema/src/test/scala/morphling/tschema/annotated/TSchemaAnnotatedSpec.scala =====
package morphling.tschema.annotated

import cats.Eq
import cats.instances.function.*
import com.stephenn.scalatest.circe.JsonMatchers
import io.circe.Json
import io.circe.syntax.*
import morphling.samples.annotated.Server
import morphling.tschema.Implicits.JsonOps
import morphling.tschema.ToTypeable.*
import morphling.tschema.annotated.Implicits.*
import org.scalactic.Equality
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.reflect.ClassTag

// Verifies that Range annotations surface as minimum/maximum in the swagger JSON.
class TSchemaAnnotatedSpec extends AnyFunSuite with Matchers with JsonMatchers {
  // Bridges cats.Eq into scalatest's Equality.
  // NOTE(review): the `case bt: T` match is erasure-unchecked for generic T.
  implicit def eqEquality[T: Eq: ClassTag]: Equality[T] =
    (a: T, b: Any) =>
      b match {
        case bt: T => Eq.eqv(a, bt)
        case _     => false
      }

  test("Annotated typeable should contain restrictions") {
    val serverTypeable     = Server.schema.typeable
    val serverTypeableJson = serverTypeable.typ.asJson.dropNulls.run

    serverTypeableJson should matchJsonString(
      Json
        .obj(
          "type" := "object",
          "required" := "host" :: "port" :: Nil,
          "properties" := Json.obj(
            "host" := Json.obj(
              "type" := "string"
            ),
            "port" := Json.obj(
              "format" := "int32",
              // Bounds come from the Range(1, 65535) restriction on the port field.
              "maximum" := 65535,
              "minimum" := 1,
              "type" := "integer"
            )
          )
        )
        .spaces2
    )
  }
}