├── project ├── build.properties └── plugins.sbt ├── .gitignore ├── modules ├── scodec │ └── src │ │ ├── main │ │ └── scala │ │ │ └── xenomorph │ │ │ └── scodec │ │ │ ├── package.scala │ │ │ ├── LowPriorityScalazInstances.scala │ │ │ └── ToScodec.scala │ │ └── test │ │ └── scala │ │ └── xenomorph │ │ └── scodec │ │ ├── ScodecSpec.scala │ │ └── Implicits.scala ├── core │ └── src │ │ ├── main │ │ └── scala │ │ │ └── xenomorph │ │ │ ├── package.scala │ │ │ ├── HFunctor.scala │ │ │ ├── Schema.scala │ │ │ └── SchemaF.scala │ │ └── test │ │ └── scala │ │ └── xenomorph │ │ ├── Samples.scala │ │ └── json │ │ └── JType.scala ├── scalacheck │ └── src │ │ ├── test │ │ └── scala │ │ │ └── xenomorph │ │ │ └── scalacheck │ │ │ └── Implicits.scala │ │ └── main │ │ └── scala │ │ └── xenomorph │ │ └── scalacheck │ │ └── ToGen.scala └── argonaut │ └── src │ ├── test │ └── scala │ │ └── xenomorph.argonaut │ │ ├── ArgonautSpec.scala │ │ └── Implicits.scala │ └── main │ └── scala │ └── xenomorph │ └── argonaut │ ├── ToJson.scala │ └── FromJson.scala ├── README.md └── LICENSE /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=0.13.16 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # sbt specific 2 | target/ 3 | 4 | # editors specific 5 | .scala_dependencies 6 | .worksheet 7 | .idea 8 | -------------------------------------------------------------------------------- /modules/scodec/src/main/scala/xenomorph/scodec/package.scala: -------------------------------------------------------------------------------- 1 | package xenomorph 2 | 3 | package object scodec extends LowPriorityScalazInstances { 4 | 5 | } 6 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.21") 2 | addSbtPlugin("org.portable-scala" % "sbt-crossproject" % "0.3.0") 3 | addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "0.3.0") 4 | addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.3.6") 5 | -------------------------------------------------------------------------------- /modules/core/src/main/scala/xenomorph/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Kris Nuttycombe 3 | * All rights reserved. 4 | * 5 | * This file is part of the Scala Xenomorph library. 6 | * 7 | * GNU Lesser General Public License Usage 8 | * This file may be used under the terms of the GNU Lesser 9 | * General Public License version 3.0 as published by the Free Software 10 | * Foundation and appearing in the file LICENSE included in the 11 | * packaging of this file. Please review the following information to 12 | * ensure the GNU Lesser General Public License version 3.0 requirements 13 | * will be met: https://www.gnu.org/licenses/lgpl-3.0.txt 14 | */ 15 | 16 | /** A library for creating values which describe the structure of 17 | * sums-of-products data structures. 
18 | */ 19 | package object xenomorph { 20 | } 21 | -------------------------------------------------------------------------------- /modules/scodec/src/main/scala/xenomorph/scodec/LowPriorityScalazInstances.scala: -------------------------------------------------------------------------------- 1 | package xenomorph.scodec 2 | 3 | import scodec.{Attempt, Decoder} 4 | 5 | import scalaz.{Applicative, Monad, Traverse} 6 | 7 | trait LowPriorityScalazInstances { 8 | 9 | implicit val AttemptInstance: Monad[Attempt] with Traverse[Attempt] = new Monad[Attempt] with Traverse[Attempt]{ 10 | def point[A](a: => A) = Attempt.successful(a) 11 | def bind[A, B](fa: Attempt[A])(f: A => Attempt[B]) = fa.flatMap(f) 12 | 13 | def traverseImpl[G[_], A, B](fa: Attempt[A])(f: A => G[B])(implicit G: Applicative[G]): G[Attempt[B]] = 14 | fa match { 15 | case Attempt.Successful(value) => G.map(f(value))(Attempt.Successful(_)) 16 | case failure@Attempt.Failure(_) => G.point(failure) 17 | } 18 | } 19 | 20 | implicit val DecoderInstance : Monad[Decoder] = new Monad[Decoder]{ 21 | def point[A](a: => A) = Decoder.point(a) 22 | def bind[A, B](fa: Decoder[A])(f: A => Decoder[B]) = fa.flatMap(f) 23 | } 24 | 25 | } 26 | -------------------------------------------------------------------------------- /modules/scalacheck/src/test/scala/xenomorph/scalacheck/Implicits.scala: -------------------------------------------------------------------------------- 1 | package xenomorph.scalacheck 2 | 3 | import xenomorph.Schema.Schema 4 | import xenomorph.json.JType.JSchema 5 | import xenomorph.json._ 6 | 7 | import scalaz.~> 8 | 9 | object Implicits { 10 | 11 | implicit val toGen: ToGen[JSchema] = new ToGen[JSchema] { self => 12 | import org.scalacheck.Arbitrary._ 13 | import org.scalacheck.Gen 14 | import org.scalacheck.Gen._ 15 | def toGen = new (JSchema ~> Gen) { 16 | def apply[A](s: JSchema[A]): Gen[A] = s.unmutu match { 17 | case JNullT() => arbitrary[Unit] 18 | case JBoolT() => arbitrary[Boolean] 19 | case JByteT() => arbitrary[Byte] 20 | case JShortT() => arbitrary[Short] 21 | case JIntT() => arbitrary[Int] 22 | case JLongT() => Gen.chooseNum(Long.MinValue + 808L, Long.MaxValue) // Magic number to circumvent Instant#toEpochMillis throwing exceptions 23 | case JFloatT() => arbitrary[Float] 24 | case JDoubleT() => arbitrary[Double] 25 | case JCharT() => arbitrary[Char] 26 | case JStrT() => arbitrary[String] 27 | case arr: JArrayT[Schema[JSchema, ?], i] => 28 | val baseDecoder: Gen[i] = ToGen.schemaToGen[JSchema](self).toGen(arr.elem) 29 | containerOf[Vector, i](baseDecoder) 30 | } 31 | } 32 | } 33 | 34 | } 35 | -------------------------------------------------------------------------------- /modules/scodec/src/test/scala/xenomorph/scodec/ScodecSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Kris Nuttycombe 3 | * All rights reserved. 4 | * 5 | * This file is part of the Scala Xenomorph library. 6 | * 7 | * GNU Lesser General Public License Usage 8 | * This file may be used under the terms of the GNU Lesser 9 | * General Public License version 3.0 as published by the Free Software 10 | * Foundation and appearing in the file LICENSE included in the 11 | * packaging of this file. 
Please review the following information to 12 | * ensure the GNU Lesser General Public License version 3.0 requirements 13 | * will be met: https://www.gnu.org/licenses/lgpl-3.0.txt 14 | */ 15 | package xenomorph.scodec 16 | 17 | import _root_.scodec.Attempt 18 | import _root_.scodec.bits.BitVector 19 | import org.scalacheck.Arbitrary 20 | import org.scalatest.FunSuite 21 | import org.scalatest.prop.Checkers 22 | import xenomorph.Schema.Schema 23 | import xenomorph.json.JType.JSchema 24 | import xenomorph.samples.Person 25 | 26 | class ScodecSpec extends FunSuite with Checkers { 27 | 28 | import xenomorph.scalacheck.ToGen._ 29 | import xenomorph.scalacheck.Implicits._ 30 | import xenomorph.scodec.ToEncoder._ 31 | import xenomorph.scodec.ToDecoder._ 32 | import xenomorph.scodec.Implicits._ 33 | 34 | test("Serialization of values to binary should round-trip values produced by a generator"){ 35 | 36 | val schema: Schema[JSchema, Person] = Person.schema 37 | implicit val arbPerson : Arbitrary[Person] = Arbitrary(schema.toGen) 38 | 39 | check( 40 | (p: Person) => { 41 | val res = for { 42 | enc <- schema.toEncoder.encode(p) 43 | dec <- schema.toDecoder.decode(enc) 44 | } yield dec 45 | 46 | (res.map(_.value) == Attempt.successful(p)) && 47 | (res.map(_.remainder) == Attempt.successful(BitVector.empty)) 48 | } 49 | ) 50 | } 51 | 52 | } 53 | -------------------------------------------------------------------------------- /modules/argonaut/src/test/scala/xenomorph.argonaut/ArgonautSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Kris Nuttycombe 3 | * All rights reserved. 4 | * 5 | * This file is part of the Scala Xenomorph library. 6 | * 7 | * GNU Lesser General Public License Usage 8 | * This file may be used under the terms of the GNU Lesser 9 | * General Public License version 3.0 as published by the Free Software 10 | * Foundation and appearing in the file LICENSE included in the 11 | * packaging of this file. 
Please review the following information to 12 | * ensure the GNU Lesser General Public License version 3.0 requirements 13 | * will be met: https://www.gnu.org/licenses/lgpl-3.0.txt 14 | */ 15 | package xenomorph.argonaut 16 | 17 | import org.scalacheck._ 18 | import org.scalatest.FunSuite 19 | import org.scalatest.prop.Checkers 20 | import xenomorph.samples._ 21 | 22 | class ArgonautSpec extends FunSuite with Checkers { 23 | 24 | import xenomorph.scalacheck.ToGen._ 25 | import xenomorph.scalacheck.Implicits._ 26 | import xenomorph.argonaut.Implicits._ 27 | import xenomorph.argonaut.ToJson._ 28 | import xenomorph.argonaut.FromJson._ 29 | 30 | test("A value should serialise to JSON") { 31 | val result = Person.schema.toJson(person) 32 | assert(result.toString == """{"roles":[{"administrator":{"subordinateCount":0,"department":"windmill-tilting"}}],"birthDate":20147028000,"name":"Kris Nuttycombe"}""") 33 | } 34 | 35 | test("A value should be deserialised from JSON"){ 36 | val result = Person.schema.toJson(person) 37 | assert(Person.schema.fromJson(result).toOption == Some(person)) 38 | } 39 | 40 | 41 | test("Serialization should round-trip values produced by a generator"){ 42 | implicit val arbPerson : Arbitrary[Person] = Arbitrary(Person.schema.toGen) 43 | check{ 44 | (p: Person) => 45 | Person.schema.fromJson(Person.schema.toJson(p)).toOption == Some(p) 46 | } 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /modules/core/src/test/scala/xenomorph/Samples.scala: -------------------------------------------------------------------------------- 1 | package xenomorph 2 | 3 | import java.time.Instant 4 | 5 | import monocle.Iso 6 | import monocle.macros._ 7 | import shapeless.HNil 8 | import xenomorph.Schema._ 9 | import xenomorph.json.JType._ 10 | 11 | import scalaz.syntax.apply._ 12 | 13 | package samples { 14 | 15 | @Lenses case class Person( 16 | name: String, 17 | birthDate: Instant, 18 | roles: Vector[Role] 19 | ) 20 | 21 | object Person { 22 | 23 | val schema: Schema[JSchema, Person] = rec( 24 | ^^( 25 | required("name", jStr, Person.name.asGetter), 26 | required( 27 | "birthDate", jLong.composeIso(Iso(Instant.ofEpochMilli(_:Long))((_ : Instant).toEpochMilli)), 28 | Person.birthDate.asGetter 29 | ), 30 | required("roles", jArray(Role.schema), Person.roles.asGetter) 31 | )(Person.apply) 32 | ) 33 | } 34 | 35 | sealed trait Role 36 | 37 | object Role { 38 | val schema: Schema[JSchema, Role] = Schema.oneOf( 39 | alt[JSchema, Role, User.type]( 40 | "user", 41 | Schema.const(User), 42 | User.prism 43 | ) :: 44 | alt[JSchema, Role, Administrator]( 45 | "administrator", 46 | rec( 47 | ^( 48 | required("department", jStr, Administrator.department.asGetter), 49 | required("subordinateCount", jInt, Administrator.subordinateCount.asGetter) 50 | )(Administrator.apply) 51 | ), 52 | Administrator.prism 53 | ) :: HNil 54 | ) 55 | } 56 | 57 | case object User extends Role { 58 | val prism = GenPrism[Role, User.type] 59 | } 60 | 61 | @Lenses case class Administrator(department: String, subordinateCount: Int) extends Role 62 | object Administrator { 63 | val prism = GenPrism[Role, Administrator] 64 | } 65 | } 66 | 67 | package object samples { 68 | val person = Person( 69 | "Kris Nuttycombe", 70 | Instant.ofEpochMilli(20147028000l), 71 | Vector(Administrator("windmill-tilting", 0)) 72 | ) 73 | } 74 | -------------------------------------------------------------------------------- /modules/core/src/test/scala/xenomorph/json/JType.scala: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Kris Nuttycombe 3 | * All rights reserved. 4 | * 5 | * This file is part of the Scala Xenomorph library. 6 | * 7 | * GNU Lesser General Public License Usage 8 | * This file may be used under the terms of the GNU Lesser 9 | * General Public License version 3.0 as published by the Free Software 10 | * Foundation and appearing in the file LICENSE included in the 11 | * packaging of this file. Please review the following information to 12 | * ensure the GNU Lesser General Public License version 3.0 requirements 13 | * will be met: https://www.gnu.org/licenses/lgpl-3.0.txt 14 | */ 15 | package xenomorph.json 16 | 17 | import xenomorph.HMutu 18 | import xenomorph.Schema._ 19 | 20 | 21 | sealed trait JType[F[_], I] 22 | 23 | case class JNullT[F[_]]() extends JType[F, Unit] 24 | case class JBoolT[F[_]]() extends JType[F, Boolean] 25 | 26 | case class JByteT[F[_]]() extends JType[F, Byte] 27 | case class JShortT[F[_]]() extends JType[F, Short] 28 | case class JIntT[F[_]]() extends JType[F, Int] 29 | case class JLongT[F[_]]() extends JType[F, Long] 30 | 31 | case class JFloatT[F[_]]() extends JType[F, Float] 32 | case class JDoubleT[F[_]]() extends JType[F, Double] 33 | 34 | case class JCharT[F[_]]() extends JType[F, Char] 35 | case class JStrT[F[_]]() extends JType[F, String] 36 | 37 | case class JArrayT[F[_], I](elem: F[I]) extends JType[F, Vector[I]] 38 | 39 | object JType { 40 | type JSchema[I] = HMutu[JType, Schema, I] 41 | 42 | val jNull = prim(HMutu[JType, Schema, Unit](JNullT())) 43 | val jBool = prim(HMutu[JType, Schema, Boolean](JBoolT())) 44 | val jShort = prim(HMutu[JType, Schema, Short](JShortT())) 45 | val jInt = prim(HMutu[JType, Schema, Int](JIntT())) 46 | val jLong = prim(HMutu[JType, Schema, Long](JLongT())) 47 | val jFloat = prim(HMutu[JType, Schema, Float](JFloatT())) 48 | val jDouble = prim(HMutu[JType, Schema, Double](JDoubleT())) 49 | val jChar = prim(HMutu[JType, Schema, Char](JCharT())) 50 | val jStr = prim(HMutu[JType, Schema, String](JStrT())) 51 | 52 | def jArray[I](elem: Schema[JSchema, I]) = prim(HMutu[JType, Schema, Vector[I]](JArrayT(elem))) 53 | 54 | } 55 | 56 | -------------------------------------------------------------------------------- /modules/argonaut/src/test/scala/xenomorph.argonaut/Implicits.scala: -------------------------------------------------------------------------------- 1 | package xenomorph.argonaut 2 | 3 | import argonaut.DecodeJson._ 4 | import argonaut.{DecodeJson, Json} 5 | import xenomorph.Schema.Schema 6 | import xenomorph.json.JType.JSchema 7 | import xenomorph.json._ 8 | 9 | import scalaz.~> 10 | 11 | object Implicits { 12 | 13 | implicit val primToJson: ToJson[JSchema] = new ToJson[JSchema] { self => 14 | val serialize = new (JSchema ~> (? 
=> Json)) { 15 | def apply[I](s: JSchema[I]): I => Json = s.unmutu match { 16 | case JNullT() => (_: I) => Json.jNull 17 | case JBoolT() => Json.jBool(_) 18 | case JByteT() => i => Json.jNumber(i.toInt) 19 | case JShortT() => i => Json.jNumber(i.toInt) 20 | case JIntT() => Json.jNumber(_) 21 | case JLongT() => Json.jNumber(_) 22 | case JFloatT() => f => Json.jNumberOrString(f.toDouble) 23 | case JDoubleT() => Json.jNumberOrString(_) 24 | case JCharT() => c => Json.jString(c.toString) 25 | case JStrT() => Json.jString(_) 26 | case JArrayT(elem) => 27 | xs => Json.jArray(xs.map(sToJ.serialize(elem)).toList) 28 | } 29 | } 30 | 31 | val sToJ: ToJson[Schema[JSchema, ?]] = ToJson.schemaToJson(self) 32 | } 33 | 34 | implicit val primFromJson: FromJson[JSchema] = new FromJson[JSchema] { self => 35 | val decoder = new (JSchema ~> DecodeJson) { 36 | def apply[I](s: JSchema[I]): DecodeJson[I] = s.unmutu match { 37 | case JNullT() => UnitDecodeJson 38 | case JBoolT() => BooleanDecodeJson 39 | case JByteT() => IntDecodeJson.map(_.toByte) 40 | case JShortT() => ShortDecodeJson 41 | case JIntT() => IntDecodeJson 42 | case JLongT() => LongDecodeJson 43 | case JFloatT() => FloatDecodeJson 44 | case JDoubleT() => DoubleDecodeJson 45 | case JCharT() => CharDecodeJson 46 | case JStrT() => StringDecodeJson 47 | case JArrayT(elem) => 48 | ListDecodeJson(sFromJ.decoder(elem)).map(_.toVector) 49 | } 50 | } 51 | 52 | val sFromJ: FromJson[Schema[JSchema, ?]] = FromJson.schemaFromJson(self) 53 | } 54 | 55 | } 56 | 57 | -------------------------------------------------------------------------------- /modules/scalacheck/src/main/scala/xenomorph/scalacheck/ToGen.scala: -------------------------------------------------------------------------------- 1 | package xenomorph.scalacheck 2 | 3 | import org.scalacheck.Gen 4 | 5 | import scalaz.~> 6 | import scalaz.Applicative 7 | import scalaz.FreeAp 8 | import scalaz.syntax.std.option._ 9 | 10 | import xenomorph._ 11 | import xenomorph.Schema._ 12 | import xenomorph.HFunctor._ 13 | 14 | trait ToGen[S[_]] { 15 | def toGen: S ~> Gen 16 | } 17 | 18 | object ToGen { 19 | implicit class ToGenOps[S[_], A](s: S[A]) { 20 | def toGen(implicit TG: ToGen[S]): Gen[A] = TG.toGen(s) 21 | } 22 | 23 | implicit def schemaToGen[P[_]: ToGen]: ToGen[Schema[P, ?]] = new ToGen[Schema[P, ?]] { 24 | def toGen = new (Schema[P, ?] ~> Gen) { 25 | override def apply[I](schema: Schema[P, I]) = { 26 | HFix.cataNT[SchemaF[P, ?[_], ?], Gen](genAlg).apply(schema) 27 | } 28 | } 29 | } 30 | 31 | def genAlg[P[_]: ToGen]: HAlgebra[SchemaF[P, ?[_], ?], Gen] = 32 | new HAlgebra[SchemaF[P, ?[_], ?], Gen] { 33 | def apply[I](schema: SchemaF[P, Gen, I]): Gen[I] = schema match { 34 | case s: PrimSchema[P, Gen, I] => implicitly[ToGen[P]].toGen(s.prim) 35 | case s: OneOfSchema[P, Gen, I] => 36 | val altGens = s.alts.map({ case Alt(_, b, p) => b.map(p.reverseGet) }) 37 | altGens.tail.headOption.cata( 38 | th => Gen.oneOf(altGens.head, th, altGens.tail.toList.tail: _*), 39 | altGens.head 40 | ) 41 | 42 | case s: RecordSchema[P, Gen, I] => recordGen[P,I](s.props) 43 | case s: IsoSchema[P, Gen, i0, I] => s.base.map(s.iso.get(_)) 44 | } 45 | } 46 | 47 | def recordGen[P[_]: ToGen, I](rb: FreeAp[PropSchema[I, Gen, ?], I]): Gen[I] = { 48 | implicit val djap: Applicative[Gen] = new Applicative[Gen] { 49 | def point[T](a: => T) = Gen.const(a) 50 | def ap[T, U](fa: => Gen[T])(ff: => Gen[T => U]): Gen[U] = { 51 | fa.flatMap(a => ff.map(_(a))) 52 | } 53 | } 54 | 55 | rb.foldMap( 56 | new (PropSchema[I, Gen, ?] 
~> Gen) { 57 | def apply[B](ps: PropSchema[I, Gen, B]): Gen[B] = ps match { 58 | case Required(_, base, _, _) => base 59 | case opt: Optional[I, Gen, i] => Gen.option(opt.base) 60 | } 61 | } 62 | ) 63 | } 64 | } 65 | 66 | -------------------------------------------------------------------------------- /modules/scodec/src/test/scala/xenomorph/scodec/Implicits.scala: -------------------------------------------------------------------------------- 1 | package xenomorph.scodec 2 | 3 | import scodec.bits.BitVector 4 | import scodec.codecs.implicits._ 5 | import scodec.{Attempt, Codec, Decoder, Encoder, _} 6 | import xenomorph.Schema.Schema 7 | import xenomorph.json.JType.JSchema 8 | import xenomorph.json._ 9 | 10 | import scalaz.~> 11 | 12 | object Implicits { 13 | 14 | implicit val toEncoder: ToEncoder[JSchema] = new ToEncoder[JSchema] { self => 15 | override val toEncoder = new (JSchema ~> Encoder) { 16 | def apply[A](s: JSchema[A]): Encoder[A] = s.unmutu match { 17 | case JNullT() => Encoder(_ => Attempt.successful(BitVector.empty)) 18 | case JBoolT() => implicitly[Encoder[Boolean]] 19 | case JByteT() => implicitly[Encoder[Byte]] 20 | case JShortT() => implicitly[Encoder[Short]] 21 | case JIntT() => implicitly[Encoder[Int]] 22 | case JLongT() => implicitly[Encoder[Long]] 23 | case JFloatT() => implicitly[Encoder[Float]] 24 | case JDoubleT() => implicitly[Encoder[Double]] 25 | case JCharT() => implicitly[Encoder[Byte]].xmap((_: Byte).toChar, (_: Char).toByte) 26 | case JStrT() => implicitly[Encoder[String]] 27 | case arr: JArrayT[Schema[JSchema, ?], i] => 28 | val baseEncoder: Encoder[i] = ToEncoder.schemaToEncoder[JSchema](self).toEncoder(arr.elem) 29 | implicit val codec = Codec[i](baseEncoder, null) 30 | implicitly[Encoder[Vector[i]]] 31 | } 32 | } 33 | } 34 | 35 | implicit val toDecoder: ToDecoder[JSchema] = new ToDecoder[JSchema] { self => 36 | override val toDecoder = new (JSchema ~> Decoder) { 37 | def apply[A](s: JSchema[A]): Decoder[A] = s.unmutu match { 38 | case JNullT() => Decoder.point(()) 39 | case JBoolT() => implicitly[Decoder[Boolean]] 40 | case JByteT() => implicitly[Decoder[Byte]] 41 | case JShortT() => implicitly[Decoder[Short]] 42 | case JIntT() => implicitly[Decoder[Int]] 43 | case JLongT() => implicitly[Decoder[Long]] 44 | case JFloatT() => implicitly[Decoder[Float]] 45 | case JDoubleT() => implicitly[Decoder[Double]] 46 | case JCharT() => implicitly[Decoder[Byte]].xmap((_: Byte).toChar, (_: Char).toByte) 47 | case JStrT() => implicitly[Decoder[String]] 48 | case arr: JArrayT[Schema[JSchema, ?], i] => 49 | val baseDecoder: Decoder[i] = ToDecoder.schemaToDecoder[JSchema](self).toDecoder(arr.elem) 50 | implicit val codec = Codec[i](null, baseDecoder) 51 | implicitly[Codec[Vector[i]]].asDecoder 52 | } 53 | } 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /modules/core/src/main/scala/xenomorph/HFunctor.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Kris Nuttycombe 3 | * All rights reserved. 4 | * 5 | * This file is part of the Scala Xenomorph library. 6 | * 7 | * GNU Lesser General Public License Usage 8 | * This file may be used under the terms of the GNU Lesser 9 | * General Public License version 3.0 as published by the Free Software 10 | * Foundation and appearing in the file LICENSE included in the 11 | * packaging of this file. 
Please review the following information to 12 | * ensure the GNU Lesser General Public License version 3.0 requirements 13 | * will be met: https://www.gnu.org/licenses/lgpl-3.0.txt 14 | */ 15 | package xenomorph 16 | 17 | import scalaz.~> 18 | import scalaz.Functor 19 | import scalaz.Name 20 | import scalaz.Need 21 | 22 | trait HFunctor[F[_[_], _]] { 23 | def hfmap[M[_], N[_]](nt: M ~> N): F[M, ?] ~> F[N, ?] 24 | } 25 | 26 | object HFunctor { 27 | def apply[F[_[_], _]](implicit v: HFunctor[F]) = v 28 | 29 | final implicit class HFunctorOps[F[_[_], _], M[_], A](val fa: F[M, A])(implicit F: HFunctor[F]) { 30 | def hfmap[N[_]](nt: M ~> N): F[N, A] = F.hfmap(nt)(fa) 31 | } 32 | 33 | type HAlgebra[F[_[_], _], G[_]] = F[G, ?] ~> G 34 | } 35 | 36 | /** Fixpoint data type that can preserve a type index through 37 | * its recursive step. 38 | */ 39 | final case class HFix[F[_[_], _], I](unfix: Name[F[HFix[F, ?], I]]) 40 | 41 | object HFix { 42 | import HFunctor._ 43 | 44 | def hfix[F[_[_], _], I](fa: => F[HFix[F, ?], I]): HFix[F, I] = 45 | HFix[F, I](Need(fa)) 46 | 47 | def cataNT[F[_[_], _]: HFunctor, G[_]](alg: HAlgebra[F, G]): (HFix[F, ?] ~> G) = 48 | new (HFix[F, ?] ~> G) { self => 49 | def apply[I](f: HFix[F, I]): G[I] = { 50 | alg.apply[I](f.unfix.value.hfmap[G](self)) 51 | } 52 | } 53 | 54 | type HCofree[F[_[_], _], A, I] = HFix[HEnvT[A, F, ?[_], ?], I] 55 | 56 | /** Smart constructor for HCofree values. */ 57 | def hcofree[F[_[_], _], A, I](ask: A, fga: => F[HCofree[F, A, ?], I]): HCofree[F, A, I] = 58 | hfix[HEnvT[A, F, ?[_], ?], I](HEnvT(ask, fga)) 59 | 60 | /** 61 | * Algebra to discard the annotations from an HCofree structure. 62 | */ 63 | def forget[F[_[_], _], A] = new HAlgebra[HEnvT[A, F, ?[_], ?], HFix[F, ?]] { 64 | def apply[I](env: HEnvT[A, F, HFix[F, ?], I]) = hfix(env.fa) 65 | } 66 | 67 | /** Functor over the annotation type of an HCofree value */ 68 | implicit def functor[F[_[_], _], I](implicit HF: HFunctor[F]): Functor[HCofree[F, ?, I]] = 69 | new Functor[HCofree[F, ?, I]] { 70 | def map[A, B](fa: HCofree[F, A, I])(f: A => B): HCofree[F, B, I] = { 71 | val step = fa.unfix.value 72 | val hf = new (HCofree[F, A, ?] ~> HCofree[F, B, ?]) { 73 | def apply[I0](gcf: HCofree[F, A, I0]) = functor(HF).map(gcf)(f) 74 | } 75 | 76 | hcofree( 77 | f(step.ask), 78 | HF.hfmap[HCofree[F, A, ?], HCofree[F, B, ?]](hf).apply(step.fa) 79 | ) 80 | } 81 | } 82 | } 83 | 84 | final case class HMutu[F[_[_], _], G[_[_], _], I](unmutu: F[G[HMutu[F, G, ?], ?], I]) 85 | 86 | final case class HEnvT[E, F[_[_], _], G[_], I](ask: E, fa: F[G, I]) 87 | 88 | object HEnvT { 89 | import HFunctor._ 90 | 91 | implicit def hfunctor[E, F[_[_], _]: HFunctor]: HFunctor[HEnvT[E, F, ?[_], ?]] = 92 | new HFunctor[HEnvT[E, F, ?[_], ?]] { 93 | def hfmap[M[_], N[_]](nt: M ~> N) = new (HEnvT[E, F, M, ?] ~> HEnvT[E, F, N, ?]) { 94 | def apply[I](fm: HEnvT[E, F, M, I]) = HEnvT(fm.ask, fm.fa.hfmap[N](nt)) 95 | } 96 | } 97 | } 98 | 99 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Xenomorph 2 | ========= 3 | 4 | Xenomorph is a Scala library for building well-typed descriptions of other 5 | Scala data structures, from which one can then automatically derive 6 | serialization, parsing, and generation functions. 
Implementations are
7 | currently provided for producing [argonaut](http://argonaut.io) `DecodeJson`
8 | and `EncodeJson` instances, [scalacheck](http://scalacheck.org) `Gen` values, and
9 | [scodec](http://scodec.org) `Encoder` and `Decoder` instances. Similar facilities for
10 | protobuf and whatever else might be useful will be coming as time permits and interest demands.
11 | 
12 | Getting Started
13 | ---------------
14 | 
15 | Xenomorph is still awaiting an initial release, so for now you'll have
16 | to build it locally for yourself.
17 | 
18 | Creating a Schema
19 | -----------------
20 | 
21 | Begin with the data type for which you wish to create a schema. Here's
22 | an example:
23 | 
24 | ~~~scala
25 | import monocle.macros._
26 | 
27 | @Lenses case class Person(
28 |   name: String,
29 |   birthDate: Instant,
30 |   roles: Vector[Role]
31 | )
32 | 
33 | sealed trait Role
34 | 
35 | case object User extends Role {
36 |   val prism = GenPrism[Role, User.type]
37 | }
38 | 
39 | @Lenses case class Administrator(department: String, subordinateCount: Int) extends Role
40 | object Administrator {
41 |   val prism = GenPrism[Role, Administrator]
42 | }
43 | ~~~
44 | 
45 | In this example, you can see:
46 | * A simple record type `Person`
47 | * A sum type `Role` with two constructors, `User` and `Administrator`
48 | 
49 | To build a schema for the `Person` type, we will use the Scalaz applicative
50 | constructor at arity 3 (`^^`) as shown below:
51 | 
52 | ~~~scala
53 | import scalaz.syntax.apply._
54 | import xenomorph.Schema._
55 | import xenomorph.json.JType._
56 | 
57 | val personSchema: Schema[JSchema, Person] = rec(
58 |   ^^(
59 |     required("name", jStr, Person.name.asGetter),
60 |     required(
61 |       "birthDate", jLong.composeIso(Iso(Instant.ofEpochMilli(_:Long))((_:Instant).toEpochMilli)),
62 |       Person.birthDate.asGetter
63 |     ),
64 |     required("roles", jArray(roleSchema), Person.roles.asGetter)
65 |   )(Person.apply _)
66 | )
67 | ~~~
68 | 
69 | The schema for the `Role` sum type is created as a nonempty list of
70 | alternatives, each of which provides a prism from the sum type to the
71 | underlying data type associated with each constructor. In the case of the
72 | `User` case object, the underlying schema is that of the empty object, which is
73 | isomorphic to `Unit`. `()` is the empty tuple, so we treat the empty record as
74 | the `Unit` schema constructor.
75 | 
76 | ~~~scala
77 | val roleSchema: Schema[JSchema, Role] = Schema.oneOf(
78 |   alt[JSchema, Role, User.type](
79 |     "user",
80 |     Schema.const(User),
81 |     User.prism
82 |   ) ::
83 |   alt[JSchema, Role, Administrator](
84 |     "administrator",
85 |     rec(
86 |       ^(
87 |         required("department", jStr, Administrator.department.asGetter),
88 |         required("subordinateCount", jInt, Administrator.subordinateCount.asGetter)
89 |       )(Administrator.apply _)
90 |     ),
91 |     Administrator.prism
92 |   ) :: shapeless.HNil
93 | )
94 | ~~~
95 | 
96 | This schema is constructed using the `JType` GADT to define the set of recognized
97 | primitive types. However, the set of primitive types is a user-definable feature at
98 | the time of schema construction.
99 | 
100 | Once you have a Schema value, you can use it to produce parsers, serializers, and
101 | generators.
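
The scodec module in this repository derives binary encoders and decoders from the same
schema value. The following is a minimal sketch based on the round trip exercised in
`ScodecSpec.scala`, which supplies the `JSchema` encoder and decoder instances through its
test `Implicits` object; the `roundTripped` name is illustrative only.

~~~scala
import scodec.Attempt
import xenomorph.scodec.ToEncoder._
import xenomorph.scodec.ToDecoder._
import xenomorph.scodec.Implicits._

// Encode a Person to a BitVector and decode it back again.
// Both steps return Attempt values, so the round trip composes in a for-comprehension.
val roundTripped: Attempt[Person] = for {
  bits    <- personSchema.toEncoder.encode(person)
  decoded <- personSchema.toDecoder.decode(bits)
} yield decoded.value
~~~

The argonaut and scalacheck derivations shown below follow the same pattern.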
102 | 103 | ~~~scala 104 | import argonaut._ 105 | import xenomorph.json.ToJson._ 106 | import xenomorph.json.FromJson._ 107 | import xenomorph.scalacheck.ToGen._ 108 | 109 | val personJson: Json = personSchema.toJson(person) 110 | 111 | val parsedPerson: Option[Person] = personSchema.fromJson(personJson).toOption 112 | 113 | val personGen: Gen[Person] = personSchema.toGen 114 | ~~~ 115 | 116 | Contributors 117 | ------------ 118 | Kris Nuttycombe (\@nuttycom) 119 | Antonio Alonso Dominguez 120 | Doug Clinton 121 | -------------------------------------------------------------------------------- /modules/argonaut/src/main/scala/xenomorph/argonaut/ToJson.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Kris Nuttycombe 3 | * All rights reserved. 4 | * 5 | * This file is part of the Scala Xenomorph library. 6 | * 7 | * GNU Lesser General Public License Usage 8 | * This file may be used under the terms of the GNU Lesser 9 | * General Public License version 3.0 as published by the Free Software 10 | * Foundation and appearing in the file LICENSE included in the 11 | * packaging of this file. Please review the following information to 12 | * ensure the GNU Lesser General Public License version 3.0 requirements 13 | * will be met: https://www.gnu.org/licenses/lgpl-3.0.txt 14 | */ 15 | package xenomorph.argonaut 16 | 17 | import argonaut._ 18 | import argonaut.Json._ 19 | 20 | import scalaz.~> 21 | import scalaz.Coproduct 22 | import scalaz.State 23 | import scalaz.State._ 24 | import scalaz.FreeAp 25 | import scalaz.syntax.foldable._ 26 | import scalaz.syntax.std.option._ 27 | 28 | import xenomorph._ 29 | import xenomorph.Schema._ 30 | import xenomorph.HFunctor._ 31 | 32 | trait ToJson[S[_]] { 33 | def serialize: S ~> (? => Json) 34 | } 35 | 36 | object ToJson { 37 | implicit class ToJsonOps[F[_], A](fa: F[A]) { 38 | def toJson(a: A)(implicit TJ: ToJson[F]): Json = TJ.serialize(fa)(a) 39 | } 40 | 41 | implicit def schemaToJson[P[_]: ToJson]: ToJson[Schema[P, ?]] = new ToJson[Schema[P, ?]] { 42 | def serialize = new (Schema[P, ?] ~> (? => Json)) { 43 | override def apply[I](schema: Schema[P, I]) = { 44 | HFix.cataNT[SchemaF[P, ?[_], ?], ? => Json](serializeAlg).apply(schema) 45 | } 46 | } 47 | } 48 | 49 | def serializeAlg[P[_]: ToJson]: HAlgebra[SchemaF[P, ?[_], ?], ? => Json] = 50 | new HAlgebra[SchemaF[P, ?[_], ?], ? => Json] { 51 | def apply[I](schema: SchemaF[P, ? => Json, I]): I => Json = { 52 | schema match { 53 | case s: PrimSchema[P, ? => Json, I] => 54 | implicitly[ToJson[P]].serialize(s.prim) 55 | 56 | case s: OneOfSchema[P, ? => Json, I] => 57 | (value: I) => { 58 | val results = s.alts.toList flatMap { 59 | case alt: Alt[? => Json, I, i] => { 60 | alt.prism.getOption(value).map(alt.base).toList map { json => 61 | jObject(JsonObject.single(alt.id, json)) 62 | } 63 | } 64 | } 65 | 66 | results.head //yeah, I know 67 | } 68 | 69 | case s: RecordSchema[P, ? => Json, I] => 70 | serializeObjF[P, I](s.props) 71 | 72 | case s: IsoSchema[P, ? => Json, i0, I] => 73 | s.base.compose(s.iso.reverseGet(_)) 74 | } 75 | } 76 | } 77 | 78 | def serializeObjF[P[_]: ToJson, I](rb: FreeAp[PropSchema[I, ? => Json, ?], I]): I => Json = { 79 | (value: I) => jObject( 80 | rb.foldMap[State[JsonObject, ?]]( 81 | new (PropSchema[I, ? => Json, ?] ~> State[JsonObject, ?]) { 82 | def apply[B](ps: PropSchema[I, ? => Json, B]): State[JsonObject, B] = { 83 | for { 84 | _ <- modify { (obj: JsonObject) => 85 | ps match { 86 | case req: Required[I, ? 
=> Json, i] => //(field, base, getter, _) => 87 | obj + (req.fieldName, req.base(req.getter.get(value))) 88 | 89 | case opt: Optional[I, ? => Json, i] => 90 | opt.getter.get(value).cata(v => obj + (opt.fieldName, opt.base(v)), obj) 91 | } 92 | } 93 | } yield ps.getter.get(value) 94 | } 95 | } 96 | ).exec(JsonObject.empty) 97 | ) 98 | } 99 | 100 | implicit def coproductToJson[P[_]: ToJson, Q[_]: ToJson] = new ToJson[Coproduct[P, Q, ?]] { 101 | val serialize = new (Coproduct[P, Q, ?] ~> (? => Json)) { 102 | def apply[A](p: Coproduct[P, Q, A]): A => Json = { 103 | p.run.fold( 104 | implicitly[ToJson[P]].serialize(_), 105 | implicitly[ToJson[Q]].serialize(_) 106 | ) 107 | } 108 | } 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /modules/argonaut/src/main/scala/xenomorph/argonaut/FromJson.scala: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | * Copyright (C) 2017 Kris Nuttycombe 4 | * All rights reserved. 5 | * 6 | * This file is part of the Scala Xenomorph library. 7 | * 8 | * GNU Lesser General Public License Usage 9 | * This file may be used under the terms of the GNU Lesser 10 | * General Public License version 3.0 as published by the Free Software 11 | * Foundation and appearing in the file LICENSE included in the 12 | * packaging of this file. Please review the following information to 13 | * ensure the GNU Lesser General Public License version 3.0 requirements 14 | * will be met: https://www.gnu.org/licenses/lgpl-3.0.txt 15 | */ 16 | package xenomorph.argonaut 17 | 18 | import argonaut._ 19 | import argonaut.DecodeJson._ 20 | 21 | import scalaz.~> 22 | import scalaz.Coproduct 23 | import scalaz.Applicative 24 | import scalaz.FreeAp 25 | import scalaz.syntax.foldable._ 26 | import scalaz.syntax.std.boolean._ 27 | 28 | import xenomorph._ 29 | import xenomorph.Schema._ 30 | import xenomorph.HFunctor._ 31 | 32 | trait FromJson[S[_]] { 33 | def decoder: S ~> DecodeJson 34 | } 35 | 36 | object FromJson { 37 | implicit class FromJsonOps[F[_], A](fa: F[A]) { 38 | def fromJson(a: Json)(implicit FJ: FromJson[F]): DecodeResult[A] = { 39 | FJ.decoder(fa).decodeJson(a) 40 | } 41 | } 42 | 43 | implicit def schemaFromJson[P[_]: FromJson]: FromJson[Schema[P, ?]] = new FromJson[Schema[P, ?]] { 44 | def decoder = new (Schema[P, ?] 
~> DecodeJson) { 45 | override def apply[I](schema: Schema[P, I]) = { 46 | HFix.cataNT[SchemaF[P, ?[_], ?], DecodeJson](decoderAlg[P]).apply(schema) 47 | } 48 | } 49 | } 50 | 51 | def decoderAlg[P[_]: FromJson]: HAlgebra[SchemaF[P, ?[_], ?], DecodeJson] = 52 | new HAlgebra[SchemaF[P, ?[_], ?], DecodeJson] { 53 | def apply[I](s: SchemaF[P, DecodeJson, I]): DecodeJson[I] = s match { 54 | case PrimSchema(p) => 55 | implicitly[FromJson[P]].decoder(p) 56 | 57 | case OneOfSchema(alts) => 58 | DecodeJson { (c: HCursor) => 59 | val results = for { 60 | fields <- c.fields.toList 61 | altResult <- alts.toList flatMap { 62 | case Alt(id, base, prism) => 63 | fields.contains(id).option( 64 | c.downField(id).as(base).map(prism.reverseGet) 65 | ).toList 66 | } 67 | } yield altResult 68 | 69 | val altIds = alts.map(_.id) 70 | results match { 71 | case x :: Nil => x 72 | case Nil => DecodeResult.fail(s"No fields found matching any of $altIds", c.history) 73 | case _ => DecodeResult.fail(s"More than one matching field found among $altIds}", c.history) 74 | } 75 | } 76 | 77 | case RecordSchema(rb) => 78 | decodeObj(rb) 79 | 80 | case IsoSchema(base, iso) => 81 | base.map(iso.get) 82 | } 83 | } 84 | 85 | def decodeObj[I](rb: FreeAp[PropSchema[I, DecodeJson, ?], I]): DecodeJson[I] = { 86 | implicit val djap: Applicative[DecodeJson] = new Applicative[DecodeJson] { 87 | def point[T](a: => T) = DecodeJson(_ => DecodeResult.ok(a)) 88 | def ap[T, U](fa: => DecodeJson[T])(ff: => DecodeJson[T => U]): DecodeJson[U] = { 89 | fa.flatMap(a => ff.map(_(a))) 90 | } 91 | } 92 | 93 | rb.foldMap( 94 | new (PropSchema[I, DecodeJson, ?] ~> DecodeJson) { 95 | def apply[B](ps: PropSchema[I, DecodeJson, B]): DecodeJson[B] = ps match { 96 | case Required(field, base, _, _) => 97 | DecodeJson(_.downField(field).as(base)) 98 | 99 | case opt: Optional[I, DecodeJson, i] => 100 | DecodeJson(_.downField(opt.fieldName).as(OptionDecodeJson(opt.base))) 101 | } 102 | } 103 | ) 104 | } 105 | 106 | implicit def coproductFromJson[P[_]: FromJson, Q[_]: FromJson] = new FromJson[Coproduct[P, Q, ?]] { 107 | val decoder = new (Coproduct[P, Q, ?] ~> DecodeJson) { 108 | def apply[A](p: Coproduct[P, Q, A]): DecodeJson[A] = { 109 | p.run.fold( 110 | implicitly[FromJson[P]].decoder(_), 111 | implicitly[FromJson[Q]].decoder(_), 112 | ) 113 | } 114 | } 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /modules/scodec/src/main/scala/xenomorph/scodec/ToScodec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Kris Nuttycombe, Antonio Alonso Dominguez, Doug Clinton 3 | * All rights reserved. 4 | * 5 | * This file is part of the Scala Xenomorph library. 6 | * 7 | * GNU Lesser General Public License Usage 8 | * This file may be used under the terms of the GNU Lesser 9 | * General Public License version 3.0 as published by the Free Software 10 | * Foundation and appearing in the file LICENSE included in the 11 | * packaging of this file. 
Please review the following information to 12 | * ensure the GNU Lesser General Public License version 3.0 requirements 13 | * will be met: https://www.gnu.org/licenses/lgpl-3.0.txt 14 | */ 15 | package xenomorph.scodec 16 | 17 | import scodec._ 18 | import scodec.bits._ 19 | import scodec.codecs.implicits._ 20 | 21 | import scalaz.~> 22 | import scalaz.StateT 23 | import scalaz.{IList, ICons, INil} 24 | import scalaz.FreeAp 25 | import scalaz.syntax.foldable._ 26 | import scalaz.syntax.monad._ 27 | 28 | import xenomorph._ 29 | import xenomorph.Schema._ 30 | import xenomorph.HFunctor._ 31 | 32 | trait ToEncoder[S[_]] { 33 | def toEncoder: S ~> Encoder 34 | } 35 | 36 | object ToEncoder { 37 | implicit class ToEncoderOps[S[_], A](s: S[A]) { 38 | def toEncoder(implicit TE: ToEncoder[S]): Encoder[A] = TE.toEncoder(s) 39 | } 40 | 41 | implicit def schemaToEncoder[P[_]: ToEncoder]: ToEncoder[Schema[P, ?]] = new ToEncoder[Schema[P, ?]] { 42 | override val toEncoder = new (Schema[P, ?] ~> Encoder) { 43 | override def apply[I](schema: Schema[P, I]) = { 44 | HFix.cataNT[SchemaF[P, ?[_], ?], Encoder](genAlg).apply(schema) 45 | } 46 | } 47 | } 48 | 49 | def genAlg[P[_]: ToEncoder]: HAlgebra[SchemaF[P, ?[_], ?], Encoder] = 50 | new HAlgebra[SchemaF[P, ?[_], ?], Encoder] { 51 | def apply[I](schema: SchemaF[P, Encoder, I]): Encoder[I] = schema match { 52 | case s: PrimSchema[P, Encoder, I] => implicitly[ToEncoder[P]].toEncoder(s.prim) 53 | case s: OneOfSchema[P, Encoder, I] => 54 | Encoder( 55 | (value: I) => { 56 | val results = s.alts.toList flatMap { 57 | case alt: Alt[Encoder, I, i] => { 58 | alt.prism.getOption(value).map( 59 | (baseValue: i) => for { 60 | idBits <- Encoder[String].encode(alt.id) 61 | valueBits <- alt.base.encode(baseValue) 62 | } yield idBits ++ valueBits 63 | ).toList 64 | } 65 | } 66 | 67 | results.head 68 | } 69 | ) 70 | case s: RecordSchema[P, Encoder, I] => recordEncoder[P, I](s.props) 71 | case s: IsoSchema[P, Encoder, i0, I] => Encoder((value: I) => s.base.encode(s.iso.reverseGet(value))) 72 | } 73 | } 74 | 75 | def recordEncoder[P[_]: ToEncoder, I](rb: FreeAp[PropSchema[I, Encoder, ?], I]): Encoder[I] = Encoder( 76 | (value: I) => rb.foldMap[StateT[Attempt, Vector[BitVector], ?]]( 77 | new (PropSchema[I, Encoder, ?] ~> StateT[Attempt, Vector[BitVector], ?]) { 78 | implicit val sms = StateT.stateTMonadState[Vector[BitVector], Attempt] 79 | 80 | def apply[B](ps: PropSchema[I, Encoder, B]): StateT[Attempt, Vector[BitVector], B] = { 81 | for { 82 | newBytes <- ps match { 83 | case req: Required[I, Encoder, i] => 84 | // skipping here the field name ?? 85 | req.base.encode(req.getter.get(value)).liftM[StateT[?[_], Vector[BitVector], ?]] 86 | case opt: Optional[I, Encoder, i] => 87 | implicit val baseCodec: Codec[i] = Codec(opt.base, null) 88 | implicitOptionCodec[i].encode(opt.getter.get(value)).liftM[StateT[?[_], Vector[BitVector], ?]] 89 | } 90 | _ <- sms.modify { (bytes: Vector[BitVector]) => bytes :+ newBytes } 91 | } yield ps.getter.get(value) 92 | } 93 | } 94 | ).exec(Vector.empty).map(BitVector.concat _) 95 | ) 96 | } 97 | 98 | trait ToDecoder[S[_]] { 99 | def toDecoder: S ~> Decoder 100 | } 101 | 102 | object ToDecoder { 103 | implicit class ToDecoderOps[S[_], A](s: S[A]) { 104 | def toDecoder(implicit TE: ToDecoder[S]): Decoder[A] = TE.toDecoder(s) 105 | } 106 | 107 | implicit def schemaToDecoder[P[_]: ToDecoder]: ToDecoder[Schema[P, ?]] = new ToDecoder[Schema[P, ?]] { 108 | override val toDecoder = new (Schema[P, ?] 
~> Decoder) { 109 | override def apply[I](schema: Schema[P, I]) = { 110 | HFix.cataNT[SchemaF[P, ?[_], ?], Decoder](genAlg).apply(schema) 111 | } 112 | } 113 | } 114 | 115 | def genAlg[P[_]: ToDecoder]: HAlgebra[SchemaF[P, ?[_], ?], Decoder] = 116 | new HAlgebra[SchemaF[P, ?[_], ?], Decoder] { 117 | def apply[I](schema: SchemaF[P, Decoder, I]): Decoder[I] = schema match { 118 | case s: PrimSchema[P, Decoder, I] => implicitly[ToDecoder[P]].toDecoder(s.prim) 119 | case s: OneOfSchema[P, Decoder, I] => 120 | Decoder[String] flatMap { altId => 121 | val underlying: IList[Decoder[I]] = s.alts.list.flatMap{ 122 | case Alt(id, base, prism) => 123 | if (id == altId) IList.single(base.map(prism.reverseGet)) else IList.empty 124 | } 125 | 126 | val altIds = s.alts.map(_.id) 127 | underlying match { 128 | case ICons(x, INil()) => x 129 | case INil() => Decoder.liftAttempt(Attempt.failure(Err(s"No fields found matching any of ${altIds}"))) 130 | case _ => Decoder.liftAttempt(Attempt.failure(Err(s"More than one matching field found among ${altIds}"))) 131 | } 132 | } 133 | 134 | case s: RecordSchema[P, Decoder, I] => recordDecoder[P, I](s.props) 135 | case s: IsoSchema[P, Decoder, i0, I] => s.base.map(s.iso.get(_)) 136 | } 137 | } 138 | 139 | def recordDecoder[P[_]: ToDecoder, I](rb: FreeAp[PropSchema[I, Decoder, ?], I]): Decoder[I] = { 140 | rb.foldMap( 141 | new (PropSchema[I, Decoder, ?] ~> Decoder) { 142 | def apply[B](ps: PropSchema[I, Decoder, B]): Decoder[B] = ps match { 143 | case Required(_, base, _, _) => base 144 | case opt: Optional[I, Decoder, i] => 145 | implicit val baseCodec: Codec[i] = Codec(null, opt.base) 146 | implicitOptionCodec[i].asDecoder 147 | } 148 | } 149 | ) 150 | } 151 | } 152 | 153 | final class ToCodec[S[_]](val toCodec: S ~> Codec) { 154 | val toToEncoder: ToEncoder[S] = new ToEncoder[S] { 155 | override val toEncoder = new (S ~> Encoder) { 156 | def apply[I](s: S[I]): Encoder[I] = toCodec(s).asEncoder 157 | } 158 | } 159 | 160 | val toToDecoder: ToDecoder[S] = new ToDecoder[S] { 161 | override val toDecoder = new (S ~> Decoder) { 162 | def apply[I](s: S[I]): Decoder[I] = toCodec(s).asDecoder 163 | } 164 | } 165 | } 166 | 167 | object ToCodec { 168 | import ToEncoder._ 169 | import ToDecoder._ 170 | 171 | implicit class ToCodecOps[S[_], A](s: S[A]) { 172 | def toCodec(implicit TC: ToCodec[S]): Codec[A] = TC.toCodec(s) 173 | } 174 | 175 | implicit def schemaToCodec[P[_]: ToCodec]: ToCodec[Schema[P, ?]] = new ToCodec[Schema[P, ?]]( 176 | new (Schema[P, ?] ~> Codec) { 177 | override def apply[I](schema: Schema[P, I]) = Codec( 178 | schemaToEncoder(implicitly[ToCodec[P]].toToEncoder).toEncoder(schema), 179 | schemaToDecoder(implicitly[ToCodec[P]].toToDecoder).toDecoder(schema) 180 | ) 181 | } 182 | ) 183 | } 184 | 185 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU LESSER GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | 9 | This version of the GNU Lesser General Public License incorporates 10 | the terms and conditions of version 3 of the GNU General Public 11 | License, supplemented by the additional permissions listed below. 12 | 13 | 0. Additional Definitions. 
14 | 15 | As used herein, "this License" refers to version 3 of the GNU Lesser 16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU 17 | General Public License. 18 | 19 | "The Library" refers to a covered work governed by this License, 20 | other than an Application or a Combined Work as defined below. 21 | 22 | An "Application" is any work that makes use of an interface provided 23 | by the Library, but which is not otherwise based on the Library. 24 | Defining a subclass of a class defined by the Library is deemed a mode 25 | of using an interface provided by the Library. 26 | 27 | A "Combined Work" is a work produced by combining or linking an 28 | Application with the Library. The particular version of the Library 29 | with which the Combined Work was made is also called the "Linked 30 | Version". 31 | 32 | The "Minimal Corresponding Source" for a Combined Work means the 33 | Corresponding Source for the Combined Work, excluding any source code 34 | for portions of the Combined Work that, considered in isolation, are 35 | based on the Application, and not on the Linked Version. 36 | 37 | The "Corresponding Application Code" for a Combined Work means the 38 | object code and/or source code for the Application, including any data 39 | and utility programs needed for reproducing the Combined Work from the 40 | Application, but excluding the System Libraries of the Combined Work. 41 | 42 | 1. Exception to Section 3 of the GNU GPL. 43 | 44 | You may convey a covered work under sections 3 and 4 of this License 45 | without being bound by section 3 of the GNU GPL. 46 | 47 | 2. Conveying Modified Versions. 48 | 49 | If you modify a copy of the Library, and, in your modifications, a 50 | facility refers to a function or data to be supplied by an Application 51 | that uses the facility (other than as an argument passed when the 52 | facility is invoked), then you may convey a copy of the modified 53 | version: 54 | 55 | a) under this License, provided that you make a good faith effort to 56 | ensure that, in the event an Application does not supply the 57 | function or data, the facility still operates, and performs 58 | whatever part of its purpose remains meaningful, or 59 | 60 | b) under the GNU GPL, with none of the additional permissions of 61 | this License applicable to that copy. 62 | 63 | 3. Object Code Incorporating Material from Library Header Files. 64 | 65 | The object code form of an Application may incorporate material from 66 | a header file that is part of the Library. You may convey such object 67 | code under terms of your choice, provided that, if the incorporated 68 | material is not limited to numerical parameters, data structure 69 | layouts and accessors, or small macros, inline functions and templates 70 | (ten or fewer lines in length), you do both of the following: 71 | 72 | a) Give prominent notice with each copy of the object code that the 73 | Library is used in it and that the Library and its use are 74 | covered by this License. 75 | 76 | b) Accompany the object code with a copy of the GNU GPL and this license 77 | document. 78 | 79 | 4. Combined Works. 
80 | 81 | You may convey a Combined Work under terms of your choice that, 82 | taken together, effectively do not restrict modification of the 83 | portions of the Library contained in the Combined Work and reverse 84 | engineering for debugging such modifications, if you also do each of 85 | the following: 86 | 87 | a) Give prominent notice with each copy of the Combined Work that 88 | the Library is used in it and that the Library and its use are 89 | covered by this License. 90 | 91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license 92 | document. 93 | 94 | c) For a Combined Work that displays copyright notices during 95 | execution, include the copyright notice for the Library among 96 | these notices, as well as a reference directing the user to the 97 | copies of the GNU GPL and this license document. 98 | 99 | d) Do one of the following: 100 | 101 | 0) Convey the Minimal Corresponding Source under the terms of this 102 | License, and the Corresponding Application Code in a form 103 | suitable for, and under terms that permit, the user to 104 | recombine or relink the Application with a modified version of 105 | the Linked Version to produce a modified Combined Work, in the 106 | manner specified by section 6 of the GNU GPL for conveying 107 | Corresponding Source. 108 | 109 | 1) Use a suitable shared library mechanism for linking with the 110 | Library. A suitable mechanism is one that (a) uses at run time 111 | a copy of the Library already present on the user's computer 112 | system, and (b) will operate properly with a modified version 113 | of the Library that is interface-compatible with the Linked 114 | Version. 115 | 116 | e) Provide Installation Information, but only if you would otherwise 117 | be required to provide such information under section 6 of the 118 | GNU GPL, and only to the extent that such information is 119 | necessary to install and execute a modified version of the 120 | Combined Work produced by recombining or relinking the 121 | Application with a modified version of the Linked Version. (If 122 | you use option 4d0, the Installation Information must accompany 123 | the Minimal Corresponding Source and Corresponding Application 124 | Code. If you use option 4d1, you must provide the Installation 125 | Information in the manner specified by section 6 of the GNU GPL 126 | for conveying Corresponding Source.) 127 | 128 | 5. Combined Libraries. 129 | 130 | You may place library facilities that are a work based on the 131 | Library side by side in a single library together with other library 132 | facilities that are not Applications and are not covered by this 133 | License, and convey such a combined library under terms of your 134 | choice, if you do both of the following: 135 | 136 | a) Accompany the combined library with a copy of the same work based 137 | on the Library, uncombined with any other library facilities, 138 | conveyed under the terms of this License. 139 | 140 | b) Give prominent notice with the combined library that part of it 141 | is a work based on the Library, and explaining where to find the 142 | accompanying uncombined form of the same work. 143 | 144 | 6. Revised Versions of the GNU Lesser General Public License. 145 | 146 | The Free Software Foundation may publish revised and/or new versions 147 | of the GNU Lesser General Public License from time to time. Such new 148 | versions will be similar in spirit to the present version, but may 149 | differ in detail to address new problems or concerns. 
150 | 151 | Each version is given a distinguishing version number. If the 152 | Library as you received it specifies that a certain numbered version 153 | of the GNU Lesser General Public License "or any later version" 154 | applies to it, you have the option of following the terms and 155 | conditions either of that published version or of any later version 156 | published by the Free Software Foundation. If the Library as you 157 | received it does not specify a version number of the GNU Lesser 158 | General Public License, you may choose any version of the GNU Lesser 159 | General Public License ever published by the Free Software Foundation. 160 | 161 | If the Library as you received it specifies that a proxy can decide 162 | whether future versions of the GNU Lesser General Public License shall 163 | apply, that proxy's public statement of acceptance of any version is 164 | permanent authorization for you to choose that version for the 165 | Library. 166 | -------------------------------------------------------------------------------- /modules/core/src/main/scala/xenomorph/Schema.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Kris Nuttycombe 3 | * All rights reserved. 4 | * 5 | * This file is part of the Scala Xenomorph library. 6 | * 7 | * GNU Lesser General Public License Usage 8 | * This file may be used under the terms of the GNU Lesser 9 | * General Public License version 3.0 as published by the Free Software 10 | * Foundation and appearing in the file LICENSE included in the 11 | * packaging of this file. Please review the following information to 12 | * ensure the GNU Lesser General Public License version 3.0 requirements 13 | * will be met: https://www.gnu.org/licenses/lgpl-3.0.txt 14 | */ 15 | package xenomorph 16 | 17 | import annotation.implicitNotFound 18 | 19 | import scalaz.~> 20 | import scalaz.Applicative 21 | import scalaz.NonEmptyList 22 | import scalaz.Profunctor 23 | import scalaz.FreeAp 24 | import scalaz.syntax.std.list._ 25 | 26 | import monocle.Iso 27 | import monocle.Getter 28 | import monocle.Prism 29 | 30 | import shapeless._ 31 | import shapeless.ops.hlist.{Align, Comapped, ToTraversable} 32 | import shapeless.ops.coproduct.ToHList 33 | 34 | import xenomorph.HFix._ 35 | import xenomorph.HFunctor._ 36 | 37 | /** Data types and smart constructors which simplify the creation 38 | * of schema values. 39 | * 40 | * @define PDefn The GADT type constructor for a sum type which defines 41 | * the set of primitive types used in the schema. 42 | * @define IDefn The type of the Scala value to be produced (or consumed) 43 | * by an interpreter of the schema. Also known as the "index" type 44 | * of the schema. 45 | * @define ODefn The type of a Scala record - an object or a tuple, 46 | * the property of which is being defined. 47 | * @define ADefn The type of the annotation applied to each node of the schema 48 | */ 49 | object Schema { 50 | /** The type of an unannotated schema. 51 | * 52 | * This is an alias for the HFix fixpoint applied to the SchemaF type constructor. 53 | * 54 | * @tparam P $PDefn 55 | * @tparam I $IDefn 56 | */ 57 | type Schema[P[_], I] = HFix[SchemaF[P, ?[_], ?], I] 58 | 59 | /** The type of free applicative values which are used to capture the structure 60 | * of individual record properties. 
61 | * 62 | * @tparam P $PDefn 63 | * @tparam O $ODefn 64 | * @tparam I $IDefn 65 | */ 66 | type Prop[P[_], O, I] = FreeAp[PropSchema[O, Schema[P, ?], ?], I] 67 | 68 | implicit def propApplicative[P[_], O]: Applicative[Prop[P, O, ?]] = 69 | FreeAp.freeInstance[PropSchema[O, Schema[P, ?], ?]] 70 | 71 | implicit def propProfunctor[P[_]]: Profunctor[Prop[P, ?, ?]] = new Profunctor[Prop[P, ?, ?]] { 72 | def mapfst[O, I, N](prop: Prop[P, O, I])(f: N => O): Prop[P, N, I] = prop.hoist[PropSchema[N, Schema[P, ?], ?]]( 73 | PropSchema.contraNT[O, N, Schema[P, ?]](f) 74 | ) 75 | 76 | def mapsnd[O, I, J](prop: Prop[P, O, I])(f: I => J): Prop[P, O, J] = prop.map(f) 77 | } 78 | 79 | /** The type of free applicative values which are used to capture the structure 80 | * of record (product) types. 81 | * 82 | * @tparam P $PDefn 83 | * @tparam R The type of the Scala value to be produced (or consumed) 84 | * by an interpreter of the schema. This is usually the type 85 | * of a record - an object or a tuple. 86 | */ 87 | type Props[P[_], R] = Prop[P, R, R] 88 | 89 | /** Lifts a SchemaF value into an unannotated Schema 90 | * 91 | * @tparam P $PDefn 92 | * @tparam I $IDefn 93 | * @param sf The value to be annotated 94 | * @return the newly constructed schema value 95 | */ 96 | def schema[P[_], I](sf: => SchemaF[P, Schema[P, ?], I]): Schema[P, I] = 97 | hfix[SchemaF[P, ?[_], ?], I](sf) 98 | 99 | /** Lifts a value in an algebra of primitives into an unannotated Schema 100 | * 101 | * @tparam P $PDefn 102 | * @tparam I $IDefn 103 | * @param p a value of the `P` algebra 104 | * @return the newly constructed schema value 105 | */ 106 | def prim[P[_], I](p: P[I]): Schema[P, I] = 107 | schema(PrimSchema[P, Schema[P, ?], I](p)) 108 | 109 | /** Builds an un-annotated schema for a record type from the free 110 | * applicative capture of that record's structure. 111 | * 112 | * @tparam P $PDefn 113 | * @tparam I $IDefn 114 | * @param props The free-applicative value that captures the structure 115 | * of the record type. 116 | */ 117 | def rec[P[_], I](props: Props[P, I]): Schema[P, I] = 118 | schema(RecordSchema[P, Schema[P, ?], I](props)) 119 | 120 | /** Smart constructor for required Prop instances. 121 | * 122 | * @tparam P $PDefn 123 | * @tparam O $ODefn 124 | * @tparam I $IDefn 125 | * @param fieldName name of the record property 126 | * @param valueSchema schema for the record property's type 127 | * @param getter Getter lens from the record type to the property's value 128 | */ 129 | def required[P[_], O, I](fieldName: String, valueSchema: Schema[P, I], getter: Getter[O, I]): Prop[P, O, I] = { 130 | FreeAp.lift[PropSchema[O, Schema[P, ?], ?], I]( 131 | Required[O, Schema[P, ?], I](fieldName, valueSchema, getter, None) 132 | ) 133 | } 134 | 135 | /** Smart constructor for required Prop instances, with a default 136 | * provided for the case where a serialized form is missing the 137 | * required field. 138 | * 139 | * @tparam P $PDefn 140 | * @tparam O $ODefn 141 | * @tparam I $IDefn 142 | * @param fieldName Name of the record property 143 | * @param valueSchema Schema for the record property's type 144 | * @param default Default value for use in the case that a serialized form 145 | * is missing the required field. 
146 | * @param getter Getter lens from the record type to the property's value 147 | */ 148 | def property[P[_], O, I](fieldName: String, valueSchema: Schema[P, I], default: I, getter: Getter[O, I]): Prop[P, O, I] = { 149 | FreeAp.lift[PropSchema[O, Schema[P, ?], ?], I]( 150 | Required[O, Schema[P, ?], I](fieldName, valueSchema, getter, Some(default)) 151 | ) 152 | } 153 | 154 | /** Smart constructor for optional Prop instances. 155 | * @tparam P $PDefn 156 | * @tparam O $ODefn 157 | * @tparam I $IDefn 158 | * @param fieldName name of the record property 159 | * @param valueSchema schema for the record property's type 160 | * @param getter Getter lens from the record type to the property's value 161 | */ 162 | def optional[P[_], O, I](fieldName: String, valueSchema: Schema[P, I], getter: Getter[O, Option[I]]): Prop[P, O, Option[I]] = { 163 | FreeAp.lift[PropSchema[O, Schema[P, ?], ?], Option[I]]( 164 | Optional[O, Schema[P, ?], I](fieldName, valueSchema, getter) 165 | ) 166 | } 167 | 168 | /** The unannotated empty record schema, representing a constant value. 169 | * 170 | * @tparam P $PDefn 171 | */ 172 | def const[P[_], A](a: A): Schema[P, A] = 173 | rec[P, A](FreeAp.pure[PropSchema[A, Schema[P, ?], ?], A](a)) 174 | 175 | /** Builds an un-annotated schema for the sum type `I` from an HList of alternatives. 176 | * 177 | * Each alternative value in the list describes a single constructor of `I`. 178 | * For example, to construct the schema for [[scala.util.Either]] one would provide 179 | * two alternatives, one for the `Left` constructor and one for `Right`. 180 | * 181 | * An easier-to-read type signature for this function is below: 182 | * 183 | * {{{ 184 | * def oneOf[P[_], I](alts: NonEmptyList[Alt[Schema[P, ?], I, _]]): Schema[P, I] 185 | * }}} 186 | * 187 | * @tparam P $PDefn 188 | * @tparam I $IDefn 189 | */ 190 | def oneOf[P[_], I]: ToOneOf[P, I] = new ToOneOf[P, I] 191 | 192 | /** Builder class used to construct a OneOfSchema value from 193 | * an HList of alternatives which are proven to provide handling for 194 | * every constructor of the sum type `I`. 195 | */ 196 | final class ToOneOf[P[_], I] { 197 | def apply[H <: HList](ctrs: H)(implicit ev: Constructors[I, Alt[Schema[P, ?], I, ?], H]): Schema[P, I] = { 198 | schema(OneOfSchema[P, Schema[P, ?], I](ev.toNel(ctrs))) 199 | } 200 | } 201 | 202 | /** Builds an un-annotated schema for the sum type `I` from a list of alternatives. 203 | * 204 | * Each alternative value in the list describes a single constructor of `I`. 205 | * For example, to construct the schema for [[scala.util.Either]] one would provide 206 | * two alternatives, one for the `Left` constructor and one for `Right`. 207 | * 208 | * This convenience constructor is unsafe in that the compiler will not prove that 209 | * handling is present for every constructor of your sum type; however, it may sometimes 210 | * be needed in the case that failures of the Scala compiler to correctly identify 211 | * all the constructors of a sum type make it otherwise impossible to build a schema 212 | * value. 213 | * 214 | * @tparam P $PDefn 215 | * @tparam I $IDefn 216 | */ 217 | def unsafeOneOf[P[_], I](alts: NonEmptyList[Alt[Schema[P, ?], I, J] forSome {type J}]): Schema[P, I] = 218 | schema(OneOfSchema[P, Schema[P, ?], I](alts)) 219 | 220 | /** Convenience constructor for oneOf schema alternatives. 221 | * 222 | * @tparam P $PDefn 223 | * @tparam I $IDefn 224 | * @tparam J The type of the base value which can be mapped into the `I` algebra. 
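 *
 * A sketch of usage (assuming a hypothetical primitive algebra `Prim`, a
 * schema `doubleSchema: Schema[Prim, Double]`, and a sum type `Shape` with a
 * constructor `case class Circle(radius: Double) extends Shape`):
 *
 * {{{
 * val circleAlt: Alt[Schema[Prim, ?], Shape, Circle] = alt(
 *   "circle",
 *   rec(required("radius", doubleSchema, Getter((_: Circle).radius)).map(Circle(_))),
 *   Prism.partial[Shape, Circle] { case c: Circle => c }(identity)
 * )
 * }}}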
225 | * @param id The unique identifier of the constructor 226 | * @param base The schema for the `J` type 227 | * @param prism Prism between the sum type and the selected constructor. 228 | */ 229 | def alt[P[_], I, J](id: String, base: Schema[P, J], prism: Prism[I, J]) = 230 | Alt[Schema[P, ?], I, J](id, base, prism) 231 | 232 | /** HAlgebra for primitive type constructor transformation. 233 | */ 234 | def hfmapAlg[P[_], Q[_]](nt: P ~> Q) = new HAlgebra[SchemaF[P, ?[_], ?], Schema[Q, ?]] { 235 | def apply[I](s: SchemaF[P, Schema[Q, ?], I]): Schema[Q, I] = hfix(s.pmap(nt)) 236 | } 237 | 238 | /** Constructs the HFunctor instance for a Schema. 239 | * 240 | * An easier-to-read type signature for this function is below: 241 | * 242 | * {{{ 243 | * implicit def hfunctor: HFunctor[Schema] 244 | * }}} 245 | */ 246 | implicit def hfunctor: HFunctor[Schema] = new HFunctor[Schema] { 247 | def hfmap[P[_], Q[_]](nt: P ~> Q) = cataNT(hfmapAlg(nt)) 248 | } 249 | 250 | implicit class SchemaOps[P[_], I](base: Schema[P, I]) { 251 | def composeIso[J](iso: Iso[I, J]): Schema[P, J] = { 252 | schema(IsoSchema[P, Schema[P, ?], I, J](base, iso)) 253 | } 254 | } 255 | } 256 | 257 | /** Implicit proof type 258 | * 259 | */ 260 | @implicitNotFound(msg = "Cannot prove the completeness of your oneOf definition; you may have not provided an alternative for each constructor of your sum type ${I}") 261 | sealed trait Constructors[I, F[_], H <: HList] { 262 | def toNel(h: H): NonEmptyList[F[I0] forSome { type I0 }] 263 | } 264 | 265 | object Constructors { 266 | implicit def evidence[I, F[_], C <: Coproduct, H0 <: HList, H1 <: HList, H <: HList](implicit 267 | G: Generic.Aux[I, C], 268 | L: ToHList.Aux[C, H1], 269 | M: Comapped.Aux[H, F, H0], 270 | A: Align[H0, H1], 271 | T: ToTraversable.Aux[H, List, F[X] forSome { type X }]): Constructors[I, F, H] = new Constructors[I, F, H] { 272 | def toNel(h: H): NonEmptyList[F[I0] forSome { type I0 }] = { 273 | h.toList.toNel.get 274 | } 275 | } 276 | } 277 | 278 | -------------------------------------------------------------------------------- /modules/core/src/main/scala/xenomorph/SchemaF.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2017 Kris Nuttycombe 3 | * All rights reserved. 4 | * 5 | * This file is part of the Scala Xenomorph library. 6 | * 7 | * GNU Lesser General Public License Usage 8 | * This file may be used under the terms of the GNU Lesser 9 | * General Public License version 3.0 as published by the Free Software 10 | * Foundation and appearing in the file LICENSE included in the 11 | * packaging of this file. Please review the following information to 12 | * ensure the GNU Lesser General Public License version 3.0 requirements 13 | * will be met: https://www.gnu.org/licenses/lgpl-3.0.txt 14 | */ 15 | package xenomorph 16 | 17 | import scalaz.~> 18 | import scalaz.FreeAp 19 | import scalaz.NonEmptyList 20 | 21 | import monocle.Iso 22 | import monocle.Getter 23 | import monocle.Prism 24 | 25 | /** The base trait for the schema GADT. 26 | * 27 | * @define PDefn The GADT type constructor for a sum type which defines 28 | * the set of primitive types used in the schema. 29 | * @define IDefn The type of the Scala value to be produced (or consumed) 30 | * by an interpreter of the schema. Also known as the "index" type 31 | * of the schema. 32 | * @define FDefn The functor through which the structure of the schema will 33 | * be interpreted. 
This will almost always be a fixpoint type such as 34 | * [[xenomorph.HCofree]], which is used to introduce the ability to 35 | * create recursive (tree-structured) schema. 36 | * 37 | * @tparam P $PDefn 38 | * @tparam F $FDefn 39 | * @tparam I $IDefn 40 | */ 41 | sealed trait SchemaF[P[_], F[_], I] { 42 | /** HFunctor operation which allows transformation of the 43 | * functor through which the structure of the schema will 44 | * be interpreted. 45 | * 46 | * Defining this operation directly on the SchemaF type 47 | * rather than in [[xenomorph.SchemaF.hfunctor]] simplifies 48 | * type inference. 49 | */ 50 | def hfmap[G[_]](nt: F ~> G): SchemaF[P, G, I] 51 | 52 | /** HFunctor operation which allows transformation of the 53 | * primitive algebra of the schema. 54 | * 55 | * Defining this operation directly on the SchemaF type 56 | * rather than in [[xenomorph.SchemaF.hfunctor]] simplifies 57 | * type inference. 58 | */ 59 | def pmap[Q[_]](nt: P ~> Q): SchemaF[Q, F, I] 60 | } 61 | 62 | object SchemaF { 63 | implicit def hfunctor[P[_]]: HFunctor[SchemaF[P, ?[_], ?]] = new HFunctor[SchemaF[P, ?[_], ?]] { 64 | def hfmap[M[_], N[_]](nt: M ~> N) = new (SchemaF[P, M, ?] ~> SchemaF[P, N, ?]) { 65 | def apply[I](fa: SchemaF[P, M, I]): SchemaF[P, N, I] = fa.hfmap(nt) 66 | } 67 | } 68 | } 69 | 70 | /** Schema constructor that wraps a value of an underlying GADT 71 | * of allowed primitive types. 72 | * 73 | * The underlying GADT defines a set of types via GADT constructors; 74 | * see [[xenomorph.json.JType]] for an example. This set of types 75 | * defines what types may be treated as primitive (and have parsing/ 76 | * serialization/etc deferred to an external handler) when interpreting 77 | * a schema value. For example, one might want to construct a GADT for 78 | * for the Scala primitive types as such: 79 | * 80 | * {{{ 81 | * sealed trait SType[I] 82 | * 83 | * case object SNullT extends SType[Unit] 84 | * case object SBoolT extends SType[Boolean] 85 | * 86 | * case object SByteT extends SType[Byte] 87 | * case object SShortT extends SType[Short] 88 | * case object SIntT extends SType[Int] 89 | * case object SLongT extends SType[Long] 90 | * 91 | * case object SFloatT extends SType[Float] 92 | * case object SDoubleT extends SType[Double] 93 | * 94 | * case object SCharT extends SType[Char] 95 | * case object SStrT extends SType[String] 96 | * }}} 97 | * 98 | * This example treats String values as primitive as well, even though 99 | * strictly speaking they're reference types, just because virtually 100 | * any interpreter for a schema algebra will not want to represent 101 | * strings in terms of sum or product types. The same might hold true 102 | * for, for example, [[scala.Array]] but for the purposes of this example 103 | * issues related to `ClassManifest` instances would introduce excessive 104 | * complexity. 105 | * 106 | * @tparam P $PDefn 107 | * @tparam F $FDefn 108 | * @tparam I $IDefn 109 | * @param prim value identifying a primitive type. 110 | */ 111 | final case class PrimSchema[P[_], F[_], I](prim: P[I]) extends SchemaF[P, F, I] { 112 | def hfmap[G[_]](nt: F ~> G) = PrimSchema[P, G, I](prim) 113 | def pmap[Q[_]](nt: P ~> Q) = PrimSchema[Q, F, I](nt(prim)) 114 | } 115 | 116 | /** Constructor that enables creation of schema for sum types. 117 | * 118 | * Each constructor of the sum type `I` is represented as a member 119 | * of the list of alternatives. 
Each alternative defines a prism 120 | * between a single constructor of the sum type, and an underlying 121 | * type describing the arguments demanded by that constructor. 122 | * 123 | * Consider the following sum type. The first constructor takes 124 | * no arguments; the second takes two. 125 | * 126 | * {{{ 127 | * sealed trait Role 128 | * 129 | * case object User extends Role 130 | * case class Administrator(department: String, subordinateCount: Int) extends Role 131 | * }}} 132 | * 133 | * A schema value for this type looks like: 134 | * 135 | * {{{ 136 | * val roleSchema = oneOf( 137 | * alt[Unit, Prim, Role, Unit]( 138 | * "user", 139 | * Schema.empty, 140 | * (_: Unit) => User, 141 | * { 142 | * case User => Some(Unit) 143 | * case _ => None 144 | * } 145 | * ) :: 146 | * alt[Unit, Prim, Role, Administrator]( 147 | * "administrator", 148 | * rec[Prim, Administrator]( 149 | * ^[Schema.Prop[Unit, Prim, Administrator, ?], String, Int, Administrator]( 150 | * required("department", Prim.str, (_: Administrator).department), 151 | * required("subordinateCount", Prim.int, (_: Administrator).subordinateCount) 152 | * )(Administrator(_, _)) 153 | * ), 154 | * identity, 155 | * { 156 | * case a @ Administrator(_, _) => Some(a) 157 | * case _ => None 158 | * } 159 | * ) :: Nil 160 | * ) 161 | * }}} 162 | * 163 | * @tparam P $PDefn 164 | * @tparam F $FDefn 165 | * @tparam I $IDefn 166 | */ 167 | final case class OneOfSchema[P[_], F[_], I](alts: NonEmptyList[Alt[F, I, I0] forSome { type I0 }]) extends SchemaF[P, F, I] { 168 | def hfmap[G[_]](nt: F ~> G) = OneOfSchema[P, G, I](alts.map(_.hfmap(nt))) 169 | def pmap[Q[_]](nt: P ~> Q) = OneOfSchema[Q, F, I](alts) 170 | } 171 | 172 | /** A prism between a base type containing the arguments required by 173 | * a single constructor of a sum type, and that sum type, along with 174 | * the schema for the base type is used to describe those constructor 175 | * arguments. The identifier is used to distinguish which constructor 176 | * is being represented in the serialized form. 177 | * 178 | * @define IDefn The type of the Scala value to be produced (or consumed) 179 | * by an interpreter of the schema. Also known as the "index" type 180 | * of the schema. 181 | * 182 | * @define FDefn The functor through which the structure of the schema will 183 | * be interpreted. This will almost always be a fixpoint type such as 184 | * [[xenomorph.HCofree]], which is used to introduce the ability to 185 | * create recursive (tree-structured) schema. 186 | * 187 | * @tparam F $FDefn 188 | * @tparam I $IDefn 189 | * @tparam I0 The base type which corresponds to the arguments to 190 | * the selected constructor. 191 | * @param id The unique identifier of the constructor 192 | * @param base The schema for the `I0` type 193 | * @param prism Prism between the sum type and the selected constructor. 194 | */ 195 | final case class Alt[F[_], I, I0](id: String, base: F[I0], prism: Prism[I, I0]) { 196 | def hfmap[G[_]](nt: F ~> G): Alt[G, I, I0] = Alt(id, nt(base), prism) 197 | } 198 | 199 | /** Wrapper for the free applicative structure which is used to construct 200 | * and disassemble values of product types. 
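 *
 * As a sketch of how an interpreter disassembles this structure (assuming
 * some record schema value `r: RecordSchema[P, F, I]` is in scope), the
 * names of the record's fields can be collected by folding the free
 * applicative into a constant functor:
 *
 * {{{
 * import scalaz.Const
 * import scalaz.std.list._
 *
 * val fieldNames: List[String] = r.props.foldMap(
 *   new (PropSchema[I, F, ?] ~> Const[List[String], ?]) {
 *     def apply[A](ps: PropSchema[I, F, A]): Const[List[String], A] =
 *       Const(List(ps.fieldName))
 *   }
 * ).getConst
 * }}}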
201 | * 202 | * @tparam P $PDefn 203 | * @tparam F $FDefn 204 | * @tparam I $IDefn 205 | * @param props the free applicative value composed of zero or more PropSchema instances 206 | */ 207 | final case class RecordSchema[P[_], F[_], I](props: FreeAp[PropSchema[I, F, ?], I]) extends SchemaF[P, F, I] { 208 | def hfmap[G[_]](nt: F ~> G) = RecordSchema[P, G, I](props.hoist[PropSchema[I, G, ?]](PropSchema.instances[I].hfmap[F, G](nt))) 209 | def pmap[Q[_]](nt: P ~> Q) = RecordSchema[Q, F, I](props) 210 | } 211 | 212 | /** Base trait for values which describe record properties. 213 | * 214 | * @define FDefn The functor through which the structure of the schema will 215 | * be interpreted. This will almost always be a fixpoint type such as 216 | * [[xenomorph.HCofree]], which is used to introduce the ability to 217 | * create recursive (tree-structured) schema. 218 | * 219 | * @tparam O The record type. 220 | * @tparam F $FDefn 221 | * @tparam I The type of the property value. 222 | */ 223 | sealed trait PropSchema[O, F[_], I] { 224 | def fieldName: String 225 | def getter: Getter[O, I] 226 | 227 | def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, I] 228 | } 229 | 230 | /** Class describing a required property of a record. 231 | * 232 | * @param fieldName The name of the property. 233 | * @param base Schema for the property's value type. 234 | * @param getter Getter lens from the record type to the property. 235 | * @param default Optional default value, for use in the case that a 236 | * serialized form is missing the property. 237 | */ 238 | final case class Required[O, F[_], I]( 239 | fieldName: String, 240 | base: F[I], 241 | getter: Getter[O, I], 242 | default: Option[I] 243 | ) extends PropSchema[O, F, I] { 244 | def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, I] = 245 | Required(fieldName, nt(base), getter, default) 246 | } 247 | 248 | /** Class describing an optional property of a record. Since in many 249 | * serialized forms optional properties may be omitted entirely from 250 | * the serialized form, a distinct type is needed in order to be able 251 | * to correctly interpret the absence of a field. 252 | * 253 | * @param fieldName The name of the property. 254 | * @param base Schema for the property's value type. 255 | * @param getter Getter lens from the record type to the property. 256 | */ 257 | final case class Optional[O, F[_], I]( 258 | fieldName: String, 259 | base: F[I], 260 | getter: Getter[O, Option[I]] 261 | ) extends PropSchema[O, F, Option[I]] { 262 | def hfmap[G[_]](nt: F ~> G): PropSchema[O, G, Option[I]] = 263 | Optional(fieldName, nt(base), getter) 264 | } 265 | 266 | object PropSchema { 267 | implicit def instances[O] = new HFunctor[PropSchema[O, ?[_], ?]] { 268 | def hfmap[M[_], N[_]](nt: M ~> N) = new (PropSchema[O, M, ?] ~> PropSchema[O, N, ?]) { 269 | def apply[I](ps: PropSchema[O, M, I]): PropSchema[O, N, I] = ps.hfmap(nt) 270 | } 271 | } 272 | 273 | def contraNT[O, N, F[_]](f: N => O) = new (PropSchema[O, F, ?] 
~> PropSchema[N, F, ?]) { 274 | def apply[I](pso: PropSchema[O, F, I]): PropSchema[N, F, I] = { 275 | pso match { 276 | case Required(n, s, g, d) => Required(n, s, Getter(f).composeGetter(g), d) 277 | case opt: Optional[O, F, i] => Optional(opt.fieldName, opt.base, Getter(f).composeGetter(opt.getter)) 278 | } 279 | } 280 | } 281 | } 282 | 283 | /** Schema constructor that wraps the schema for a base type `I` together with an isomorphism between `I` and the desired index type `J`, allowing the base schema to be reused for any type isomorphic to its index. * * @tparam P $PDefn * @tparam F $FDefn * @tparam I The index type of the wrapped base schema. * @tparam J The index type of the resulting schema. * @param base The schema for the underlying `I` type. * @param iso Isomorphism between `I` and `J`. */ case class IsoSchema[P[_], F[_], I, J](base: F[I], iso: Iso[I, J]) extends SchemaF[P, F, J] { 284 | def hfmap[G[_]](nt: F ~> G) = IsoSchema(nt(base), iso) 285 | def pmap[Q[_]](nt: P ~> Q) = IsoSchema(base, iso) 286 | } 287 | --------------------------------------------------------------------------------