├── .gitignore
├── core
│   ├── src
│   │   └── dupin
│   │       ├── package.scala
│   │       ├── basic
│   │       │   ├── package.scala
│   │       │   ├── all
│   │       │   │   └── package.scala
│   │       │   └── DupinBasicDsl.scala
│   │       ├── core
│   │       │   ├── Context.scala
│   │       │   ├── PathPart.scala
│   │       │   ├── Path.scala
│   │       │   ├── ValidatorInstances.scala
│   │       │   ├── ParserInstances.scala
│   │       │   ├── Parser.scala
│   │       │   └── Validator.scala
│   │       ├── syntax
│   │       │   ├── package.scala
│   │       │   └── DupinSyntax.scala
│   │       └── DupinCoreDsl.scala
│   ├── test
│   │   ├── src
│   │   │   └── dupin
│   │   │       ├── readme
│   │   │       │   ├── ReadmeDomainValidatorFixture.scala
│   │   │       │   ├── MessageCustomizationDomainFixture.scala
│   │   │       │   ├── KindCustomizationDomainFixture.scala
│   │   │       │   ├── CustomValidatingPackage.scala
│   │   │       │   ├── PredefinedValidatorsSpec.scala
│   │   │       │   ├── KindCustomizationSpec.scala
│   │   │       │   ├── MessageCustomizationSpec.scala
│   │   │       │   ├── QuickStartValidatorSpec.scala
│   │   │       │   └── ComplexExampleFixture.scala
│   │   │       ├── custom
│   │   │       │   └── package.scala
│   │   │       ├── CustomLawSpec.scala
│   │   │       ├── PathSpec.scala
│   │   │       ├── CustomSpec.scala
│   │   │       ├── ValidatorLawSpec.scala
│   │   │       ├── ParserLawSpec.scala
│   │   │       ├── ValidatorSpec.scala
│   │   │       └── ParserSpec.scala
│   │   ├── src-2.13+
│   │   │   └── dupin
│   │   │       └── readme
│   │   │           ├── QuickStartParserSpec.scala
│   │   │           └── ComplexExampleWithParserSpec.scala
│   │   └── src-2.12
│   │       └── dupin
│   │           └── readme
│   │               ├── QuickStartParserSpec.scala
│   │               └── ComplexExampleWithParserSpec.scala
│   ├── src-2
│   │   └── dupin
│   │       └── core
│   │           ├── ParserBinCompat.scala
│   │           ├── ParserMacro.scala
│   │           ├── PartiallyAppliedValidatorConstructorBinCompat.scala
│   │           ├── ValidatorBinCompat.scala
│   │           └── ValidatorMacro.scala
│   └── src-3
│       └── dupin
│           └── core
│               ├── ParserBinCompat.scala
│               ├── PartiallyAppliedValidatorConstructorBinCompat.scala
│               ├── ValidatorBinCompat.scala
│               └── ValidatorMacro.scala
├── .github
│   └── workflows
│       └── build.yml
├── .scalafmt.conf
├── LICENSE
├── mill
└── README.md
/.gitignore:
--------------------------------------------------------------------------------
1 | out
2 | .idea
3 | .bsp
4 | .run
--------------------------------------------------------------------------------
/core/src/dupin/package.scala:
--------------------------------------------------------------------------------
1 | package object dupin extends DupinCoreDsl
--------------------------------------------------------------------------------
/core/src/dupin/basic/package.scala:
--------------------------------------------------------------------------------
1 | package dupin
2 | 
3 | package object basic extends DupinBasicDsl
4 | 
--------------------------------------------------------------------------------
/core/src/dupin/core/Context.scala:
--------------------------------------------------------------------------------
1 | package dupin.core
2 | 
3 | case class Context[+A](path: Path, value: A)
--------------------------------------------------------------------------------
/core/src/dupin/syntax/package.scala:
--------------------------------------------------------------------------------
1 | package dupin
2 | 
3 | package object syntax extends DupinSyntax
4 | 
--------------------------------------------------------------------------------
/core/src/dupin/basic/all/package.scala:
--------------------------------------------------------------------------------
1 | package dupin.basic
2 | 
3 | import dupin.syntax.DupinSyntax
4 | 
5 | package object all extends DupinBasicDsl with DupinSyntax
6 | 
--------------------------------------------------------------------------------
/core/test/src/dupin/readme/ReadmeDomainValidatorFixture.scala:
-------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | object ReadmeDomainValidatorFixture { 4 | case class Name(value: String) 5 | case class Member(name: Name, age: Int) 6 | case class Team(name: Name, members: List[Member]) 7 | } 8 | -------------------------------------------------------------------------------- /core/test/src/dupin/readme/MessageCustomizationDomainFixture.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | object MessageCustomizationDomainFixture { 4 | case class I18nMessage( 5 | description: String, 6 | key: String, 7 | params: List[String] 8 | ) 9 | } 10 | -------------------------------------------------------------------------------- /core/src/dupin/core/PathPart.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | sealed trait PathPart { 4 | def value: String 5 | override def toString: String = value 6 | } 7 | 8 | case class FieldPart(value: String) extends PathPart 9 | 10 | case class IndexPart(index: String) extends PathPart { 11 | val value = s"[$index]" 12 | } 13 | -------------------------------------------------------------------------------- /core/test/src/dupin/readme/KindCustomizationDomainFixture.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | trait KindCustomizationDomainFixture { 4 | import scala.concurrent.Future 5 | 6 | class NameService { 7 | private val allowedNames = Set("Ada") 8 | def contains(name: String): Future[Boolean] = 9 | // Emulation of DB call 10 | Future.successful(allowedNames(name)) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - master 5 | pull_request: 6 | branches: 7 | - master 8 | jobs: 9 | core: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v3 13 | - uses: coursier/cache-action@v6 14 | - uses: actions/setup-java@v2 15 | with: 16 | distribution: 'temurin' 17 | java-version: '8' 18 | - run: ./mill __.__.test -------------------------------------------------------------------------------- /core/src-2/dupin/core/ParserBinCompat.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats.Functor 4 | import scala.language.experimental.macros 5 | 6 | trait ParserBinCompat[F[_], E, A, B] { this: Parser[F, E, A, B] => 7 | /** 8 | * Contravariant map with macros generated path prefix. 
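     * For example, as in the `comap` scaladoc of `Parser.scala`, `parser.comapP[User](_.age)`
     * reports failures under the `.age` path, whereas plain `comap` leaves the path unchanged.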
9 | * 10 | * @see [comap] 11 | */ 12 | def comapP[AA](f: AA => A): Parser[F, E, AA, B] = 13 | macro ParserMacro.comapPImpl[F, E, A, B, AA] 14 | } 15 | -------------------------------------------------------------------------------- /core/src/dupin/basic/DupinBasicDsl.scala: -------------------------------------------------------------------------------- 1 | package dupin.basic 2 | 3 | import cats.Id 4 | import dupin.DupinCoreDsl 5 | 6 | trait DupinBasicDsl extends DupinCoreDsl { 7 | type BasicValidator[A] = Validator[Id, String, A] 8 | val BasicValidator = Validator[Id, String] 9 | 10 | type BasicParser[A, R] = Parser[Id, String, A, R] 11 | val BasicParser = Parser[Id, String] 12 | 13 | type BasicMessageBuilder[-A] = MessageBuilder[A, String] 14 | } 15 | -------------------------------------------------------------------------------- /core/src-2/dupin/core/ParserMacro.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats.Functor 4 | import scala.reflect.macros.blackbox 5 | 6 | private[dupin] class ParserMacro(val c: blackbox.Context) { 7 | import dupin.core.ValidatorMacro._ 8 | 9 | def comapPImpl[F[_], E, A, B, AA](f: c.Expr[A => AA]): c.Expr[Parser[F, E, AA, B]] = { 10 | import c.universe._ 11 | c.Expr(q"""${c.prefix}.comapPE(${getFieldPath(c)(f)}, $f)""") 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /core/src-2/dupin/core/PartiallyAppliedValidatorConstructorBinCompat.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import scala.language.experimental.macros 4 | 5 | trait PartiallyAppliedValidatorConstructorBinCompat[F[_], E] { 6 | /** 7 | * Creates a root validator from implicit validators for all fields that have accessors 8 | * using macros generated path. 9 | */ 10 | def derive[A]: Validator[F, E, A] = macro ValidatorMacro.deriveImpl[F, E, A] 11 | } 12 | -------------------------------------------------------------------------------- /core/src-3/dupin/core/ParserBinCompat.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import scala.language.experimental.macros 4 | 5 | trait ParserBinCompat[F[_], E, A, B] { this: Parser[F, E, A, B] => 6 | /** 7 | * Contravariant map with macros generated path prefix. 
8 | * 9 | * @see [comap] 10 | */ 11 | inline def comapP[AA](inline f: AA => A): Parser[F, E, AA, B] = ${ 12 | ValidatorMacro.runWithFieldPath('{ path => this.comapPE(path, f) }, 'f) 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /core/test/src/dupin/custom/package.scala: -------------------------------------------------------------------------------- 1 | package dupin 2 | 3 | import dupin.readme.MessageCustomizationDomainFixture._ 4 | import dupin.syntax.DupinSyntax 5 | import scala.concurrent.Future 6 | 7 | package object custom extends DupinCoreDsl with DupinSyntax { 8 | type CustomValidator[A] = Validator[Future, I18nMessage, A] 9 | val CustomValidator = Validator[Future, I18nMessage] 10 | 11 | type CustomParser[A, B] = Parser[Future, I18nMessage, A, B] 12 | val CustomParser = Parser[Future, I18nMessage] 13 | } 14 | -------------------------------------------------------------------------------- /core/test/src/dupin/CustomLawSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin 2 | 3 | import cats.laws.discipline.ExhaustiveCheck 4 | import org.scalatest.wordspec.AnyWordSpec 5 | import org.scalatestplus.scalacheck.Checkers 6 | import org.typelevel.discipline.scalatest.WordSpecDiscipline 7 | 8 | class CustomLawSpec extends AnyWordSpec with WordSpecDiscipline with Checkers { 9 | implicit def exhaustiveCheckForContext[A: ExhaustiveCheck]: ExhaustiveCheck[Context[A]] = 10 | ExhaustiveCheck.instance(ExhaustiveCheck[A].allValues.map(Context(Path.empty, _))) 11 | } 12 | -------------------------------------------------------------------------------- /core/src/dupin/DupinCoreDsl.scala: -------------------------------------------------------------------------------- 1 | package dupin 2 | 3 | trait DupinCoreDsl { 4 | type Validator[F[_], E, A] = core.Validator[F, E, A] 5 | val Validator = core.Validator 6 | 7 | type Parser[F[_], E, A, B] = core.Parser[F, E, A, B] 8 | type IdParser[F[_], E, A] = Parser[F, E, A, A] 9 | val Parser = core.Parser 10 | 11 | type Context[+A] = core.Context[A] 12 | val Context = core.Context 13 | type MessageBuilder[-A, +E] = Context[A] => E 14 | type PathPart = core.PathPart 15 | type Path = core.Path 16 | val Path = core.Path 17 | } 18 | -------------------------------------------------------------------------------- /core/src-3/dupin/core/PartiallyAppliedValidatorConstructorBinCompat.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats.Applicative 4 | import scala.compiletime.* 5 | import scala.deriving.* 6 | import scala.quoted.* 7 | 8 | trait PartiallyAppliedValidatorConstructorBinCompat[F[_], E] { 9 | /** 10 | * Creates a root validator from implicit validators for all fields that have accessors 11 | * using macros generated path. 
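     * For example, with implicit `Name` and `Member` validators in scope (as in
     * `QuickStartValidatorSpec`), `BasicValidator.derive[Team]` checks every `Team` field
     * under its own path, such as `.name` and `.members`.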
12 | */ 13 | inline def derive[A](implicit inline A: Applicative[F]): Validator[F, E, A] = ${ 14 | ValidatorMacro.derive[F, E, A]('A) 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /core/src/dupin/core/Path.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats.data.Chain 4 | 5 | case class Path(chain: Chain[PathPart]) { 6 | def concat(p: Path): Path = Path(chain.concat(p.chain)) 7 | def ++(p: Path): Path = concat(p) 8 | def prepend(p: PathPart): Path = Path(chain.prepend(p)) 9 | def +:(p: PathPart): Path = prepend(p) 10 | def append(p: PathPart): Path = Path(chain.append(p)) 11 | def :+(p: PathPart): Path = append(p) 12 | override def toString: String = chain.iterator.mkString(".", ".", "") 13 | } 14 | 15 | object Path { 16 | val empty: Path = Path(Chain.empty) 17 | def apply(elems: PathPart*): Path = Path(Chain(elems: _*)) 18 | } 19 | -------------------------------------------------------------------------------- /core/test/src/dupin/PathSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin 2 | 3 | import dupin.core.FieldPart 4 | import dupin.core.IndexPart 5 | import org.scalatest.freespec.AnyFreeSpec 6 | 7 | class PathSpec extends AnyFreeSpec { 8 | "Path should pass basic checks" in { 9 | val namePart = FieldPart("name") 10 | val firstPart = IndexPart("0") 11 | assert(Path.empty ++ Path.empty == Path.empty) 12 | assert(Path(namePart) ++ Path(firstPart) == Path(namePart, firstPart)) 13 | assert(namePart +: Path.empty == Path(namePart)) 14 | assert(Path.empty :+ namePart == Path(namePart)) 15 | assert(Path.empty.toString() == ".") 16 | assert(Path(namePart).toString() == ".name") 17 | assert(Path(namePart, firstPart).toString() == ".name.[0]") 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = 3.7.12 2 | runner.dialect = scala213 3 | maxColumn = 120 4 | assumeStandardLibraryStripMargin = true 5 | indent { 6 | main = 4 7 | callSite = 4 8 | } 9 | indentOperator.exemptScope = aloneEnclosed 10 | align.tokens = [] 11 | rewrite { 12 | rules = [Imports, SortModifiers] 13 | trailingCommas.style = keep 14 | imports { 15 | expand = true 16 | sort = original 17 | } 18 | } 19 | newlines { 20 | source = keep 21 | avoidForSimpleOverflow = [tooLong, slc] 22 | topLevelStatementBlankLines = [ 23 | { 24 | maxNest = 0 25 | blanks = 1 26 | }, 27 | { 28 | minBreaks = 2 29 | blanks = 1 30 | } 31 | ] 32 | selectChains = unfold 33 | } 34 | docstrings { 35 | wrap = no 36 | style = Asterisk 37 | removeEmpty = true 38 | } 39 | binPack.parentConstructors = keep 40 | project.git = false -------------------------------------------------------------------------------- /core/src/dupin/syntax/DupinSyntax.scala: -------------------------------------------------------------------------------- 1 | package dupin.syntax 2 | 3 | import cats.Functor 4 | import cats.data.IorNec 5 | import cats.data.ValidatedNec 6 | import dupin.core.Parser 7 | import dupin.core.Validator 8 | 9 | trait DupinSyntax { 10 | implicit def validatableOps[A](value: A): ValidatableOps[A] = new ValidatableOps(value) 11 | implicit def parsableOps[A](value: A): ParsableOps[A] = new ParsableOps(value) 12 | } 13 | 14 | class ValidatableOps[A](val value: A) extends AnyVal { 15 | def validate[F[_], E](implicit V: 
Validator[F, E, A], F: Functor[F]): F[ValidatedNec[E, A]] = 16 | V.validate(value) 17 | 18 | def isValid[F[_], E](implicit V: Validator[F, E, A], F: Functor[F]): F[Boolean] = 19 | F.map(V.validate(value))(_.isValid) 20 | } 21 | 22 | class ParsableOps[A](val value: A) extends AnyVal { 23 | def parse[F[_], E, B](implicit A: Parser[F, E, A, B]): F[IorNec[E, B]] = 24 | A.parse(value) 25 | } 26 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Yakiv Yereskovskyi 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /core/test/src/dupin/readme/CustomValidatingPackage.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | import dupin.readme.MessageCustomizationDomainFixture._ 4 | import dupin.readme.ReadmeDomainValidatorFixture._ 5 | import org.scalatest.freespec.AsyncFreeSpec 6 | import scala.concurrent.Future 7 | 8 | class CustomValidatingPackage extends AsyncFreeSpec with KindCustomizationDomainFixture { 9 | "Custom validating package should" - { 10 | "be correct" in { 11 | import cats.implicits._ 12 | import dupin.custom._ 13 | 14 | val nameService = new NameService 15 | 16 | implicit val nameValidator: CustomValidator[Name] = CustomValidator.rootF[Name]( 17 | n => nameService.contains(n.value), 18 | c => I18nMessage( 19 | s"${c.path} should be non empty", 20 | "validator.name.empty", 21 | List(c.path.toString()) 22 | ) 23 | ) 24 | 25 | val validName = Name("Ada") 26 | val valid: Future[Boolean] = validName.isValid 27 | 28 | valid.map(assert(_)) 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /core/test/src/dupin/readme/PredefinedValidatorsSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | import cats.data.Validated 4 | import dupin.readme.ReadmeDomainValidatorFixture._ 5 | import org.scalatest.freespec.AnyFreeSpec 6 | 7 | trait PredefinedValidatorsFixture { 8 | import dupin.basic.all._ 9 | 10 | def min(value: Int) = BasicValidator.root[Int](_ > value, c => s"${c.path} should be greater than $value") 11 | def max(value: Int) = BasicValidator.root[Int](_ < value, c => s"${c.path} should be less than $value") 12 | } 13 | 14 
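//An illustrative sketch, not part of the original fixture: predefined validators are plain
//values, so they can be composed into new reusable ones; the hypothetical `between` helper
//below simply chains `min` and `max` with `&&`.
trait PredefinedValidatorsCompositionFixture extends PredefinedValidatorsFixture {
    import dupin.basic.all._

    def between(from: Int, to: Int): BasicValidator[Int] = min(from) && max(to)
}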
| class PredefinedValidatorsSpec extends AnyFreeSpec with PredefinedValidatorsFixture { 15 | "Predefined validators should" - { 16 | "be correct" in { 17 | import cats._ 18 | import dupin.basic.all._ 19 | 20 | implicit val memberValidator: BasicValidator[Member] = BasicValidator 21 | .success[Member] 22 | .combineP(_.age)(min(18) && max(40).failureAs(_ => "updated validation message")) 23 | 24 | val invalidMember = Member(Name("Ada"), 0) 25 | val result = invalidMember.validate 26 | 27 | assert(result == Validated.invalidNec(".age should be greater than 18")) 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /core/test/src/dupin/CustomSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin 2 | 3 | import org.scalatest.Assertion 4 | import org.scalatest.exceptions.TestFailedException 5 | import org.scalatest.freespec.AsyncFreeSpec 6 | import org.scalactic.source.Position 7 | import scala.util.matching.Regex 8 | 9 | class CustomSpec extends AsyncFreeSpec { 10 | def assertCompilationErrorMessagePattern( 11 | compilesAssert: => Assertion, 12 | pattern: Regex, 13 | )(implicit 14 | pos: Position, 15 | ): Assertion = { 16 | try { 17 | compilesAssert 18 | fail("Compilation was successful") 19 | } catch { 20 | case e: TestFailedException => 21 | val from = "but got the following type error: \"" 22 | val to = "\", for code:" 23 | val message = e.getMessage().substring( 24 | e.getMessage().indexOf(from) + from.size, 25 | e.getMessage().indexOf(to), 26 | ) 27 | assert( 28 | pattern.pattern.matcher(message).matches(), 29 | s"""\nCompilation error "$message" does not match "$pattern" pattern""", 30 | ) 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /core/src/dupin/core/ValidatorInstances.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats._ 4 | 5 | trait ValidatorInstances { 6 | implicit final def validatorContravariantMonoidal[F[_]: Applicative, E]: ContravariantMonoidal[Validator[F, E, *]] = 7 | new ContravariantMonoidal[Validator[F, E, *]] { 8 | override def unit: Validator[F, E, Unit] = Validator[F, E].success[Unit] 9 | override def product[A, B](fa: Validator[F, E, A], fb: Validator[F, E, B]): Validator[F, E, (A, B)] = 10 | fa.product(fb) 11 | override def contramap[A, B](fa: Validator[F, E, A])(f: B => A): Validator[F, E, B] = fa.comap(f) 12 | } 13 | 14 | implicit final def validatorMonoidK[F[_]: Applicative, E]: MonoidK[Validator[F, E, *]] = 15 | new MonoidK[Validator[F, E, *]] { 16 | override def empty[A]: Validator[F, E, A] = Validator[F, E].success[A] 17 | override def combineK[A](x: Validator[F, E, A], y: Validator[F, E, A]): Validator[F, E, A] = 18 | x.combine(y) 19 | } 20 | 21 | implicit final def validatorLiftedToTraverseP[F[_], E, A, G[_]](implicit 22 | v: Validator[F, E, A], 23 | F: Applicative[F], 24 | G: Traverse[G], 25 | ): Validator[F, E, G[A]] = v.liftToTraverseP[G] 26 | } 27 | -------------------------------------------------------------------------------- /core/test/src/dupin/ValidatorLawSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin 2 | 3 | import cats._ 4 | import cats.data.ValidatedNec 5 | import cats.implicits._ 6 | import cats.laws.discipline.ExhaustiveCheck 7 | import cats.laws.discipline.SemigroupalTests.Isomorphisms 8 | import cats.laws.discipline._ 9 | import 
cats.laws.discipline.arbitrary._ 10 | import cats.laws.discipline.eq._ 11 | import org.scalacheck.Arbitrary 12 | import org.scalacheck.Arbitrary._ 13 | import org.scalatest.wordspec.AnyWordSpec 14 | import org.scalatestplus.scalacheck.Checkers 15 | import org.typelevel.discipline.scalatest.WordSpecDiscipline 16 | 17 | class ValidatorLawSpec extends CustomLawSpec { 18 | implicit def isomorphismsForValidator[F[_], E](implicit 19 | V: Invariant[Validator[F, E, *]] 20 | ): Isomorphisms[Validator[F, E, *]] = Isomorphisms.invariant[Validator[F, E, *]] 21 | 22 | implicit def catsLawsArbitraryForValidator[F[_]: Functor, E, A](implicit 23 | V: Arbitrary[A => F[ValidatedNec[E, Unit]]] 24 | ): Arbitrary[Validator[F, E, A]] = 25 | Arbitrary(V.arbitrary.map(f => Validator[F, E].runF(c => f(c.value)))) 26 | 27 | implicit def validatorEq[F[_], E, A](implicit 28 | A: ExhaustiveCheck[Context[A]], 29 | FE: Eq[F[ValidatedNec[E, Unit]]], 30 | ): Eq[Validator[F, E, A]] = 31 | Eq.by[Validator[F, E, A], Context[A] => F[ValidatedNec[E, Unit]]](validator => c => validator.runF(c)) 32 | 33 | checkAll( 34 | "Validator[Option, String, *].MonoidKTests", 35 | MonoidKTests[Validator[Option, String, *]].monoidK[MiniInt] 36 | ) 37 | checkAll( 38 | "Validator[Option, String, *].ContravariantMonoidalTests", 39 | ContravariantMonoidalTests[Validator[Option, String, *]].contravariantMonoidal[MiniInt, Boolean, Boolean] 40 | ) 41 | } 42 | -------------------------------------------------------------------------------- /core/test/src/dupin/readme/KindCustomizationSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | import cats.data.NonEmptyChain 4 | import cats.data.Validated 5 | import cats.data.ValidatedNec 6 | import dupin._ 7 | import dupin.readme.ReadmeDomainValidatorFixture._ 8 | import org.scalatest.freespec.AsyncFreeSpec 9 | import scala.concurrent.Future 10 | 11 | trait KindCustomizationDslFixture extends KindCustomizationDomainFixture { 12 | import scala.concurrent.Future 13 | 14 | type FutureValidator[A] = Validator[Future, String, A] 15 | val FutureValidator = Validator[Future, String] 16 | } 17 | 18 | trait KindCustomizationValidatorFixture extends AsyncFreeSpec with KindCustomizationDslFixture { 19 | import cats.implicits._ 20 | import scala.concurrent.Future 21 | 22 | val nameService = new NameService 23 | 24 | implicit val nameValidator: FutureValidator[Name] = FutureValidator.rootF[Name]( 25 | n => nameService.contains(n.value), c => s"${c.path} should be non empty" 26 | ) 27 | 28 | implicit val memberValidator: FutureValidator[Member] = FutureValidator.success[Member] 29 | .combinePI(_.name) 30 | .combinePR(_.age)(a => a > 18 && a < 40, c => s"${c.path} should be between 18 and 40") 31 | } 32 | 33 | class KindCustomizationSpec extends KindCustomizationValidatorFixture { 34 | "Kind customization validators should" - { 35 | "return custom kind" in { 36 | import dupin.syntax._ 37 | 38 | val invalidMember = Member(Name(""), 0) 39 | val result: Future[ValidatedNec[String, Member]] = invalidMember.validate 40 | 41 | result.map(r => assert(r == Validated.invalid(NonEmptyChain( 42 | ".name should be non empty", 43 | ".age should be between 18 and 40" 44 | )))) 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /core/src-2/dupin/core/ValidatorBinCompat.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats.Functor 4 | 
import dupin.core.Validator.PartiallyAppliedCombineP 5 | import dupin.core.Validator.PartiallyAppliedCombinePC 6 | import dupin.core.Validator.PartiallyAppliedCombinePR 7 | import dupin.core.Validator.PartiallyAppliedCombinePRF 8 | import scala.language.experimental.macros 9 | 10 | trait ValidatorBinCompat[F[_], E, A] { this: Validator[F, E, A] => 11 | /** 12 | * Contravariant map with macros generated path prefix. 13 | * 14 | * @see [comap] 15 | */ 16 | def comapP[AA](f: AA => A): Validator[F, E, AA] = 17 | macro ValidatorMacro.comapPImpl[F, E, A, AA] 18 | 19 | /** 20 | * Combines with field validator using macros generated path. 21 | */ 22 | def combineP[AA](f: A => AA): PartiallyAppliedCombineP[F, E, A, AA] = 23 | macro ValidatorMacro.combinePImpl[F, E, A, AA] 24 | 25 | /** 26 | * Combines with field validator from context using macros generated path. 27 | */ 28 | def combinePC[AA](f: A => AA): PartiallyAppliedCombinePC[F, E, A, AA] = 29 | macro ValidatorMacro.combinePCImpl[F, E, A, AA] 30 | 31 | /** 32 | * Combines with field validator passed by separate arguments using macros generated path. 33 | */ 34 | def combinePR[AA](f: A => AA): PartiallyAppliedCombinePR[F, E, A, AA] = 35 | macro ValidatorMacro.combinePRImpl[F, E, A, AA] 36 | 37 | def combinePRF[AA](f: A => AA): PartiallyAppliedCombinePRF[F, E, A, AA] = 38 | macro ValidatorMacro.combinePRFImpl[F, E, A, AA] 39 | 40 | /** 41 | * Combines with implicit field validator using macros generated path 42 | */ 43 | def combinePI[AA](f: A => AA)(implicit AA: Validator[F, E, AA]): Validator[F, E, A] = 44 | macro ValidatorMacro.combinePIImpl[F, E, A, AA] 45 | } 46 | -------------------------------------------------------------------------------- /core/test/src/dupin/ParserLawSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin 2 | 3 | import cats._ 4 | import cats.data._ 5 | import cats.implicits._ 6 | import cats.laws.discipline.ExhaustiveCheck 7 | import cats.laws.discipline.SemigroupalTests.Isomorphisms 8 | import cats.laws.discipline._ 9 | import cats.laws.discipline.arbitrary._ 10 | import cats.laws.discipline.eq._ 11 | import org.scalacheck.Arbitrary 12 | import org.scalacheck.Arbitrary._ 13 | import org.scalatest.wordspec.AnyWordSpec 14 | import org.scalatestplus.scalacheck.Checkers 15 | import org.typelevel.discipline.scalatest.WordSpecDiscipline 16 | 17 | class ParserLawSpec extends CustomLawSpec { 18 | implicit def isomorphismsForParser[F[_], E, A](implicit 19 | V: Invariant[Parser[F, E, A, *]] 20 | ): Isomorphisms[Parser[F, E, A, *]] = Isomorphisms.invariant[Parser[F, E, A, *]] 21 | 22 | implicit def catsLawsArbitraryForParser[F[_] : Functor, E, A, B](implicit 23 | B: Arbitrary[A => F[IorNec[E, B]]] 24 | ): Arbitrary[Parser[F, E, A, B]] = 25 | Arbitrary(B.arbitrary.map(f => Parser[F, E].runF(c => f(c.value)))) 26 | 27 | implicit def parserEq[F[_], E, A, B](implicit 28 | A: ExhaustiveCheck[Context[A]], 29 | B: Eq[F[IorNec[E, B]]], 30 | ): Eq[Parser[F, E, A, B]] = 31 | Eq.by[Parser[F, E, A, B], Context[A] => F[IorNec[E, B]]](p => c => p.runF(c)) 32 | 33 | checkAll( 34 | "Parser[Option, String, MiniInt, *].MonadTests", 35 | MonadTests[Parser[Option, String, MiniInt, *]].monad[Int, Int, Int] 36 | ) 37 | checkAll( 38 | "Parser[Option, String, MiniInt, *].ParallelTests", 39 | ParallelTests[Parser[Option, String, MiniInt, *]].parallel[MiniInt, MiniInt] 40 | ) 41 | checkAll( 42 | "Parser[Option, String, *, *].ArrowTests", 43 | ArrowTests[Parser[Option, String, *, *]].arrow[MiniInt, 
Boolean, MiniInt, Boolean, MiniInt, Boolean] 44 | ) 45 | } 46 | -------------------------------------------------------------------------------- /mill: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # This is a wrapper script, that automatically download mill from GitHub release pages 4 | # You can give the required mill version with MILL_VERSION env variable 5 | # If no version is given, it falls back to the value of DEFAULT_MILL_VERSION 6 | DEFAULT_MILL_VERSION=0.10.11 7 | 8 | set -e 9 | 10 | if [ -z "$MILL_VERSION" ] ; then 11 | if [ -f ".mill-version" ] ; then 12 | MILL_VERSION="$(head -n 1 .mill-version 2> /dev/null)" 13 | elif [ -f "mill" ] && [ "$BASH_SOURCE" != "mill" ] ; then 14 | MILL_VERSION=$(grep -F "DEFAULT_MILL_VERSION=" "mill" | head -n 1 | cut -d= -f2) 15 | else 16 | MILL_VERSION=$DEFAULT_MILL_VERSION 17 | fi 18 | fi 19 | 20 | if [ "x${XDG_CACHE_HOME}" != "x" ] ; then 21 | MILL_DOWNLOAD_PATH="${XDG_CACHE_HOME}/mill/download" 22 | else 23 | MILL_DOWNLOAD_PATH="${HOME}/.cache/mill/download" 24 | fi 25 | MILL_EXEC_PATH="${MILL_DOWNLOAD_PATH}/${MILL_VERSION}" 26 | 27 | version_remainder="$MILL_VERSION" 28 | MILL_MAJOR_VERSION="${version_remainder%%.*}"; version_remainder="${version_remainder#*.}" 29 | MILL_MINOR_VERSION="${version_remainder%%.*}"; version_remainder="${version_remainder#*.}" 30 | 31 | if [ ! -s "$MILL_EXEC_PATH" ] ; then 32 | mkdir -p $MILL_DOWNLOAD_PATH 33 | if [ "$MILL_MAJOR_VERSION" -gt 0 ] || [ "$MILL_MINOR_VERSION" -ge 5 ] ; then 34 | ASSEMBLY="-assembly" 35 | fi 36 | DOWNLOAD_FILE=$MILL_EXEC_PATH-tmp-download 37 | MILL_VERSION_TAG=$(echo $MILL_VERSION | sed -E 's/([^-]+)(-M[0-9]+)?(-.*)?/\1\2/') 38 | MILL_DOWNLOAD_URL="https://github.com/com-lihaoyi/mill/releases/download/${MILL_VERSION_TAG}/$MILL_VERSION${ASSEMBLY}" 39 | curl --fail -L -o "$DOWNLOAD_FILE" "$MILL_DOWNLOAD_URL" 40 | chmod +x "$DOWNLOAD_FILE" 41 | mv "$DOWNLOAD_FILE" "$MILL_EXEC_PATH" 42 | unset DOWNLOAD_FILE 43 | unset MILL_DOWNLOAD_URL 44 | fi 45 | 46 | unset MILL_DOWNLOAD_PATH 47 | unset MILL_VERSION 48 | 49 | exec $MILL_EXEC_PATH "$@" 50 | -------------------------------------------------------------------------------- /core/test/src/dupin/readme/MessageCustomizationSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | import cats.data.NonEmptyChain 4 | import cats.data.Validated 5 | import dupin.readme.MessageCustomizationDomainFixture._ 6 | import dupin.readme.ReadmeDomainValidatorFixture._ 7 | import org.scalatest.freespec.AnyFreeSpec 8 | 9 | trait MessageCustomizationDslFixture { 10 | import dupin._ 11 | 12 | type I18nValidator[A] = Validator[cats.Id, I18nMessage, A] 13 | val I18nValidator = Validator[cats.Id, I18nMessage] 14 | } 15 | 16 | trait MessageCustomizationValidatorFixture extends MessageCustomizationDslFixture { 17 | import cats._ 18 | 19 | implicit val nameValidator: I18nValidator[Name] = I18nValidator.root[Name]( 20 | _.value.nonEmpty, c => I18nMessage( 21 | s"${c.path} should be non empty", 22 | "validator.name.empty", 23 | List(c.path.toString()) 24 | ) 25 | ) 26 | 27 | implicit val memberValidator: I18nValidator[Member] = I18nValidator.success[Member] 28 | .combinePI(_.name) 29 | .combinePR(_.age)(a => a > 18 && a < 40, c => I18nMessage( 30 | s"${c.path} should be between 18 and 40", 31 | "validator.member.age", 32 | List(c.path.toString()) 33 | )) 34 | } 35 | 36 | class MessageCustomizationSpec extends AnyFreeSpec with 
MessageCustomizationValidatorFixture { 37 | "Message customization validators should" - { 38 | "return custom messages" in { 39 | import dupin.syntax._ 40 | 41 | val invalidMember = Member(Name(""), 0) 42 | val result = invalidMember.validate 43 | 44 | assert(result == Validated.invalid(NonEmptyChain( 45 | I18nMessage( 46 | ".name should be non empty", 47 | "validator.name.empty", 48 | List(".name") 49 | ), 50 | I18nMessage( 51 | ".age should be between 18 and 40", 52 | "validator.member.age", 53 | List(".age") 54 | ) 55 | ))) 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /core/src-3/dupin/core/ValidatorBinCompat.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats.Applicative 4 | import cats.Functor 5 | import dupin.core.Validator.PartiallyAppliedCombineP 6 | import dupin.core.Validator.PartiallyAppliedCombinePC 7 | import dupin.core.Validator.PartiallyAppliedCombinePR 8 | import dupin.core.Validator.PartiallyAppliedCombinePRF 9 | 10 | trait ValidatorBinCompat[F[_], E, A] { this: Validator[F, E, A] => 11 | /** 12 | * Contravariant map with macros generated path prefix. 13 | * 14 | * @see [comap] 15 | */ 16 | inline def comapP[AA](inline f: AA => A): Validator[F, E, AA] = ${ 17 | ValidatorMacro.runWithFieldPath('{path => this.comapPE(path, f)}, 'f) 18 | } 19 | 20 | /** 21 | * Combines with field validator using macros generated path. 22 | */ 23 | inline def combineP[AA](inline f: A => AA): PartiallyAppliedCombineP[F, E, A, AA] = ${ 24 | ValidatorMacro.runWithFieldPath('{path => PartiallyAppliedCombineP(this, path, f)}, 'f) 25 | } 26 | 27 | /** 28 | * Combines with field validator from context using macros generated path. 29 | */ 30 | inline def combinePC[AA](inline f: A => AA): PartiallyAppliedCombinePC[F, E, A, AA] = ${ 31 | ValidatorMacro.runWithFieldPath('{path => PartiallyAppliedCombinePC(this, path, f)}, 'f) 32 | } 33 | 34 | /** 35 | * Combines with field validator passed by separate arguments using macros generated path. 
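     * For example, `.combinePR(_.age)(a => a > 18 && a < 40, c => s"${c.path} should be
     * between 18 and 40")` (as used in `QuickStartValidatorSpec`) validates the `age` field
     * and reports failures under the `.age` path.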
36 | */ 37 | inline def combinePR[AA](inline f: A => AA): PartiallyAppliedCombinePR[F, E, A, AA] = ${ 38 | ValidatorMacro.runWithFieldPath('{path => PartiallyAppliedCombinePR(this, path, f)}, 'f) 39 | } 40 | 41 | inline def combinePRF[AA](inline f: A => AA): PartiallyAppliedCombinePRF[F, E, A, AA] = ${ 42 | ValidatorMacro.runWithFieldPath('{path => PartiallyAppliedCombinePRF(this, path, f)}, 'f) 43 | } 44 | 45 | /** 46 | * Combines with implicit field validator using macros generated path 47 | */ 48 | inline def combinePI[AA]( 49 | inline f: A => AA)( 50 | implicit V: Validator[F, E, AA], A: Applicative[F] 51 | ): Validator[F, E, A] = ${ 52 | ValidatorMacro.runWithFieldPath('{path => this.combinePE(path, f)(V)}, 'f) 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /core/src-3/dupin/core/ValidatorMacro.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats.Applicative 4 | import scala.quoted.* 5 | 6 | object ValidatorMacro { 7 | def runWithFieldPath[A : Type](using q: Quotes)(run: Expr[Path => A], f: Expr[_ => _]): Expr[A] = 8 | '{${run}(${getFieldPath(f)})} 9 | 10 | def getFieldPath(using q: Quotes)(f: Expr[_ => _]): Expr[Path] = { 11 | import q.reflect.* 12 | def abort = report.throwError(s"Unable to retrieve field path from function ${f.show}") 13 | def rec(argName: String, selects: Tree, acc: Expr[Path]): Expr[Path] = selects match { 14 | case Ident(identName) if identName == argName => acc 15 | case Select(qualifier, name) => '{ 16 | ${rec(argName, qualifier, acc).asExprOf[Path]} 17 | .append(FieldPart(${Literal(StringConstant(name)).asExprOf[String]})) 18 | } 19 | case _ => abort 20 | } 21 | f.asTerm match { 22 | case Inlined(_, _, Lambda(List(ValDef(argName, _, _)), selects)) => 23 | rec(argName, selects, '{ Path.empty }) 24 | case _ => abort 25 | } 26 | } 27 | 28 | def derive[F[_] : Type, E : Type, A : Type](using q: Quotes)(A: Expr[Applicative[F]]): Expr[Validator[F, E, A]] = { 29 | import q.reflect.* 30 | val atpe = TypeRepr.of[A] 31 | atpe.typeSymbol.fieldMembers.sortBy(_.fullName).map(_.tree).collect { 32 | case m: ValDef => m 33 | }.foldLeft('{Validator[F, E].success[A]($A)}) { case (t, m) => 34 | atpe.memberType(m.symbol).asType match { case '[t] => 35 | val resolvedValidator = Implicits.search(TypeRepr.of[Validator[F, E, t]]) match { 36 | case iss: ImplicitSearchSuccess => iss.tree.asExpr.asInstanceOf[Expr[Validator[F, E, t]]] 37 | case isf: ImplicitSearchFailure => report.errorAndAbort(isf.explanation) 38 | } 39 | '{ 40 | $t.combinePE( 41 | Path(FieldPart(${Literal(StringConstant(m.symbol.name)).asExprOf[String]})), 42 | a => ${Select('a.asTerm, m.symbol).asExprOf[t]} 43 | )($resolvedValidator)($A) 44 | } 45 | } 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /core/test/src-2.13+/dupin/readme/QuickStartParserSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | import cats.data.Ior 4 | import cats.data.NonEmptyChain 5 | import org.scalatest.freespec.AnyFreeSpec 6 | import dupin.readme.ReadmeDomainValidatorFixture._ 7 | 8 | trait QuickStartParserFixture { 9 | import cats._ 10 | import cats.implicits._ 11 | import dupin.basic.all._ 12 | 13 | case class RawMember(name: String, age: Int) 14 | case class RawTeam(name: String, members: List[RawMember]) 15 | 16 | //parser for simple type or value class 17 | implicit val 
nameParser: BasicParser[String, Name] = BasicParser.root[String, Name]( 18 | Option(_).filter(_.nonEmpty).map(Name.apply), 19 | c => s"${c.path} should be non empty", 20 | ) 21 | 22 | //idiomatic parser for complex type 23 | implicit val memberParser: BasicParser[RawMember, Member] = 24 | ( 25 | nameParser.comapP[RawMember](_.name), 26 | BasicParser.idRoot[Int]( 27 | Option(_).filter(a => a > 18 && a < 40), 28 | c => s"${c.path} should be between 18 and 40", 29 | ).comapP[RawMember](_.age), 30 | ) 31 | .parMapN(Member.apply) 32 | 33 | implicit val teamParser: BasicParser[RawTeam, Team] = 34 | ( 35 | nameParser.comapP[RawTeam](_.name), 36 | memberParser.liftToTraverseCombiningP[List].comapP[RawTeam](_.members), 37 | ) 38 | .parMapN(Team.apply) 39 | .andThen( 40 | //if you need id parser that filters out value by condition, 41 | //you can simply create a validator and convert it to parser 42 | BasicValidator 43 | .root[Team](_.members.size <= 8, _ => "team should be fed with two pizzas!") 44 | .toParser 45 | ) 46 | } 47 | 48 | class QuickStartParserSpec extends AnyFreeSpec with QuickStartParserFixture { 49 | "Readme parsers should" - { 50 | "be correct" in { 51 | import dupin.basic.all._ 52 | 53 | val validTeam = RawTeam( 54 | "Bears", 55 | List( 56 | RawMember("Yakiv", 26), 57 | RawMember("Myroslav", 31), 58 | RawMember("Andrii", 25) 59 | ) 60 | ) 61 | 62 | val invalidTeam = RawTeam( 63 | "", 64 | RawMember("", 0) :: (1 to 10).map(_ => RawMember("Valid name", 20)).toList 65 | ) 66 | 67 | assert(validTeam.parse == Ior.right(Team( 68 | Name("Bears"), 69 | List( 70 | Member(Name("Yakiv"), 26), 71 | Member(Name("Myroslav"), 31), 72 | Member(Name("Andrii"), 25) 73 | ) 74 | ))) 75 | assert(invalidTeam.parse == Ior.left(NonEmptyChain( 76 | ".name should be non empty", 77 | ".members.[0].name should be non empty", 78 | ".members.[0].age should be between 18 and 40", 79 | ))) 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /core/test/src-2.12/dupin/readme/QuickStartParserSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | import cats.data.Ior 4 | import cats.data.NonEmptyChain 5 | import org.scalatest.freespec.AnyFreeSpec 6 | import dupin.readme.ReadmeDomainValidatorFixture._ 7 | 8 | trait QuickStartParserFixture { 9 | import cats._ 10 | import cats.implicits._ 11 | import dupin.basic.all._ 12 | 13 | case class RawMember(name: String, age: Int) 14 | case class RawTeam(name: String, members: List[RawMember]) 15 | 16 | //parser for simple type or value class 17 | implicit val nameParser: BasicParser[String, Name] = BasicParser.root[String, Name]( 18 | Option(_).filter(_.nonEmpty).map(Name.apply), 19 | c => s"${c.path} should be non empty", 20 | ) 21 | 22 | //idiomatic parser for complex type 23 | implicit val memberParser: BasicParser[RawMember, Member] = 24 | Parser.parserParallelWithSequentialEffect[cats.Id, String, RawMember].applicative.map2( 25 | nameParser.comapP[RawMember](_.name), 26 | BasicParser.idRoot[Int]( 27 | Option(_).filter(a => a > 18 && a < 40), 28 | c => s"${c.path} should be between 18 and 40", 29 | ).comapP[RawMember](_.age), 30 | )(Member.apply) 31 | 32 | implicit val teamParser: BasicParser[RawTeam, Team] = 33 | Parser.parserParallelWithSequentialEffect[cats.Id, String, RawTeam].applicative.map2( 34 | nameParser.comapP[RawTeam](_.name), 35 | memberParser.liftToTraverseCombiningP[List].comapP[RawTeam](_.members), 36 | )(Team.apply) 37 | .andThen( 38 
| //if you need id parser that filters out value by condition, 39 | //you can simply create a validator and convert it to parser 40 | BasicValidator 41 | .root[Team](_.members.size <= 8, _ => "team should be fed with two pizzas!") 42 | .toParser 43 | ) 44 | } 45 | 46 | class QuickStartParserSpec extends AnyFreeSpec with QuickStartParserFixture { 47 | "Readme parsers should" - { 48 | "be correct" in { 49 | import dupin.basic.all._ 50 | 51 | val validTeam = RawTeam( 52 | "Bears", 53 | List( 54 | RawMember("Yakiv", 26), 55 | RawMember("Myroslav", 31), 56 | RawMember("Andrii", 25) 57 | ) 58 | ) 59 | 60 | val invalidTeam = RawTeam( 61 | "", 62 | RawMember("", 0) :: (1 to 10).map(_ => RawMember("Valid name", 20)).toList 63 | ) 64 | 65 | assert(validTeam.parse == Ior.right(Team( 66 | Name("Bears"), 67 | List( 68 | Member(Name("Yakiv"), 26), 69 | Member(Name("Myroslav"), 31), 70 | Member(Name("Andrii"), 25) 71 | ) 72 | ))) 73 | assert(invalidTeam.parse == Ior.left(NonEmptyChain( 74 | ".name should be non empty", 75 | ".members.[0].name should be non empty", 76 | ".members.[0].age should be between 18 and 40", 77 | ))) 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /core/test/src/dupin/readme/QuickStartValidatorSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | import cats.data.NonEmptyChain 4 | import cats.data.Validated 5 | import org.scalatest.freespec.AnyFreeSpec 6 | import dupin.readme.ReadmeDomainValidatorFixture._ 7 | 8 | trait QuickStartValidatorFixture { 9 | import cats._ 10 | import dupin.basic.all._ 11 | 12 | //validator for simple type or value class 13 | implicit val nameValidator: BasicValidator[Name] = BasicValidator 14 | .root[Name](_.value.nonEmpty, c => s"${c.path} should be non empty") 15 | 16 | //idiomatic validator for complex type 17 | implicit val memberValidator: BasicValidator[Member] = 18 | nameValidator.comapP[Member](_.name) combine 19 | BasicValidator.root[Int]( 20 | a => a > 18 && a < 40, c => s"${c.path} should be between 18 and 40" 21 | ).comapP[Member](_.age) 22 | 23 | //same validator but with combination helpers for better type resolving 24 | val alternativeMemberValidator: BasicValidator[Member] = BasicValidator 25 | .success[Member] 26 | .combineP(_.name)(nameValidator) 27 | .combinePR(_.age)(a => a > 18 && a < 40, c => s"${c.path} should be between 18 and 40") 28 | 29 | //derived validator 30 | implicit val teamValidator: BasicValidator[Team] = BasicValidator 31 | .derive[Team] 32 | .combineR(_.members.size <= 8, _ => "team should be fed with two pizzas!") 33 | 34 | //two stage validator 35 | val failingTeamValidator: BasicValidator[Team] = teamValidator 36 | .andThen(BasicValidator.failure[Team](_ => "validation error after heavy computations")) 37 | } 38 | 39 | class QuickStartValidatorSpec extends AnyFreeSpec with QuickStartValidatorFixture { 40 | "Readme validators should" - { 41 | "be correct" in { 42 | import dupin.basic.all._ 43 | 44 | val validTeam = Team( 45 | Name("Bears"), 46 | List( 47 | Member(Name("Yakiv"), 26), 48 | Member(Name("Myroslav"), 31), 49 | Member(Name("Andrii"), 25) 50 | ) 51 | ) 52 | 53 | val invalidTeam = Team( 54 | Name(""), 55 | Member(Name(""), 0) :: (1 to 10).map(_ => Member(Name("Valid name"), 20)).toList 56 | ) 57 | 58 | val valid = validTeam.isValid 59 | assert(valid) 60 | assert(invalidTeam.validate == Validated.invalid(NonEmptyChain( 61 | ".members.[0].name should be non empty", 62 | 
".members.[0].age should be between 18 and 40", 63 | ".name should be non empty", 64 | "team should be fed with two pizzas!", 65 | ))) 66 | assert(failingTeamValidator.validate(validTeam) == Validated.invalid(NonEmptyChain( 67 | "validation error after heavy computations", 68 | ))) 69 | assert(failingTeamValidator.validate(invalidTeam) == Validated.invalid(NonEmptyChain( 70 | ".members.[0].name should be non empty", 71 | ".members.[0].age should be between 18 and 40", 72 | ".name should be non empty", 73 | "team should be fed with two pizzas!", 74 | ))) 75 | } 76 | } 77 | } -------------------------------------------------------------------------------- /core/src/dupin/core/ParserInstances.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats._ 4 | import cats.arrow.ArrowChoice 5 | import cats.arrow.FunctionK 6 | import cats.data.IorT 7 | import cats.data.NonEmptyChain 8 | import cats.implicits._ 9 | 10 | trait ParserInstance0 { 11 | implicit final def parserMonad[F[_]: Monad, E, A]: Monad[Parser[F, E, A, *]] = 12 | new Monad[Parser[F, E, A, *]] { 13 | override def pure[B](b: B): Parser[F, E, A, B] = Parser[F, E].pure(b) 14 | override def flatMap[B, C](fa: Parser[F, E, A, B])(f: B => Parser[F, E, A, C]): Parser[F, E, A, C] = 15 | fa.flatMap(f) 16 | override def tailRecM[B, C](b: B)(f: B => Parser[F, E, A, Either[B, C]]): Parser[F, E, A, C] = 17 | Parser[F, E].runF[A, C] { a => 18 | FlatMap[IorT[F, NonEmptyChain[E], *]].tailRecM(b)(b => IorT(f(b).runF(a))).value 19 | } 20 | } 21 | 22 | implicit def parserParallelWithSequentialEffect[F0[_]: Monad, E, A]: Parallel.Aux[Parser[F0, E, A, *], Parser[F0, E, A, *]] = 23 | new Parallel[Parser[F0, E, A, *]] { 24 | type F[x] = Parser[F0, E, A, x] 25 | private val identityK: Parser[F0, E, A, *] ~> Parser[F0, E, A, *] = FunctionK.id[Parser[F0, E, A, *]] 26 | private val underlyingParallel: Parallel.Aux[IorT[F0, NonEmptyChain[E], *], IorT[F0, NonEmptyChain[E], *]] = 27 | IorT.catsDataParallelForIorTWithSequentialEffect[F0, NonEmptyChain[E]] 28 | 29 | def parallel: Parser[F0, E, A, *] ~> Parser[F0, E, A, *] = identityK 30 | def sequential: Parser[F0, E, A, *] ~> Parser[F0, E, A, *] = identityK 31 | 32 | val applicative: Applicative[Parser[F0, E, A, *]] = new Applicative[Parser[F0, E, A, *]] { 33 | def pure[B](b: B): Parser[F0, E, A, B] = Parser[F0, E].pure(b) 34 | def ap[B, C](ff: Parser[F0, E, A, B => C])(fa: Parser[F0, E, A, B]): Parser[F0, E, A, C] = 35 | Parser[F0, E].runF[A, C](c => 36 | underlyingParallel.applicative.ap(IorT(ff.runF(c)))(IorT(fa.runF(c))).value 37 | ) 38 | } 39 | 40 | lazy val monad: Monad[Parser[F0, E, A, *]] = Monad[Parser[F0, E, A, *]] 41 | } 42 | } 43 | 44 | trait ParserInstances extends ParserInstance0 { 45 | implicit def parserArrow[F[_]: Monad, E]: ArrowChoice[Parser[F, E, *, *]] = 46 | new ArrowChoice[Parser[F, E, *, *]] { 47 | def choose[A, B, C, D]( 48 | f: Parser[F, E, A, C] 49 | )(g: Parser[F, E, B, D]): Parser[F, E, Either[A, B], Either[C, D]] = Parser[F, E].runF(c => 50 | c.value match { 51 | case Left(a) => f.map(Either.left[C, D](_)).runF(c.copy(value = a)) 52 | case Right(b) => g.map(Either.right[C, D](_)).runF(c.copy(value = b)) 53 | } 54 | ) 55 | 56 | def lift[A, B](f: A => B): Parser[F, E, A, B] = Parser[F, E].run(c => f(c.value).rightIor) 57 | 58 | def first[A, B, C](fa: Parser[F, E, A, B]): Parser[F, E, (A, C), (B, C)] = 59 | Parser[F, E].runF(c => fa.map(_ -> c.value._2).runF(c.copy(value = c.value._1))) 60 | 61 | def compose[A, B, 
C](f: Parser[F, E, B, C], g: Parser[F, E, A, B]): Parser[F, E, A, C] = f.compose(g) 62 | } 63 | 64 | implicit final def parserLiftedToTraverseP[F[_], E, A, B, G[_]](implicit 65 | p: Parser[F, E, A, B], 66 | F: Applicative[F], 67 | G: Traverse[G], 68 | ): Parser[F, E, G[A], G[B]] = p.liftToTraverseP[G] 69 | } 70 | -------------------------------------------------------------------------------- /core/src-2/dupin/core/ValidatorMacro.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats.Functor 4 | import dupin.core.Validator.PartiallyAppliedCombineP 5 | import dupin.core.Validator.PartiallyAppliedCombinePC 6 | import dupin.core.Validator.PartiallyAppliedCombinePR 7 | import dupin.core.Validator.PartiallyAppliedCombinePRF 8 | import scala.reflect.macros.blackbox 9 | 10 | private[dupin] object ValidatorMacro { 11 | def getFieldPath(c: blackbox.Context)(f: c.Expr[_ => _]): c.Tree = { 12 | import c.universe._ 13 | def abort = c.abort(c.enclosingPosition, s"Unable to retrieve field path from function ${f.tree}") 14 | def rec(argName: Name, selects: Tree, acc: Tree): Tree = selects match { 15 | case Ident(identName) if identName == argName => acc 16 | case Select(qualifier, name) => 17 | q"${rec(argName, qualifier, acc)}.append(_root_.dupin.core.FieldPart(${name.decodedName.toString}))" 18 | case _ => abort 19 | } 20 | f.tree match { 21 | case Function(List(ValDef(_, argName, _, _)), selects) => rec(argName, selects, q"_root_.dupin.Path.empty") 22 | case _ => abort 23 | } 24 | } 25 | } 26 | 27 | private[dupin] class ValidatorMacro(val c: blackbox.Context) { 28 | import ValidatorMacro._ 29 | 30 | def comapPImpl[F[_], E, A, AA](f: c.Expr[A => AA]): c.Expr[Validator[F, E, AA]] = { 31 | import c.universe._ 32 | c.Expr(q"""${c.prefix}.comapPE(${getFieldPath(c)(f)}, $f)""") 33 | } 34 | 35 | def combinePImpl[F[_], E, A, AA](f: c.Expr[A => AA]): c.Expr[PartiallyAppliedCombineP[F, E, A, AA]] = { 36 | import c.universe._ 37 | c.Expr(q"""_root_.dupin.Validator.PartiallyAppliedCombineP(${c.prefix}, ${getFieldPath(c)(f)}, $f)""") 38 | } 39 | 40 | def combinePCImpl[F[_], E, A, AA](f: c.Expr[A => AA]): c.Expr[PartiallyAppliedCombinePC[F, E, A, AA]] = { 41 | import c.universe._ 42 | c.Expr(q"""_root_.dupin.Validator.PartiallyAppliedCombinePC(${c.prefix}, ${getFieldPath(c)(f)}, $f)""") 43 | } 44 | 45 | def combinePRImpl[F[_], E, A, AA](f: c.Expr[A => AA]): c.Expr[PartiallyAppliedCombinePR[F, E, A, AA]] = { 46 | import c.universe._ 47 | c.Expr(q"""_root_.dupin.Validator.PartiallyAppliedCombinePR(${c.prefix}, ${getFieldPath(c)(f)}, $f)""") 48 | } 49 | 50 | def combinePRFImpl[F[_], E, A, AA](f: c.Expr[A => AA]): c.Expr[PartiallyAppliedCombinePRF[F, E, A, AA]] = { 51 | import c.universe._ 52 | c.Expr(q"""_root_.dupin.Validator.PartiallyAppliedCombinePRF(${c.prefix}, ${getFieldPath(c)(f)}, $f)""") 53 | } 54 | 55 | def combinePIImpl[F[_], E, A, AA]( 56 | f: c.Expr[A => AA] 57 | )(AA: c.Expr[Validator[F, E, AA]]): c.Expr[Validator[F, E, A]] = { 58 | import c.universe._ 59 | c.Expr(q"""${c.prefix}.combinePE(${getFieldPath(c)(f)}, $f)($AA)""") 60 | } 61 | 62 | def deriveImpl[F[_], E, A](implicit 63 | FT: c.WeakTypeTag[F[Any]], 64 | ET: c.WeakTypeTag[E], 65 | AT: c.WeakTypeTag[A], 66 | ): c.Expr[Validator[F, E, A]] = { 67 | import c.universe._ 68 | c.Expr(AT.tpe.members.toList.sortBy(_.fullName).collect { 69 | case m: MethodSymbol if m.isParamAccessor => m 70 | }.foldLeft(q"""_root_.dupin.Validator[$FT, $ET].success[$AT]""") { case (t, m) => 71 | 
val returnTpe = m.returnType.substituteTypes(AT.tpe.typeSymbol.asType.typeParams, AT.tpe.typeArgs) 72 | q""" 73 | $t.combinePE( 74 | _root_.dupin.Path(_root_.dupin.core.FieldPart(${m.name.toString})), 75 | _.${m.name})( 76 | implicitly[_root_.dupin.core.Validator[$FT, $ET, $returnTpe]] 77 | ) 78 | """ 79 | }) 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /core/test/src-2.13+/dupin/readme/ComplexExampleWithParserSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | import cats.data.IorNec 4 | import cats.data.NonEmptyList 5 | import cats.implicits._ 6 | import dupin.basic._ 7 | import org.scalatest.freespec.AnyFreeSpec 8 | import scala.collection.mutable 9 | 10 | class ComplexExampleWithParserSpec extends AnyFreeSpec with ComplexExampleFixture { 11 | //validation types to handle repository effect `R` 12 | type CustomValidator[A] = Validator[R, String, A] 13 | val CustomValidator = Validator[R, String] 14 | type CustomParser[A, B] = Parser[R, String, A, B] 15 | val CustomParser = Parser[R, String] 16 | 17 | //parsers per requirement: 18 | 19 | //term and mistake should be a single word 20 | val termParser = CustomParser 21 | .root[String, Term]( 22 | Option(_).filter(_.matches("\\w+")).map(Term.apply), 23 | c => s"${c.path}: cannot parse string '${c.value}' to a term" 24 | ) 25 | 26 | //term and mistake should not exist in the database 27 | val repositoryTermParser = CustomValidator 28 | .rootF[Term]( 29 | TermRepository.contains(_).map(!_), 30 | c => s"${c.path}: term '${c.value}' already exists" 31 | ) 32 | .toParser 33 | 34 | //intermediate model to aggregate parsed terms 35 | case class HalfParsedTermModel( 36 | term: Term, 37 | mistakes: List[Term], 38 | ) 39 | 40 | //terms should be unique among other terms in the list 41 | val uniqueTermsParser = CustomParser 42 | //define list level context where terms should be unique 43 | .idContext[List[HalfParsedTermModel]] { _ => 44 | val validTerms = mutable.Set.empty[Term] 45 | CustomValidator 46 | .root[Term](validTerms.add, c => s"${c.path}: term '${c.value}' is duplicate") 47 | .comapP[HalfParsedTermModel](_.term) 48 | .toParser 49 | //lift parser to `List` accumulating errors 50 | .liftToTraverseCombiningP[List] 51 | } 52 | 53 | //mistakes should be unique among other mistakes and terms in the list 54 | val uniqueTermsMistakesParser = CustomParser 55 | .idContext[List[HalfParsedTermModel]] { ms => 56 | val validTerms = ms.view.map(_.term).to(mutable.Set) 57 | CustomParser 58 | .idContext[HalfParsedTermModel] { m => 59 | CustomValidator 60 | .root[Term](validTerms.add, c => s"${c.path}: mistake '${c.value}' is duplicate") 61 | .toParser 62 | .liftToTraverseCombiningP[List] 63 | .comapP[HalfParsedTermModel](_.mistakes) 64 | //update model with unique mistakes 65 | .map(a => m.copy(mistakes = a)) 66 | } 67 | .liftToTraverseCombiningP[List] 68 | } 69 | 70 | //parsed model should have as minimum one mistake 71 | def nelParser[A] = CustomParser 72 | .root[List[A], NonEmptyList[A]](_.toNel, c => s"${c.path}: cannot be empty") 73 | val halfToFullModelParser = CustomParser 74 | .context[HalfParsedTermModel, TermModel](m => 75 | nelParser[Term] 76 | .comapP[HalfParsedTermModel](_.mistakes) 77 | .map(mistakes => TermModel(m.term, mistakes)) 78 | ) 79 | 80 | //combine all parsers together 81 | val modelsParser = ( 82 | termParser 83 | .andThen(repositoryTermParser) 84 | .comapP[RawTermModel](_.term), 85 | termParser 86 | 
.andThen(repositoryTermParser) 87 | .liftToTraverseCombiningP[List] 88 | .comapP[RawTermModel](_.mistakes), 89 | ) 90 | .parMapN(HalfParsedTermModel.apply) 91 | .liftToTraverseCombiningP[List] 92 | .andThen(uniqueTermsParser) 93 | .andThen(uniqueTermsMistakesParser) 94 | .andThen(halfToFullModelParser.liftToTraverseCombiningP[List]) 95 | 96 | override def parse(rawModels: List[RawTermModel]): R[IorNec[String, List[TermModel]]] = 97 | modelsParser(rawModels) 98 | } 99 | -------------------------------------------------------------------------------- /core/test/src-2.12/dupin/readme/ComplexExampleWithParserSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | import cats.Parallel 4 | import cats.data.IorNec 5 | import cats.data.NonEmptyList 6 | import cats.implicits._ 7 | import dupin.basic._ 8 | import org.scalatest.freespec.AnyFreeSpec 9 | import scala.collection.mutable 10 | 11 | class ComplexExampleWithParserSpec extends AnyFreeSpec with ComplexExampleFixture { 12 | //validation types to handle repository effect `R` 13 | type CustomValidator[A] = Validator[R, String, A] 14 | val CustomValidator = Validator[R, String] 15 | type CustomParser[A, B] = Parser[R, String, A, B] 16 | val CustomParser = Parser[R, String] 17 | 18 | //parsers per requirement: 19 | 20 | //term and mistake should be a single word 21 | val termParser = CustomParser 22 | .root[String, Term]( 23 | Option(_).filter(_.matches("\\w+")).map(Term.apply), 24 | c => s"${c.path}: cannot parse string '${c.value}' to a term" 25 | ) 26 | 27 | //term and mistake should not exist in the database 28 | val repositoryTermParser = CustomValidator 29 | .rootF[Term]( 30 | TermRepository.contains(_).map(!_), 31 | c => s"${c.path}: term '${c.value}' already exists" 32 | ) 33 | .toParser 34 | 35 | //intermediate model to aggregate parsed terms 36 | case class HalfParsedTermModel( 37 | term: Term, 38 | mistakes: List[Term], 39 | ) 40 | 41 | //terms should be unique among other terms in the list 42 | val uniqueTermsParser = CustomParser 43 | //define list level context where terms should be unique 44 | .idContext[List[HalfParsedTermModel]] { _ => 45 | val validTerms = mutable.Set.empty[Term] 46 | CustomValidator 47 | .root[Term](validTerms.add, c => s"${c.path}: term '${c.value}' is duplicate") 48 | .comapP[HalfParsedTermModel](_.term) 49 | .toParser 50 | //lift parser to `List` accumulating errors 51 | .liftToTraverseCombiningP[List] 52 | } 53 | 54 | //mistakes should be unique among other mistakes and terms in the list 55 | val uniqueTermsMistakesParser = CustomParser 56 | .idContext[List[HalfParsedTermModel]] { ms => 57 | val validTerms = mutable.Set(ms.view.map(_.term):_*) 58 | CustomParser 59 | .idContext[HalfParsedTermModel] { m => 60 | CustomValidator 61 | .root[Term](validTerms.add, c => s"${c.path}: mistake '${c.value}' is duplicate") 62 | .toParser 63 | .liftToTraverseCombiningP[List] 64 | .comapP[HalfParsedTermModel](_.mistakes) 65 | //update model with unique mistakes 66 | .map(a => m.copy(mistakes = a)) 67 | } 68 | .liftToTraverseCombiningP[List] 69 | } 70 | 71 | //parsed model should have as minimum one mistake 72 | def nelParser[A] = CustomParser 73 | .root[List[A], NonEmptyList[A]](_.toNel, c => s"${c.path}: cannot be empty") 74 | val halfToFullModelParser = CustomParser 75 | .context[HalfParsedTermModel, TermModel](m => 76 | nelParser[Term] 77 | .comapP[HalfParsedTermModel](_.mistakes) 78 | .map(mistakes => TermModel(m.term, mistakes)) 79 | ) 80 | 81 | 
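    //note: unlike the src-2.13+ twin of this spec, the parsers here are combined via
    //parserParallelWithSequentialEffect(...).applicative.map2 instead of parMapN,
    //presumably because the parallel syntax does not resolve under Scala 2.12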
//combine all parsers together 82 | val modelsParser = Parser.parserParallelWithSequentialEffect[R, String, RawTermModel].applicative.map2( 83 | termParser 84 | .andThen(repositoryTermParser) 85 | .comapP[RawTermModel](_.term), 86 | termParser 87 | .andThen(repositoryTermParser) 88 | .liftToTraverseCombiningP[List] 89 | .comapP[RawTermModel](_.mistakes), 90 | )(HalfParsedTermModel.apply) 91 | .liftToTraverseCombiningP[List] 92 | .andThen(uniqueTermsParser) 93 | .andThen(uniqueTermsMistakesParser) 94 | .andThen(halfToFullModelParser.liftToTraverseCombiningP[List]) 95 | 96 | override def parse(rawModels: List[RawTermModel]): R[IorNec[String, List[TermModel]]] = 97 | modelsParser(rawModels) 98 | } 99 | -------------------------------------------------------------------------------- /core/src/dupin/core/Parser.scala: -------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats._ 4 | import cats.data._ 5 | import cats.implicits._ 6 | import dupin._ 7 | 8 | /** 9 | * A type class that defines how to parse an instance of `A` to an instance of `B`. 10 | * Can be thought of as a `A => F[IorNec[E, B]]` function. 11 | */ 12 | final class Parser[F[_], E, A, B] private ( 13 | val runF: Context[A] => F[IorNec[E, B]] 14 | ) extends ParserBinCompat[F, E, A, B] { 15 | def apply(a: A): F[IorNec[E, B]] = parse(a) 16 | 17 | def parse(a: A): F[IorNec[E, B]] = runF(Context(Path.empty, a)) 18 | 19 | def handleErrorWith( 20 | f: NonEmptyChain[E] => Parser[F, E, A, B] 21 | )(implicit 22 | F: Monad[F] 23 | ): Parser[F, E, A, B] = Parser[F, E].runF(c => 24 | ApplicativeError[IorT[F, NonEmptyChain[E], *], NonEmptyChain[E]] 25 | .handleErrorWith(IorT(runF(c)))(e => IorT(f(e).runF(c))) 26 | .value 27 | ) 28 | 29 | def mapError[EE](f: E => EE)(implicit F: Functor[F]): Parser[F, EE, A, B] = Parser[F, EE].runF(c => 30 | runF(c).map(_.leftMap(_.map(f))) 31 | ) 32 | 33 | def map[C](f: B => C)(implicit F: Functor[F]): Parser[F, E, A, C] = Parser[F, E].runF(c => 34 | runF(Context(c.path, c.value)).map(_.map(f)) 35 | ) 36 | 37 | def flatMap[C](f: B => Parser[F, E, A, C])(implicit F: Monad[F]): Parser[F, E, A, C] = Parser[F, E].runF(c => 38 | IorT(runF(Context(c.path, c.value))).flatMapF(f(_).runF(c)).value 39 | ) 40 | 41 | /** 42 | * Contravariant map without path changes. Example: 43 | * {{{ 44 | * scala> case class User(age: Int) 45 | * scala> val user = User(1) 46 | * scala> val parser = dupin.basic.BasicParser.idFailure[Int](c => s"${c.path} is wrong") 47 | * 48 | * scala> parser.comap[User](_.age).parse(user) 49 | * res0: cats.Id[cats.data.IorNec[String,Int]] = Left(Chain(. is wrong)) 50 | * 51 | * scala> parser.comapP[User](_.age).parse(user) 52 | * res1: cats.Id[cats.data.IorNec[String,Int]] = Left(Chain(.age is wrong)) 53 | * }}} 54 | */ 55 | def comap[AA](f: AA => A): Parser[F, E, AA, B] = comapPE(Path.empty, f) 56 | 57 | /** 58 | * Contravariant map with explicit path prefix. 59 | */ 60 | def comapPE[AA](p: Path, f: AA => A): Parser[F, E, AA, B] = Parser[F, E].runF(c => 61 | runF(Context(c.path ++ p, f(c.value))) 62 | ) 63 | 64 | /** 65 | * Lifts parser to `G[_]` type using `Traverse` instance, adds index as path prefix. 
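 * Note that a single failed element fails the whole lifted result (no `G[B]` is produced), although errors from all failed elements are still accumulated.
 * Use [[liftToTraverseCombiningP]] to skip failed elements and keep the successfully parsed ones instead.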
66 | * Example: 67 | * {{{ 68 | * scala> case class Name(value: String) 69 | * scala> val rawNames = List("", "Doe") 70 | * scala> val parser = dupin.basic.BasicParser.root[String, Name]( 71 | * Option(_).filterNot(_.isEmpty).map(Name.apply), 72 | * c => s"${c.path} is not a name", 73 | * ) 74 | * 75 | * scala> parser.liftToTraverseP[List].parse(rawNames) 76 | * res0: cats.Id[cats.data.IorNec[String,List[Name]]] = Left(Chain(.[0] is not a name)) 77 | * 78 | * scala> parser.liftToTraverseCombiningP[List].parse(rawNames) 79 | * res1: cats.Id[cats.data.IorNec[String,List[Name]]] = Both(Chain(.[0] is not a name),List(Name(Doe))) 80 | * }}} 81 | */ 82 | def liftToTraverseP[G[_]](implicit 83 | F: Applicative[F], 84 | GT: Traverse[G], 85 | ): Parser[F, E, G[A], G[B]] = Parser[F, E].runF[G[A], G[B]](c => 86 | c.value 87 | .mapWithIndex((a, i) => this.comapPE[G[A]](Path(IndexPart(i.toString)), _ => a).runF(c)) 88 | .sequence 89 | .map(Parallel.parSequence[G, IorNec[E, *], B]) 90 | ) 91 | 92 | /** 93 | * Lifts parser to `G[_]` type using `Traverse` instance, adds index as path prefix, 94 | * combines each individual parser result using `MonoidK` instance, therefore allows to skip failures. 95 | * 96 | * @see [[liftToTraverseP]] 97 | */ 98 | def liftToTraverseCombiningP[G[_]](implicit 99 | F: Applicative[F], 100 | GT: Traverse[G], 101 | GA: Applicative[G], 102 | GM: MonoidK[G], 103 | ): Parser[F, E, G[A], G[B]] = Parser[F, E].runF[G[A], G[B]](c => 104 | c.value 105 | .mapWithIndex((a, i) => this.comapPE[G[A]](Path(IndexPart(i.toString)), _ => a).runF(c)) 106 | .sequence 107 | .map(GA 108 | .map(_)(_.map(_.pure[G])) 109 | .foldLeft(Ior.right[NonEmptyChain[E], G[B]](GM.empty))(_.combine(_)(_ combineK _, _ combineK _))) 110 | ) 111 | 112 | def andThen[C](p: Parser[F, E, B, C])(implicit F: Monad[F]): Parser[F, E, A, C] = Parser[F, E].runF(c => 113 | runF(c).flatMap { 114 | case Ior.Both(a, b) => p.runF(Context(c.path, b)).map(_.addLeft(a)((x, y) => y ++ x)) 115 | case Ior.Right(b) => p.runF(Context(c.path, b)) 116 | case r @ Ior.Left(_) => F.pure(r) 117 | } 118 | ) 119 | 120 | def compose[Z](p: Parser[F, E, Z, A])(implicit F: Monad[F]): Parser[F, E, Z, B] = p.andThen(this) 121 | } 122 | 123 | object Parser extends ParserInstances { 124 | def apply[F[_], E]: PartiallyAppliedConstructor[F, E] = PartiallyAppliedConstructor[F, E]() 125 | 126 | case class PartiallyAppliedConstructor[F[_], E]() { 127 | def apply[A, B](implicit B: Parser[F, E, A, B]): Parser[F, E, A, B] = B 128 | 129 | def runF[A, B](runF: Context[A] => F[IorNec[E, B]]): Parser[F, E, A, B] = 130 | new Parser(runF) 131 | 132 | def run[A, B](run: Context[A] => IorNec[E, B])(implicit F: Applicative[F]): Parser[F, E, A, B] = 133 | runF(run andThen F.pure) 134 | 135 | @deprecated("use pure instead") 136 | def success[A, B](b: B)(implicit F: Applicative[F]): Parser[F, E, A, B] = pure(b) 137 | 138 | def success_[A, B](f: A => B)(implicit F: Applicative[F]): Parser[F, E, A, B] = 139 | run[A, B](c => Ior.right(f(c.value))) 140 | 141 | def idSuccess[A](implicit F: Applicative[F]): IdParser[F, E, A] = success_[A, A](identity) 142 | 143 | def pure[A, B](b: B)(implicit F: Applicative[F]): Parser[F, E, A, B] = success_(_ => b) 144 | 145 | def failure[A, B]( 146 | m: MessageBuilder[A, E], 147 | ms: MessageBuilder[A, E]* 148 | )(implicit 149 | F: Applicative[F], 150 | ): Parser[F, E, A, B] = run[A, B](c => Ior.Left(NonEmptyChain(m(c), ms.map(_(c)): _*))) 151 | 152 | def idFailure[A]( 153 | m: MessageBuilder[A, E], 154 | ms: MessageBuilder[A, E]* 155 | 
)(implicit 156 | F: Applicative[F], 157 | ): IdParser[F, E, A] = failure[A, A](m, ms: _*) 158 | 159 | /** 160 | * Creates parser from context. 161 | */ 162 | def context[A, B](f: A => Parser[F, E, A, B]): Parser[F, E, A, B] = 163 | runF(a => f(a.value).runF(a)) 164 | 165 | def idContext[A](f: A => Parser[F, E, A, A]): Parser[F, E, A, A] = context(f) 166 | 167 | def rootF[A, B](f: A => F[Option[B]], m: MessageBuilder[A, E])(implicit F: Functor[F]): Parser[F, E, A, B] = 168 | runF(c => f(c.value).map(_.fold(Ior.leftNec[E, B](m(c)))(Ior.right))) 169 | 170 | /** 171 | * Creates a root parser from given arguments. 172 | */ 173 | def root[A, B](f: A => Option[B], m: MessageBuilder[A, E])(implicit F: Applicative[F]): Parser[F, E, A, B] = 174 | rootF(f andThen F.pure, m) 175 | 176 | def idRoot[A](f: A => Option[A], m: MessageBuilder[A, E])(implicit F: Applicative[F]): IdParser[F, E, A] = 177 | root(f, m) 178 | } 179 | 180 | } 181 | -------------------------------------------------------------------------------- /core/test/src/dupin/readme/ComplexExampleFixture.scala: -------------------------------------------------------------------------------- 1 | package dupin.readme 2 | 3 | import cats.data.Ior 4 | import cats.data.IorNec 5 | import cats.data.NonEmptyChain 6 | import cats.data.NonEmptyList 7 | import org.scalatest.freespec.AnyFreeSpecLike 8 | 9 | trait ComplexExampleFixture extends AnyFreeSpecLike { 10 | case class RawTermModel( 11 | term: String, 12 | mistakes: List[String], 13 | ) 14 | case class Term(value: String) { 15 | override def toString: String = value 16 | } 17 | case class TermModel( 18 | term: Term, 19 | mistakes: NonEmptyList[Term], 20 | ) 21 | 22 | type R[A] = Either[String, A] 23 | object TermRepository { 24 | def contains(term: Term): R[Boolean] = { 25 | if (term == Term("exists")) Right(true) 26 | else if (term == Term("error")) Left("error") 27 | else Right(false) 28 | } 29 | } 30 | 31 | def parse(rawModels: List[RawTermModel]): R[IorNec[String, List[TermModel]]] 32 | 33 | "Complex example parsers should" - { 34 | val validRawTermWith1Mistake = RawTermModel("term1", List("mistake1")) 35 | val validRawTermWith2Mistakes = RawTermModel("term2", List("mistake21", "mistake22")) 36 | val parsedTermWith1Mistake = TermModel(Term("term1"), NonEmptyList.one(Term("mistake1"))) 37 | val parsedTermWith2Mistakes = TermModel(Term("term2"), NonEmptyList.of(Term("mistake21"), Term("mistake22"))) 38 | "return empty list if empty models were passed" in { 39 | val models = List.empty 40 | assert(parse(models) == Right(Ior.right(List.empty))) 41 | } 42 | "return valid models" in { 43 | val models = List( 44 | validRawTermWith1Mistake, 45 | validRawTermWith2Mistakes 46 | ) 47 | assert(parse(models) == Right(Ior.right(List( 48 | parsedTermWith1Mistake, 49 | parsedTermWith2Mistakes 50 | )))) 51 | } 52 | "filter out term that is not a word" in { 53 | val models = List( 54 | validRawTermWith1Mistake, 55 | RawTermModel("invalid term", validRawTermWith2Mistakes.mistakes), 56 | ) 57 | assert(parse(models) == Right(Ior.both( 58 | NonEmptyChain.one(".[1].term: cannot parse string 'invalid term' to a term"), 59 | List( 60 | parsedTermWith1Mistake, 61 | ) 62 | ))) 63 | } 64 | "filter out term with two invalid fields" in { 65 | val models = List( 66 | validRawTermWith1Mistake, 67 | RawTermModel("invalid term", List("invalid mistake")), 68 | ) 69 | assert(parse(models) == Right(Ior.both( 70 | NonEmptyChain( 71 | ".[1].term: cannot parse string 'invalid term' to a term", 72 | ".[1].mistakes.[0]: cannot parse 
string 'invalid mistake' to a term", 73 | ), 74 | List( 75 | parsedTermWith1Mistake, 76 | ) 77 | ))) 78 | } 79 | "filter out term that exists in repository" in { 80 | val models = List( 81 | validRawTermWith1Mistake, 82 | RawTermModel("exists", validRawTermWith2Mistakes.mistakes), 83 | ) 84 | assert(parse(models) == Right(Ior.both( 85 | NonEmptyChain.one(".[1].term: term 'exists' already exists"), 86 | List( 87 | parsedTermWith1Mistake, 88 | ) 89 | ))) 90 | } 91 | "filter out mistake that is not a word" in { 92 | val models = List( 93 | validRawTermWith1Mistake, 94 | RawTermModel(validRawTermWith2Mistakes.term, "invalid mistake" :: validRawTermWith2Mistakes.mistakes), 95 | ) 96 | assert(parse(models) == Right(Ior.both( 97 | NonEmptyChain.one(".[1].mistakes.[0]: cannot parse string 'invalid mistake' to a term"), 98 | List( 99 | parsedTermWith1Mistake, 100 | parsedTermWith2Mistakes, 101 | ) 102 | ))) 103 | } 104 | "filter out duplicated terms" in { 105 | val models = List( 106 | validRawTermWith1Mistake, 107 | RawTermModel(validRawTermWith1Mistake.term, validRawTermWith2Mistakes.mistakes) 108 | ) 109 | assert(parse(models) == Right(Ior.both( 110 | NonEmptyChain( 111 | ".[1].term: term 'term1' is duplicate", 112 | ), 113 | List( 114 | parsedTermWith1Mistake, 115 | ) 116 | ))) 117 | } 118 | "filter out duplicated mistakes" in { 119 | val models = List( 120 | RawTermModel(validRawTermWith1Mistake.term, "mistake1" :: validRawTermWith1Mistake.mistakes), 121 | RawTermModel(validRawTermWith2Mistakes.term, "mistake1" :: validRawTermWith2Mistakes.mistakes) 122 | ) 123 | assert(parse(models) == Right(Ior.both( 124 | NonEmptyChain( 125 | ".[0].mistakes.[1]: mistake 'mistake1' is duplicate", 126 | ".[1].mistakes.[0]: mistake 'mistake1' is duplicate", 127 | ), 128 | List( 129 | parsedTermWith1Mistake, 130 | parsedTermWith2Mistakes, 131 | ) 132 | ))) 133 | } 134 | "filter out mistake that duplicates term" in { 135 | val models = List( 136 | RawTermModel(validRawTermWith1Mistake.term, "term2" :: validRawTermWith1Mistake.mistakes), 137 | validRawTermWith2Mistakes, 138 | ) 139 | assert(parse(models) == Right(Ior.both( 140 | NonEmptyChain( 141 | ".[0].mistakes.[0]: mistake 'term2' is duplicate", 142 | ), 143 | List( 144 | parsedTermWith1Mistake, 145 | parsedTermWith2Mistakes, 146 | ) 147 | ))) 148 | } 149 | "filter out term if mistakes are empty" in { 150 | val models = List( 151 | validRawTermWith1Mistake, 152 | RawTermModel("term2", List.empty), 153 | ) 154 | assert(parse(models) == Right(Ior.both( 155 | NonEmptyChain.one(".[1].mistakes: cannot be empty"), 156 | List( 157 | parsedTermWith1Mistake, 158 | ) 159 | ))) 160 | } 161 | "filter out term with invalid mistakes only" in { 162 | val models = List( 163 | validRawTermWith1Mistake, 164 | RawTermModel(validRawTermWith2Mistakes.term, List("invalid mistake 1", "invalid mistake 2")) 165 | ) 166 | assert(parse(models) == Right(Ior.both( 167 | NonEmptyChain( 168 | ".[1].mistakes.[0]: cannot parse string 'invalid mistake 1' to a term", 169 | ".[1].mistakes.[1]: cannot parse string 'invalid mistake 2' to a term", 170 | ".[1].mistakes: cannot be empty", 171 | ), 172 | List( 173 | parsedTermWith1Mistake, 174 | ) 175 | ))) 176 | } 177 | "return error if repository returns error" in { 178 | val models = List(RawTermModel("error", List.empty)) 179 | assert(parse(models) == Left("error")) 180 | } 181 | } 182 | } 183 | -------------------------------------------------------------------------------- /core/src/dupin/core/Validator.scala: 
-------------------------------------------------------------------------------- 1 | package dupin.core 2 | 3 | import cats._ 4 | import cats.data._ 5 | import cats.implicits._ 6 | import dupin._ 7 | 8 | /** 9 | * A type class that defines how to validate an instance of `A`. 10 | * Can be thought of as a `A => F[ValidatedNec[E, Unit]]` function. 11 | */ 12 | final class Validator[F[_], E, A] private ( 13 | val runF: Context[A] => F[ValidatedNec[E, Unit]] 14 | ) extends ValidatorBinCompat[F, E, A] { 15 | def apply(a: A)(implicit F: Functor[F]): F[ValidatedNec[E, A]] = validate(a) 16 | 17 | def validate(a: A)(implicit F: Functor[F]): F[ValidatedNec[E, A]] = 18 | runF(Context(Path.empty, a)).map(_.map(_ => a)) 19 | 20 | def handleErrorWith( 21 | f: NonEmptyChain[E] => Validator[F, E, A] 22 | )(implicit 23 | F: Monad[F] 24 | ): Validator[F, E, A] = Validator[F, E].runF(c => 25 | runF(c).flatMap { 26 | case r @ Validated.Valid(_) => F.pure(r) 27 | case Validated.Invalid(a) => f(a).runF(c) 28 | } 29 | ) 30 | 31 | def mapError[EE](f: E => EE)(implicit F: Functor[F]): Validator[F, EE, A] = Validator[F, EE].runF(c => 32 | runF(c).map(_.leftMap(_.map(f))) 33 | ) 34 | 35 | /** 36 | * Replaces failure messages with supplied values. 37 | * Optimized version of `.handleErrorWith(_ => Validator.failure(m1, ms))` 38 | */ 39 | def failureAs[EE]( 40 | m1: MessageBuilder[A, EE], 41 | ms: MessageBuilder[A, EE]* 42 | )(implicit 43 | F: Functor[F] 44 | ): Validator[F, EE, A] = Validator[F, EE].runF(c => 45 | runF(c).map(_.leftMap(_ => NonEmptyChain(m1(c), ms.map(_(c)): _*))) 46 | ) 47 | 48 | def mapK[G[_]](f: F ~> G): Validator[G, E, A] = Validator[G, E].runF(a => f(runF(a))) 49 | 50 | /** 51 | * Contravariant map without path changes. Example: 52 | * {{{ 53 | * scala> case class User(age: Int) 54 | * scala> val user = User(1) 55 | * scala> val validator = dupin.basic.BasicValidator.failure[Int](c => s"${c.path} is wrong") 56 | * 57 | * scala> validator.comap[User](_.age).validate(user) 58 | * res0: cats.Id[cats.data.ValidatedNec[String,User]] = Invalid(Chain(. is wrong)) 59 | * 60 | * scala> validator.comapP[User](_.age).validate(user) 61 | * res1: cats.Id[cats.data.ValidatedNec[String,User]] = Invalid(Chain(.age is wrong)) 62 | * }}} 63 | */ 64 | def comap[AA](f: AA => A): Validator[F, E, AA] = comapPE(Path.empty, f) 65 | 66 | /** 67 | * Contravariant map with explicit path prefix. 68 | */ 69 | def comapPE[AA](p: Path, f: AA => A): Validator[F, E, AA] = Validator[F, E].runF(c => 70 | runF(Context(c.path ++ p, f(c.value))) 71 | ) 72 | 73 | /** 74 | * Lifts validator to `G[_]` type using `Traverse` instance, adds index as path prefix. 75 | */ 76 | def liftToTraverseP[G[_]](implicit 77 | F: Applicative[F], 78 | G: Traverse[G], 79 | ): Validator[F, E, G[A]] = Validator[F, E].runF[G[A]](c => c 80 | .value 81 | .mapWithIndex((a, i) => this.comapPE[G[A]](Path(IndexPart(i.toString)), _ => a).runF(c)) 82 | .sequence 83 | .map(_.sequence[ValidatedNec[E, *], Unit].map(_ => ())) 84 | ) 85 | 86 | def product[B]( 87 | v: Validator[F, E, B] 88 | )(implicit FF: Functor[F], FS: Semigroupal[F]): Validator[F, E, (A, B)] = Validator[F, E].runF(a => 89 | (this.runF(a.copy(value = a.value._1)), v.runF(a.copy(value = a.value._2))).mapN(_ combine _) 90 | ) 91 | 92 | /** 93 | * Combines two validators of the same type into one. 94 | * If first validator fails, second one is not invoked. 
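 * Use [[combine]] instead when both validators should always be invoked and their errors accumulated.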
95 | * Example: 96 | * {{{ 97 | * scala> val value = "value" 98 | * scala> val v1 = dupin.basic.BasicValidator.failure[String](_ => "failure1") 99 | * scala> val v2 = dupin.basic.BasicValidator.failure[String](_ => "failure2") 100 | * scala> val v3 = dupin.basic.BasicValidator.success[String] 101 | * 102 | * scala> (v1 andThen v2).validate(value) 103 | * res0: cats.Id[cats.data.ValidatedNec[String,User]] = Invalid(Chain(failure1)) 104 | * 105 | * scala> (v3 andThen v2).validate(value) 106 | * res1: cats.Id[cats.data.ValidatedNec[String,User]] = Invalid(Chain(failure2)) 107 | * }}} 108 | */ 109 | def andThen(v: Validator[F, E, A])(implicit F: Monad[F]): Validator[F, E, A] = Validator[F, E].runF(c => 110 | runF(c).flatMap { 111 | case Validated.Valid(_) => v.runF(c) 112 | case r @ Validated.Invalid(_) => F.pure(r) 113 | } 114 | ) 115 | 116 | /** 117 | * Combines two validators of the same type into one. 118 | * If either validator fails, error is returned. If both validators fail, errors from both validators are returned. 119 | * Example: 120 | * {{{ 121 | * scala> val value = "value" 122 | * scala> val v1 = dupin.basic.BasicValidator.failure[String](_ => "failure1") 123 | * scala> val v2 = dupin.basic.BasicValidator.failure[String](_ => "failure2") 124 | * scala> val v3 = dupin.basic.BasicValidator.success[String] 125 | * 126 | * scala> (v1 combine v2).validate(value) 127 | * res0: cats.Id[cats.data.ValidatedNec[String,User]] = Invalid(Chain(failure1, failure2)) 128 | * 129 | * scala> (v3 combine v2).validate(value) 130 | * res1: cats.Id[cats.data.ValidatedNec[String,User]] = Invalid(Chain(failure2)) 131 | * }}} 132 | */ 133 | def combine(v: Validator[F, E, A])(implicit F: Applicative[F]): Validator[F, E, A] = 134 | Validator[F, E].runF(a => (this.runF(a), v.runF(a)).mapN(_ combine _)) 135 | 136 | /** 137 | * Combines two validators of the same type into one. 138 | * If either validator fails, success is returned. If both validators fail, errors from right validator are returned. 139 | * Example: 140 | * {{{ 141 | * scala> val value = "value" 142 | * scala> val v1 = dupin.basic.BasicValidator.failure[String](_ => "failure1") 143 | * scala> val v2 = dupin.basic.BasicValidator.failure[String](_ => "failure2") 144 | * scala> val v3 = dupin.basic.BasicValidator.success[String] 145 | * 146 | * scala> (v1 orElse v2).validate(value) 147 | * res0: cats.Id[cats.data.ValidatedNec[String,User]] = Invalid(Chain(failure2)) 148 | * 149 | * scala> (v3 orElse v2).validate(value) 150 | * res1: cats.Id[cats.data.ValidatedNec[String,User]] = Valid(value) 151 | * }}} 152 | */ 153 | def orElse(v: Validator[F, E, A])(implicit F: Applicative[F]): Validator[F, E, A] = 154 | Validator[F, E].runF(a => (this.runF(a), v.runF(a)).mapN(_ orElse _)) 155 | 156 | /** 157 | * Alias for [[combine]] with `$` operator priority 158 | */ 159 | def &&(v: Validator[F, E, A])(implicit F: Applicative[F]): Validator[F, E, A] = combine(v) 160 | 161 | /** 162 | * Alias for [[orElse]] with `|` operator priority 163 | */ 164 | def ||(v: Validator[F, E, A])(implicit F: Applicative[F]): Validator[F, E, A] = orElse(v) 165 | 166 | /** 167 | * Combines `this` with validator from context. 168 | * 169 | * @see [[combine]] 170 | */ 171 | def combineC(f: A => Validator[F, E, A])(implicit F: Applicative[F]): Validator[F, E, A] = 172 | combine(Validator[F, E].context(f)) 173 | 174 | /** 175 | * Combines `this` with root validator passed by separate arguments. 
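 * For example (an illustrative check), `dupin.basic.BasicValidator.success[String].combineR(_.nonEmpty, c => s"${c.path} should be non empty")` attaches a simple non-empty check to an always-succeeding validator.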
176 | * 177 | * @see [[combine]] 178 | */ 179 | def combineR(f: A => Boolean, m: MessageBuilder[A, E])(implicit F: Applicative[F]): Validator[F, E, A] = 180 | combine(Validator[F, E].root(f, m)) 181 | 182 | def combineRF(f: A => F[Boolean], m: MessageBuilder[A, E])(implicit F: Applicative[F]): Validator[F, E, A] = 183 | combine(Validator[F, E].rootF(f, m)) 184 | 185 | /** 186 | * Combines `this` with field validator using explicit path. 187 | * 188 | * @see [[combine]] 189 | */ 190 | def combinePE[AA]( 191 | p: Path, 192 | f: A => AA 193 | )( 194 | v: Validator[F, E, AA] 195 | )(implicit 196 | F: Applicative[F] 197 | ): Validator[F, E, A] = combine(v.comapPE(p, f)) 198 | 199 | def toParser(implicit F: Functor[F]): IdParser[F, E, A] = Parser[F, E].runF(c => 200 | runF(c).map(_.fold[IorNec[E, A]](Ior.left, _ => Ior.right(c.value))) 201 | ) 202 | } 203 | 204 | object Validator extends ValidatorInstances { 205 | def apply[F[_], E]: PartiallyAppliedConstructor[F, E] = PartiallyAppliedConstructor[F, E]() 206 | 207 | case class PartiallyAppliedConstructor[F[_], E]() extends PartiallyAppliedValidatorConstructorBinCompat[F, E] { 208 | def apply[A](implicit V: Validator[F, E, A]): Validator[F, E, A] = V 209 | 210 | def runF[A](runF: Context[A] => F[ValidatedNec[E, Unit]]): Validator[F, E, A] = 211 | new Validator(runF) 212 | 213 | def run[A](run: Context[A] => ValidatedNec[E, Unit])(implicit F: Applicative[F]): Validator[F, E, A] = 214 | runF(run andThen F.pure) 215 | 216 | /** 217 | * Creates a validator that always returns success result. 218 | */ 219 | def success[A](implicit F: Applicative[F]): Validator[F, E, A] = 220 | run[A](_ => Validated.Valid(())) 221 | 222 | /** 223 | * Creates a validator that always returns fail result. 224 | */ 225 | def failure[A]( 226 | m: MessageBuilder[A, E], 227 | ms: MessageBuilder[A, E]* 228 | )(implicit 229 | F: Applicative[F] 230 | ): Validator[F, E, A] = run[A](c => Validated.Invalid(NonEmptyChain(m(c), ms.map(_(c)): _*))) 231 | 232 | /** 233 | * Creates validator from context. 234 | */ 235 | def context[A](f: A => Validator[F, E, A]): Validator[F, E, A] = 236 | runF(a => f(a.value).runF(a)) 237 | 238 | /** 239 | * Creates a root validator from given arguments. 
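 * A small usage sketch (the check and message below are illustrative):
 * {{{
 * scala> val positive = dupin.basic.BasicValidator.root[Int](_ > 0, c => s"${c.path} should be positive")
 * scala> positive.validate(-1)
 * res0: cats.Id[cats.data.ValidatedNec[String,Int]] = Invalid(Chain(. should be positive))
 * }}}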
240 | */ 241 | def root[A](f: A => Boolean, m: MessageBuilder[A, E])(implicit F: Applicative[F]): Validator[F, E, A] = 242 | rootF(f andThen F.pure, m) 243 | 244 | def rootF[A](f: A => F[Boolean], m: MessageBuilder[A, E])(implicit F: Functor[F]): Validator[F, E, A] = 245 | runF(c => F.map(f(c.value))(if (_) Validated.Valid(()) else Validated.invalidNec(m(c)))) 246 | } 247 | 248 | case class PartiallyAppliedCombineP[F[_], E, A, AA](iv: Validator[F, E, A], p: Path, f: A => AA) { 249 | def apply(v: Validator[F, E, AA])(implicit A: Applicative[F]): Validator[F, E, A] = 250 | iv.combinePE(p, f)(v) 251 | } 252 | 253 | case class PartiallyAppliedCombinePC[F[_], E, A, AA](iv: Validator[F, E, A], p: Path, f: A => AA) { 254 | def apply(vf: A => Validator[F, E, AA])(implicit A: Applicative[F]): Validator[F, E, A] = 255 | iv.combineC(a => vf(a).comapPE(p, f)) 256 | } 257 | 258 | case class PartiallyAppliedCombinePRF[F[_], E, A, AA](iv: Validator[F, E, A], p: Path, f: A => AA) { 259 | def apply( 260 | fv: AA => F[Boolean], 261 | m: MessageBuilder[AA, E] 262 | )(implicit 263 | F: Applicative[F] 264 | ): Validator[F, E, A] = iv.combinePE(p, f)(Validator[F, E].rootF(fv, m)) 265 | } 266 | 267 | case class PartiallyAppliedCombinePR[F[_], E, A, AA](iv: Validator[F, E, A], p: Path, f: A => AA) { 268 | def apply( 269 | fv: AA => Boolean, 270 | m: MessageBuilder[AA, E] 271 | )(implicit 272 | F: Applicative[F] 273 | ): Validator[F, E, A] = iv.combinePE(p, f)(Validator[F, E].root(fv, m)) 274 | } 275 | } 276 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Dupin 2 | [![Maven Central](https://img.shields.io/maven-central/v/com.github.yakivy/dupin-core_2.12.svg)](https://mvnrepository.com/search?q=dupin) 3 | [![Sonatype Nexus (Snapshots)](https://img.shields.io/nexus/s/https/oss.sonatype.org/com.github.yakivy/dupin-core_2.13.svg)](https://oss.sonatype.org/content/repositories/snapshots/com/github/yakivy/dupin-core_2.13/) 4 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 5 | Cats friendly 6 | 7 | Dupin is a minimal, idiomatic, customizable validation Scala library. 8 | 9 | You may find Dupin useful if you... 10 | - want a transparent and composable validation approach 11 | - need to return something richer than `String` as validation message 12 | - use effectful logic inside validator (`Future`, `IO`, etc...) 13 | - like [parse don't validate](https://lexi-lambda.github.io/blog/2019/11/05/parse-don-t-validate/) style 14 | - have [cats](https://typelevel.org/cats/) dependency and like their API style 15 | - need Scala 3, Scala JS or Scala Native support 16 | 17 | Library is built around two type classes: 18 | - `Validator[F[_], E, A]` - is a self-sufficient validator for type `A`, represents a function `A => F[ValidatedNec[E, Unit]]` 19 | - `Parser[F[_], E, A, B]` - is a parser from type `A` to type `B`, represents a function `A => F[IorNec[E, B]]` 20 | 21 | ### Table of contents 22 | 1. [Quick start](#quick-start) 23 | 1. [Validate](#validate) 24 | 2. [Parse](#parse) 25 | 2. [Predefined validators](#predefined-validators) 26 | 3. [Message customization](#message-customization) 27 | 4. [Effectful validation](#effectful-validation) 28 | 5. [Custom validating package](#custom-validating-package) 29 | 6. [Complex example](#complex-example) 30 | 7. [Roadmap](#roadmap) 31 | 8. 
[Changelog](#changelog) 32 | 33 | ### Quick start 34 | Add cats and dupin dependencies to the build file, let's assume you are using sbt: 35 | ```scala 36 | libraryDependencies += Seq( 37 | "org.typelevel" %% "cats-core" % "2.9.0", 38 | "com.github.yakivy" %% "dupin-core" % "0.6.1", 39 | ) 40 | ``` 41 | Describe the domain: 42 | ```scala 43 | case class Name(value: String) 44 | case class Member(name: Name, age: Int) 45 | case class Team(name: Name, members: Seq[Member]) 46 | ``` 47 | 48 | #### Validate 49 | 50 | Define validators: 51 | ```scala 52 | import cats._ 53 | import dupin.basic.all._ 54 | 55 | //validator for simple type or value class 56 | implicit val nameValidator: BasicValidator[Name] = BasicValidator 57 | .root[Name](_.value.nonEmpty, c => s"${c.path} should be non empty") 58 | 59 | //idiomatic validator for complex type 60 | implicit val memberValidator: BasicValidator[Member] = 61 | nameValidator.comapP[Member](_.name) combine 62 | BasicValidator.root[Int]( 63 | a => a > 18 && a < 40, 64 | c => s"${c.path} should be between 18 and 40" 65 | ).comapP[Member](_.age) 66 | 67 | //same validator but with combination helpers for better type resolving 68 | val alternativeMemberValidator: BasicValidator[Member] = BasicValidator 69 | .success[Member] 70 | .combineP(_.name)(nameValidator) 71 | .combinePR(_.age)(a => a > 18 && a < 40, c => s"${c.path} should be between 18 and 40") 72 | 73 | //derived validator 74 | implicit val teamValidator: BasicValidator[Team] = BasicValidator 75 | .derive[Team] 76 | .combineR(_.members.size <= 8, _ => "team should be fed with two pizzas!") 77 | 78 | //two stage validator 79 | val failingTeamValidator: BasicValidator[Team] = teamValidator 80 | .andThen(BasicValidator.failure[Team](_ => "validation error after heavy computations")) 81 | ``` 82 | Validate them all: 83 | ```scala 84 | import dupin.basic.all._ 85 | 86 | val validTeam = Team( 87 | Name("Bears"), 88 | List( 89 | Member(Name("Yakiv"), 26), 90 | Member(Name("Myroslav"), 31), 91 | Member(Name("Andrii"), 25) 92 | ) 93 | ) 94 | 95 | val invalidTeam = Team( 96 | Name(""), 97 | Member(Name(""), 0) :: (1 to 10).map(_ => Member(Name("Valid name"), 20)).toList 98 | ) 99 | 100 | assert(validTeam.isValid) 101 | assert(invalidTeam.validate == Validated.invalid(NonEmptyChain( 102 | ".members.[0].name should be non empty", 103 | ".members.[0].age should be between 18 and 40", 104 | ".name should be non empty", 105 | "team should be fed with two pizzas!", 106 | ))) 107 | assert(failingTeamValidator.validate(validTeam) == Validated.invalid(NonEmptyChain( 108 | "validation error after heavy computations", 109 | ))) 110 | assert(failingTeamValidator.validate(invalidTeam) == Validated.invalid(NonEmptyChain( 111 | ".members.[0].name should be non empty", 112 | ".members.[0].age should be between 18 and 40", 113 | ".name should be non empty", 114 | "team should be fed with two pizzas!", 115 | ))) 116 | ``` 117 | 118 | #### Parse 119 | 120 | Enrich the domain with raw models to parse: 121 | ```scala 122 | case class RawMember(name: String, age: Int) 123 | case class RawTeam(name: String, members: List[RawMember]) 124 | ``` 125 | 126 | Define parsers: 127 | ```scala 128 | import cats._ 129 | import cats.implicits._ 130 | import dupin.basic.all._ 131 | 132 | // parser for simple type or value class 133 | implicit val nameParser: BasicParser[String, Name] = BasicParser.root[String, Name]( 134 | Option(_).filter(_.nonEmpty).map(Name.apply), 135 | c => s"${c.path} should be non empty", 136 | ) 137 | 138 | 
//idiomatic parser for complex type 139 | implicit val memberParser: BasicParser[RawMember, Member] = 140 | ( 141 | nameParser.comapP[RawMember](_.name), 142 | BasicParser.idRoot[Int]( 143 | Option(_).filter(a => a > 18 && a < 40), 144 | c => s"${c.path} should be between 18 and 40", 145 | ).comapP[RawMember](_.age), 146 | ) 147 | .parMapN(Member.apply) 148 | 149 | implicit val teamParser: BasicParser[RawTeam, Team] = 150 | ( 151 | nameParser.comapP[RawTeam](_.name), 152 | memberParser.liftToTraverseCombiningP[List].comapP[RawTeam](_.members), 153 | ) 154 | .parMapN(Team.apply) 155 | .andThen( 156 | //if you need identity parser that filters out value by condition, 157 | //you can simply create a validator and convert it to parser 158 | BasicValidator 159 | .root[Team](_.members.size <= 8, _ => "team should be fed with two pizzas!") 160 | .toParser 161 | ) 162 | ``` 163 | 164 | Parse them all: 165 | ```scala 166 | val validTeam = RawTeam( 167 | "Bears", 168 | List( 169 | RawMember("Yakiv", 26), 170 | RawMember("Myroslav", 31), 171 | RawMember("Andrii", 25) 172 | ) 173 | ) 174 | 175 | val invalidTeam = RawTeam( 176 | "", 177 | RawMember("", 0) :: (1 to 10).map(_ => RawMember("Valid name", 20)).toList 178 | ) 179 | 180 | assert(validTeam.parse == Ior.right(Team( 181 | Name("Bears"), 182 | List( 183 | Member(Name("Yakiv"), 26), 184 | Member(Name("Myroslav"), 31), 185 | Member(Name("Andrii"), 25) 186 | ) 187 | ))) 188 | assert(invalidTeam.parse == Ior.left(NonEmptyChain( 189 | ".name should be non empty", 190 | ".members.[0].name should be non empty", 191 | ".members.[0].age should be between 18 and 40", 192 | ))) 193 | ``` 194 | 195 | ### Predefined validators 196 | 197 | It also might be useful to extract and reuse validators for common types. Let's define validators for minimum and maximum `Int` value: 198 | ```scala 199 | import dupin.basic.all._ 200 | 201 | def min(value: Int) = BasicValidator.root[Int](_ > value, c => s"${c.path} should be greater than $value") 202 | def max(value: Int) = BasicValidator.root[Int](_ < value, c => s"${c.path} should be less than $value") 203 | ``` 204 | And since validators can be combined, you can use them to create more complex validators: 205 | ```scala 206 | import cats._ 207 | import dupin.basic.all._ 208 | 209 | implicit val memberValidator: BasicValidator[Member] = BasicValidator 210 | .success[Member] 211 | .combineP(_.age)(min(18) && max(40).failureAs(_ => "updated validation message")) 212 | 213 | val invalidMember = Member(Name("Ada"), 0) 214 | val result = invalidMember.validate 215 | 216 | assert(result == Validated.invalidNec(".age should be greater than 18")) 217 | ``` 218 | 219 | ### Message customization 220 | 221 | But not many real projects use strings as validation messages, for example you want to support internationalization: 222 | ```scala 223 | case class I18nMessage( 224 | description: String, 225 | key: String, 226 | params: List[String] 227 | ) 228 | ``` 229 | `BasicValidator[A]` is simply a type alias for `Validator[Id, String, A]`, so you can define own validator type with partially applied builder: 230 | 231 | ```scala 232 | import dupin._ 233 | 234 | type I18nValidator[A] = Validator[cats.Id, I18nMessage, A] 235 | val I18nValidator = Validator[cats.Id, I18nMessage] 236 | ``` 237 | And start creating validators with custom messages: 238 | ```scala 239 | import cats._ 240 | 241 | implicit val nameValidator: I18nValidator[Name] = I18nValidator.root[Name]( 242 | _.value.nonEmpty, 243 | c => I18nMessage( 244 | s"${c.path} should 
be non empty", 245 | "validator.name.empty", 246 | List(c.path.toString()) 247 | ) 248 | ) 249 | 250 | implicit val memberValidator: I18nValidator[Member] = I18nValidator 251 | .success[Member] 252 | .combinePI(_.name) 253 | .combinePR(_.age)(a => a > 18 && a < 40, c => I18nMessage( 254 | s"${c.path} should be between 18 and 40", 255 | "validator.member.age", 256 | List(c.path.toString()) 257 | )) 258 | ``` 259 | Validation messages will look like: 260 | ```scala 261 | import dupin.syntax._ 262 | 263 | val invalidMember = Member(Name(""), 0) 264 | val result = invalidMember.validate 265 | 266 | assert(result == Validated.invalid(NonEmptyChain( 267 | I18nMessage( 268 | ".name should be non empty", 269 | "validator.name.empty", 270 | List(".name") 271 | ), 272 | I18nMessage( 273 | ".age should be between 18 and 40", 274 | "validator.member.age", 275 | List(".age") 276 | ) 277 | ))) 278 | ``` 279 | 280 | ### Effectful validation 281 | 282 | For example, you want to allow only a limited list of names and it is stored in the database: 283 | ```scala 284 | import scala.concurrent.Future 285 | 286 | class NameService { 287 | private val allowedNames = Set("Ada") 288 | def contains(name: String): Future[Boolean] = 289 | // Emulation of DB call 290 | Future.successful(allowedNames(name)) 291 | } 292 | ``` 293 | So to be able to handle checks that return `Future[Boolean]`, you just need to define your own validator type with partially applied builder: 294 | ```scala 295 | import dupin._ 296 | import scala.concurrent.Future 297 | 298 | type FutureValidator[A] = Validator[Future, String, A] 299 | val FutureValidator = Validator[Future, String] 300 | ``` 301 | Then you can create validators with generic DSL (don't forget to import required type classes, as minimum `Functor[Future]`): 302 | ```scala 303 | import cats.implicits._ 304 | import scala.concurrent.Future 305 | 306 | val nameService = new NameService 307 | 308 | implicit val nameValidator: FutureValidator[Name] = FutureValidator.rootF[Name]( 309 | n => nameService.contains(n.value), 310 | c => s"${c.path} should be non empty" 311 | ) 312 | 313 | implicit val memberValidator: FutureValidator[Member] = FutureValidator 314 | .success[Member] 315 | .combinePI(_.name) 316 | .combinePR(_.age)(a => a > 18 && a < 40, c => s"${c.path} should be between 18 and 40") 317 | ``` 318 | Validation result will look like: 319 | ```scala 320 | import dupin.syntax._ 321 | 322 | val invalidMember = Member(Name(""), 0) 323 | val result: Future[ValidatedNec[String, Member]] = invalidMember.validate 324 | 325 | result.map(r => assert(r == Validated.invalid(NonEmptyChain( 326 | ".name should be non empty", 327 | ".age should be between 18 and 40" 328 | )))) 329 | ``` 330 | 331 | ### Custom validating package 332 | 333 | To avoid imports boilerplate and isolating all customizations, you can define your own dupin package: 334 | ```scala 335 | package object custom extends DupinCoreDsl with DupinSyntax { 336 | type CustomValidator[A] = Validator[Future, I18nMessage, A] 337 | val CustomValidator = Validator[Future, I18nMessage] 338 | 339 | type CustomParser[A, B] = Parser[Future, I18nMessage, A, B] 340 | val CustomParser = Parser[Future, I18nMessage] 341 | } 342 | ``` 343 | Then you can start using custom validator type with a single import: 344 | ```scala 345 | import cats.implicits._ 346 | import dupin.custom._ 347 | 348 | val nameService = new NameService 349 | 350 | implicit val nameValidator: CustomValidator[Name] = CustomValidator.rootF[Name]( 351 | n => 
nameService.contains(n.value), 352 | c => I18nMessage( 353 | s"${c.path} should be non empty", 354 | "validator.name.empty", 355 | List(c.path.toString()) 356 | ) 357 | ) 358 | 359 | val validName = Name("Ada") 360 | val valid: Future[Boolean] = validName.isValid 361 | 362 | valid.map(assert(_)) 363 | ``` 364 | 365 | ### Complex example 366 | 367 | Let's assume that you need to build a method that receives a list of raw term models (each model is a product of term itself and a list of mistakes that people often make when typing this term, for example: "calendar" -> ["calender", "celender"]) and parses them before saving to the database. Here are some requirements: 368 | - suggested raw model: 369 | ```scala 370 | case class RawTermModel( 371 | term: String, 372 | mistakes: List[String], 373 | ) 374 | ``` 375 | - term and mistake should be a single word 376 | - term and mistake should not exist in the database: 377 | ```scala 378 | type R[A] = Either[String, A] 379 | trait TermRepository { 380 | def contains(term: Term): R[Boolean] = ... 381 | } 382 | ``` 383 | - terms should be unique among other terms in the list 384 | - mistakes should be unique among other mistakes and terms in the list 385 | - parsed model should have as minimum one mistake 386 | - suggested final model: 387 | ```scala 388 | case class Term(value: String) 389 | case class TermModel( 390 | term: Term, 391 | mistakes: NonEmptyList[Term], 392 | ) 393 | ``` 394 | - if validation error occurs in term - skip the model and continue parsing 395 | - if validation error occurs in mistake - skip the mistake only and continue parsing 396 | - all validation errors should be collected and returned after parsing 397 | 398 | So the parser from `RawTermModel` to `TermModel`, considering the requirements above, will look like: 399 | ```scala 400 | //validation types to handle repository effect `R` 401 | type CustomValidator[A] = Validator[R, String, A] 402 | val CustomValidator = Validator[R, String] 403 | type CustomParser[A, B] = Parser[R, String, A, B] 404 | val CustomParser = Parser[R, String] 405 | 406 | //parsers per requirement: 407 | 408 | //term and mistake should be a single word 409 | val termParser = CustomParser 410 | .root[String, Term]( 411 | Option(_).filter(_.matches("\\w+")).map(Term.apply), 412 | c => s"${c.path}: cannot parse string '${c.value}' to a term" 413 | ) 414 | 415 | //term and mistake should not exist in the database 416 | val repositoryTermParser = CustomValidator 417 | .rootF[Term]( 418 | TermRepository.contains(_).map(!_), 419 | c => s"${c.path}: term '${c.value}' already exists" 420 | ) 421 | .toParser 422 | 423 | //intermediate model to aggregate parsed terms 424 | case class HalfParsedTermModel( 425 | term: Term, 426 | mistakes: List[Term], 427 | ) 428 | 429 | //terms should be unique among other terms in the list 430 | val uniqueTermsParser = CustomParser 431 | //define list level context where terms should be unique 432 | .idContext[List[HalfParsedTermModel]] { _ => 433 | val validTerms = mutable.Set.empty[Term] 434 | CustomValidator 435 | .root[Term](validTerms.add, c => s"${c.path}: term '${c.value}' is duplicate") 436 | .comapP[HalfParsedTermModel](_.term) 437 | .toParser 438 | //lift parser to `List` accumulating errors 439 | .liftToTraverseCombiningP[List] 440 | } 441 | 442 | //mistakes should be unique among other mistakes and terms in the list 443 | val uniqueTermsMistakesParser = CustomParser 444 | .idContext[List[HalfParsedTermModel]] { ms => 445 | val validTerms = 
mutable.Set.from(ms.view.map(_.term)) 446 | CustomParser 447 | .idContext[HalfParsedTermModel] { m => 448 | CustomValidator 449 | .root[Term](validTerms.add, c => s"${c.path}: mistake '${c.value}' is duplicate") 450 | .toParser 451 | .liftToTraverseCombiningP[List] 452 | .comapP[HalfParsedTermModel](_.mistakes) 453 | //update model with unique mistakes 454 | .map(a => m.copy(mistakes = a)) 455 | } 456 | .liftToTraverseCombiningP[List] 457 | } 458 | 459 | //parsed model should have as minimum one mistake 460 | def nelParser[A] = CustomParser 461 | .root[List[A], NonEmptyList[A]](_.toNel, c => s"${c.path}: cannot be empty") 462 | val halfToFullModelParser = CustomParser 463 | .context[HalfParsedTermModel, TermModel](m => 464 | nelParser[Term] 465 | .comapP[HalfParsedTermModel](_.mistakes) 466 | .map(mistakes => TermModel(m.term, mistakes)) 467 | ) 468 | 469 | //combine all parsers together 470 | val modelsParser = ( 471 | termParser 472 | .andThen(repositoryTermParser) 473 | .comapP[RawTermModel](_.term), 474 | termParser 475 | .andThen(repositoryTermParser) 476 | .liftToTraverseCombiningP[List] 477 | .comapP[RawTermModel](_.mistakes), 478 | ) 479 | .parMapN(HalfParsedTermModel.apply) 480 | .liftToTraverseCombiningP[List] 481 | .andThen(uniqueTermsParser) 482 | .andThen(uniqueTermsMistakesParser) 483 | .andThen(halfToFullModelParser.liftToTraverseCombiningP[List]) 484 | ``` 485 | 486 | (full list of test cases can be found [here](https://github.com/yakivy/dupin/blob/master/core/test/src/dupin/readme/ComplexExampleFixture.scala)) 487 | 488 | ### Roadmap 489 | - add unzip from index for validator/parser 490 | - enrich parser tests with validator cases 491 | - optimize `Parser.liftToTraverseCombiningP`, `combineK` is often slow, for example for lists 492 | - add complex example without parser for comparison 493 | - rename comap to contramap 494 | 495 | ### Changelog 496 | 497 | #### 0.6.x 498 | - add Parser type 499 | - replace implicit conversion (`comapToP`, ...) with explicit lift methods (`liftToTraverseP`, ...) 
500 | - a couple of minor fixes 501 | 502 | #### 0.5.x 503 | - simplify internal validator function 504 | - expose validator contravariant monoidal instance `ContravariantMonoidal[Validator[F, E, *]]` 505 | 506 | #### 0.4.x 507 | - add Scala 3 support for Scala Native 508 | - update Scala JS version 509 | - optimize path concatenation 510 | - separate F Validator methods (like `rootF`) 511 | - add Validator methods with context (like `combineC`) 512 | 513 | #### 0.3.x: 514 | - rename `dupin.Validator.compose` to `dupin.Validator.comap`, similar to `cats.Contravariant.contramap` 515 | - rename `dupin.Validator.combinePK` to `dupin.Validator.combinePL`, where `L` stands for "lifted" to reflect method signature 516 | - optimize a naive implementation of `ValidatorComapToP.validatorComapToPForTraverse` that threw StackOverflowException for long lists 517 | - minor refactorings 518 | 519 | #### 0.2.x: 520 | - migrate to mill build tool 521 | - add Scala 3, Scala JS and Scala Native support 522 | - expose validator monoid instance `MonoidK[Validator[F, E, *]]` 523 | - rename `dupin.base` package to `dupin.basic` 524 | - various refactorings and cleanups -------------------------------------------------------------------------------- /core/test/src/dupin/ValidatorSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin 2 | 3 | import cats._ 4 | import cats.implicits._ 5 | import cats.data.NonEmptyChain 6 | import cats.data.Validated 7 | import dupin.basic._ 8 | import dupin.core.FieldPart 9 | import org.scalatest.freespec.AsyncFreeSpec 10 | import scala.concurrent.Future 11 | import scala.util.Try 12 | 13 | class ValidatorSpec extends CustomSpec { 14 | "Type validator" - { 15 | "when created from simple type" - { 16 | "should be able to handle error" in { 17 | val v1 = BasicValidator.failure[Int](_ => "failure") 18 | val v2 = v1.handleErrorWith(_ => BasicValidator.success[Int]) 19 | assert(v1.validate(1) == Validated.invalidNec("failure")) 20 | assert(v2.validate(1) == Validated.Valid(1)) 21 | } 22 | "should be able to map error" in { 23 | val v1 = BasicValidator.failure[Int](_ => "failure") 24 | val v2 = v1.mapError(_ => "custom failure") 25 | assert(v1.validate(1) == Validated.invalidNec("failure")) 26 | assert(v2.validate(1) == Validated.invalidNec("custom failure")) 27 | } 28 | } 29 | "when created from list" - { 30 | "should return fail result" in { 31 | val v1 = BasicValidator.failure[Int](c => s"${c.path} value `${c.value}` is wrong") 32 | val v2 = v1.liftToTraverseP[List] 33 | 34 | assert(v1.validate(1) == Validated.invalidNec(". 
value `1` is wrong")) 35 | assert(v2.validate(List(1, 2, 3)) == Validated.invalid(NonEmptyChain( 36 | ".[0] value `1` is wrong", 37 | ".[1] value `2` is wrong", 38 | ".[2] value `3` is wrong", 39 | ))) 40 | } 41 | "should not throw stack overflow exception" in { 42 | val v1 = BasicValidator.success[Int] 43 | val v2 = v1.liftToTraverseP[List] 44 | 45 | assert(v2.validate(List.fill(1000000)(1)).isValid) 46 | } 47 | } 48 | } 49 | 50 | "One field validator when" - { 51 | case class OneFieldDataStructure(value: String) 52 | val m: BasicMessageBuilder[Any] = c => s"${c.path} is invalid" 53 | val vds = OneFieldDataStructure("valid string") 54 | val ivds = OneFieldDataStructure("invalid string") 55 | 56 | "created from root should" - { 57 | val c: OneFieldDataStructure => Boolean = _.value != "invalid string" 58 | val v1 = BasicValidator.root(c, m) 59 | val v2 = BasicValidator.success[OneFieldDataStructure].combineR(c, m) 60 | 61 | "return success result" in { 62 | val r = Validated.validNec(vds) 63 | assert(v1.validate(vds) == r) 64 | assert(v2.validate(vds) == r) 65 | } 66 | 67 | "return fail result" in { 68 | val r = Validated.invalidNec(". is invalid") 69 | assert(v1.validate(ivds) == r) 70 | assert(v2.validate(ivds) == r) 71 | } 72 | } 73 | 74 | val c: String => Boolean = _ != "invalid string" 75 | "created from explicit field path should" - { 76 | val p = Path(FieldPart("value")) 77 | val bv = BasicValidator.root[String](c, m) 78 | val v1 = bv.comapPE[OneFieldDataStructure](p, _.value) 79 | val v2 = BasicValidator.success[OneFieldDataStructure].combinePE(p, _.value)(bv) 80 | 81 | "return success result" in { 82 | val r = Validated.validNec(vds) 83 | assert(v1.validate(vds) == r) 84 | assert(v2.validate(vds) == r) 85 | } 86 | 87 | "return fail result" in { 88 | val r = Validated.invalidNec(".value is invalid") 89 | assert(v1.validate(ivds) == r) 90 | assert(v2.validate(ivds) == r) 91 | } 92 | } 93 | 94 | "created from macros field path should" - { 95 | val v1 = BasicValidator.success[OneFieldDataStructure].combinePR(_.value)(c, m) 96 | implicit val bv: BasicValidator[String] = BasicValidator.root(c, m) 97 | val v2 = bv.comapP[OneFieldDataStructure](_.value) 98 | val v3 = BasicValidator.success[OneFieldDataStructure].combineP(_.value)(bv) 99 | val v4 = BasicValidator.success[OneFieldDataStructure].combinePI(_.value) 100 | 101 | "return success result" in { 102 | val r = Validated.validNec(vds) 103 | assert(v1.validate(vds) == r) 104 | assert(v2.validate(vds) == r) 105 | assert(v3.validate(vds) == r) 106 | assert(v4.validate(vds) == r) 107 | } 108 | 109 | "return fail result" in { 110 | val r = Validated.invalidNec(".value is invalid") 111 | assert(v1.validate(ivds) == r) 112 | assert(v2.validate(ivds) == r) 113 | assert(v3.validate(ivds) == r) 114 | assert(v4.validate(ivds) == r) 115 | } 116 | 117 | "throw compilation error" in { 118 | assertCompilationErrorMessagePattern( 119 | assertCompiles("""BasicValidator.success[OneFieldDataStructure].combineP(_.value + "test")(bv)"""), 120 | """(?s)Unable to retrieve field path from function 121 | |(\Q((x$1: OneFieldDataStructure) => x$1.value.+("test"))\E|\Q((_$46: OneFieldDataStructure) => _$46.value.+("test"))\E) 122 | |.*""".r, 123 | ) 124 | } 125 | } 126 | 127 | "created from macros lifted field path should" - { 128 | case class OneLiftedFieldDataStructure(value: Option[String]) 129 | 130 | import cats.implicits._ 131 | implicit val bv: BasicValidator[String] = BasicValidator.root(c, m) 132 | val v1 = 
bv.liftToTraverseP[Option].comapP[OneLiftedFieldDataStructure](_.value) 133 | val v2 = BasicValidator.success[OneLiftedFieldDataStructure].combinePI(_.value) 134 | 135 | "return success result" in { 136 | val ds1 = OneLiftedFieldDataStructure(Option("valid string")) 137 | val r1 = Validated.validNec(ds1) 138 | assert(v1.validate(ds1) == r1) 139 | assert(v2.validate(ds1) == r1) 140 | 141 | val ds2 = OneLiftedFieldDataStructure(None) 142 | val r2 = Validated.validNec(ds2) 143 | assert(v1.validate(ds2) == r2) 144 | assert(v2.validate(ds2) == r2) 145 | } 146 | 147 | "return fail result" in { 148 | val ds = OneLiftedFieldDataStructure(Option("invalid string")) 149 | val r = Validated.invalidNec(".value.[0] is invalid") 150 | assert(v1.validate(ds) == r) 151 | assert(v2.validate(ds) == r) 152 | assert(v2.validate(ds) == r) 153 | } 154 | } 155 | 156 | "effect should" - { 157 | type FutureValidator[A] = Validator[Future, String, A] 158 | val FutureValidator = Validator[Future, String] 159 | 160 | val c1 = c.andThen(Future.successful) 161 | implicit val bv: FutureValidator[String] = FutureValidator.rootF(c1, m) 162 | val v1 = bv.comapP[OneFieldDataStructure](_.value) 163 | val v2 = FutureValidator.success[String].combineRF(c1, m).comapP[OneFieldDataStructure](_.value) 164 | val v3 = FutureValidator.success[String].combineR(c, m).comapP[OneFieldDataStructure](_.value) 165 | val v4 = FutureValidator.success[OneFieldDataStructure].combinePRF(_.value)(c1, m) 166 | val v5 = FutureValidator.success[OneFieldDataStructure].combinePR(_.value)(c, m) 167 | 168 | "return success result" in { 169 | val er = Validated.validNec(vds) 170 | ( 171 | v1.validate(vds), v2.validate(vds), v3.validate(vds), v4.validate(vds), v5.validate(vds) 172 | ).mapN { (vr1, vr2, vr3, vr4, vr5) => 173 | assert(vr1 == er) 174 | assert(vr2 == er) 175 | assert(vr3 == er) 176 | assert(vr4 == er) 177 | assert(vr5 == er) 178 | } 179 | } 180 | 181 | "return fail result" in { 182 | val er = Validated.invalidNec(".value is invalid") 183 | ( 184 | v1.validate(ivds), v2.validate(ivds), v3.validate(ivds), v4.validate(ivds), v5.validate(ivds) 185 | ).mapN { (vr1, vr2, vr3, vr4, vr5) => 186 | assert(vr1 == er) 187 | assert(vr2 == er) 188 | assert(vr3 == er) 189 | assert(vr4 == er) 190 | assert(vr5 == er) 191 | } 192 | } 193 | } 194 | } 195 | 196 | "Two field validator when" - { 197 | case class TwoFieldDataStructure(v1: String, v2: Int) 198 | val m: BasicMessageBuilder[Any] = c => s"${c.path} is invalid" 199 | val c1: String => Boolean = _ != "invalid string" 200 | val c2: Int => Boolean = _ != 0 201 | 202 | "created from root should" - { 203 | val rc1: TwoFieldDataStructure => Boolean = c1.compose[TwoFieldDataStructure](_.v1) 204 | val rc2: TwoFieldDataStructure => Boolean = c2.compose[TwoFieldDataStructure](_.v2) 205 | val v1 = BasicValidator.root(rc1, m) combine BasicValidator.root(rc2, m) 206 | val v2 = BasicValidator.success[TwoFieldDataStructure].combineR(rc1, m).combineR(rc2, m) 207 | 208 | "return success result with two successful checks" in { 209 | val ds = TwoFieldDataStructure("valid string", 1) 210 | val r = Validated.validNec(ds) 211 | assert(v1.validate(ds) == r) 212 | assert(v2.validate(ds) == r) 213 | } 214 | 215 | "return fail result with first fail check" in { 216 | val ds = TwoFieldDataStructure("invalid string", 1) 217 | val r = Validated.invalidNec(". 
is invalid") 218 | assert(v1.validate(ds) == r) 219 | assert(v2.validate(ds) == r) 220 | } 221 | 222 | "return fail result with second fail check" in { 223 | val ds = TwoFieldDataStructure("valid string", 0) 224 | val r = Validated.invalidNec(". is invalid") 225 | assert(v1.validate(ds) == r) 226 | assert(v2.validate(ds) == r) 227 | } 228 | 229 | "return fail result with two fail checks" in { 230 | val ds = TwoFieldDataStructure("invalid string", 0) 231 | val r = Validated.Invalid(NonEmptyChain(". is invalid", ". is invalid")) 232 | assert(v1.validate(ds) == r) 233 | assert(v2.validate(ds) == r) 234 | } 235 | } 236 | 237 | "created from field path should" - { 238 | val v1 = BasicValidator.success[TwoFieldDataStructure] 239 | .combinePR(_.v1)(c1, m) 240 | .combinePR(_.v2)(c2, m) 241 | implicit val bv1: BasicValidator[String] = BasicValidator.root[String](c1, m) 242 | implicit val bv2: BasicValidator[Int] = BasicValidator.root[Int](c2, m) 243 | val v2 = bv1.comapP[TwoFieldDataStructure](_.v1) combine 244 | bv2.comapP[TwoFieldDataStructure](_.v2) 245 | val v3 = BasicValidator.success[TwoFieldDataStructure] 246 | .combineP(_.v1)(bv1) 247 | .combineP(_.v2)(bv2) 248 | val v4 = BasicValidator.success[TwoFieldDataStructure] 249 | .combinePI(_.v1) 250 | .combinePI(_.v2) 251 | 252 | "return success result with two successful checks" in { 253 | val ds = TwoFieldDataStructure("valid string", 1) 254 | val r = Validated.validNec(ds) 255 | assert(v1.validate(ds) == r) 256 | assert(v2.validate(ds) == r) 257 | assert(v3.validate(ds) == r) 258 | assert(v4.validate(ds) == r) 259 | } 260 | 261 | "return fail result with first fail check" in { 262 | val ds = TwoFieldDataStructure("invalid string", 1) 263 | val r = Validated.invalidNec(".v1 is invalid") 264 | assert(v1.validate(ds) == r) 265 | assert(v2.validate(ds) == r) 266 | assert(v3.validate(ds) == r) 267 | assert(v4.validate(ds) == r) 268 | } 269 | 270 | "return fail result with second fail check" in { 271 | val ds = TwoFieldDataStructure("valid string", 0) 272 | val r = Validated.invalidNec(".v2 is invalid") 273 | assert(v1.validate(ds) == r) 274 | assert(v2.validate(ds) == r) 275 | assert(v3.validate(ds) == r) 276 | assert(v4.validate(ds) == r) 277 | } 278 | 279 | "return fail result with two fail checks" in { 280 | val ds = TwoFieldDataStructure("invalid string", 0) 281 | val r = Validated.Invalid(NonEmptyChain(".v1 is invalid", ".v2 is invalid")) 282 | assert(v1.validate(ds) == r) 283 | assert(v2.validate(ds) == r) 284 | assert(v3.validate(ds) == r) 285 | assert(v4.validate(ds) == r) 286 | } 287 | } 288 | 289 | "created in two stages should" - { 290 | val v = BasicValidator.root(c1, m).comapP[TwoFieldDataStructure](_.v1) andThen 291 | BasicValidator.root(c2, m).comapP[TwoFieldDataStructure](_.v2) 292 | 293 | "return success result with two successful checks" in { 294 | val ds = TwoFieldDataStructure("valid string", 1) 295 | val r = Validated.validNec(ds) 296 | assert(v.validate(ds) == r) 297 | } 298 | 299 | "return fail result with first fail check" in { 300 | val ds = TwoFieldDataStructure("invalid string", 1) 301 | val r = Validated.invalidNec(".v1 is invalid") 302 | assert(v.validate(ds) == r) 303 | } 304 | 305 | "return fail result with second fail check" in { 306 | val ds = TwoFieldDataStructure("valid string", 0) 307 | val r = Validated.invalidNec(".v2 is invalid") 308 | assert(v.validate(ds) == r) 309 | } 310 | 311 | "return only first fail result with two fail checks" in { 312 | val ds = TwoFieldDataStructure("invalid string", 0) 313 | val r = 
Validated.invalidNec(".v1 is invalid") 314 | assert(v.validate(ds) == r) 315 | } 316 | } 317 | } 318 | 319 | "Two layer validator when" - { 320 | case class SecondLayerDataStructure(v: String) 321 | case class FirstLayerDataStructure(v1: SecondLayerDataStructure, v2: Int) 322 | val m: BasicMessageBuilder[Any] = c => s"${c.path} is invalid" 323 | 324 | val c1: String => Boolean = _ != "invalid string" 325 | val c2: Int => Boolean = _ != 0 326 | implicit val vi1: BasicValidator[SecondLayerDataStructure] = BasicValidator 327 | .success[SecondLayerDataStructure] 328 | .combinePR(_.v)(c1, m) 329 | 330 | "created from field path should" - { 331 | val v1 = BasicValidator.success[FirstLayerDataStructure] 332 | .combinePI(_.v1) 333 | .combinePR(_.v2)(c2, m) 334 | val v2 = BasicValidator.success[FirstLayerDataStructure] 335 | .combinePR(_.v1.v)(c1, m) 336 | .combinePR(_.v2)(c2, m) 337 | 338 | "return success result with two successful checks" in { 339 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("valid string"), 1) 340 | val r = Validated.validNec(ds) 341 | assert(v1.validate(ds) == r) 342 | assert(v2.validate(ds) == r) 343 | } 344 | 345 | "return fail result with first fail check" in { 346 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("invalid string"), 1) 347 | val r = Validated.invalidNec(".v1.v is invalid") 348 | assert(v1.validate(ds) == r) 349 | assert(v2.validate(ds) == r) 350 | } 351 | 352 | "return fail result with second fail check" in { 353 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("valid string"), 0) 354 | val r = Validated.invalidNec(".v2 is invalid") 355 | assert(v1.validate(ds) == r) 356 | assert(v2.validate(ds) == r) 357 | } 358 | 359 | "return fail result with two fail checks" in { 360 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("invalid string"), 0) 361 | val r = Validated.Invalid(NonEmptyChain(".v1.v is invalid", ".v2 is invalid")) 362 | assert(v1.validate(ds) == r) 363 | assert(v2.validate(ds) == r) 364 | } 365 | } 366 | 367 | "created from context should" - { 368 | val c3: Int => String => Boolean = fv => sv => !Try(sv.toInt).toOption.contains(fv) 369 | val m1: BasicMessageBuilder[Any] = c => s"${c.path} cannot be equal to value from context" 370 | def vi2 = (c: FirstLayerDataStructure) => BasicValidator 371 | .success[SecondLayerDataStructure] 372 | .combinePR(_.v)(c3(c.v2), m1) 373 | 374 | val v1 = BasicValidator.context[FirstLayerDataStructure](vi2(_).comapP(_.v1)) 375 | val v2 = BasicValidator.success[FirstLayerDataStructure].combinePC(_.v1)(vi2) 376 | val v3 = BasicValidator.success[FirstLayerDataStructure].combineC(c => vi2(c).comapP(_.v1)) 377 | 378 | "return success result" in { 379 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("valid string"), 1) 380 | val r = Validated.validNec(ds) 381 | assert(v1.validate(ds) == r) 382 | assert(v2.validate(ds) == r) 383 | assert(v3.validate(ds) == r) 384 | } 385 | "return fail result" in { 386 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("1"), 1) 387 | val r = Validated.Invalid(NonEmptyChain(".v1.v cannot be equal to value from context")) 388 | assert(v1.validate(ds) == r) 389 | assert(v2.validate(ds) == r) 390 | assert(v3.validate(ds) == r) 391 | } 392 | } 393 | 394 | "derived from validator type should" - { 395 | implicit val vi2: BasicValidator[Int] = BasicValidator.root[Int](c2, m) 396 | 397 | val v1 = BasicValidator.derive[FirstLayerDataStructure] 398 | 399 | "return success result with two successful checks" in { 400 | val ds = 
FirstLayerDataStructure(SecondLayerDataStructure("validdd string"), 1) 401 | val r = Validated.validNec(ds) 402 | assert(v1.validate(ds) == r) 403 | } 404 | 405 | "return fail result with first fail check" in { 406 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("invalid string"), 1) 407 | val r = Validated.invalidNec(".v1.v is invalid") 408 | assert(v1.validate(ds) == r) 409 | } 410 | 411 | "return fail result with second fail check" in { 412 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("valid string"), 0) 413 | val r = Validated.invalidNec(".v2 is invalid") 414 | assert(v1.validate(ds) == r) 415 | } 416 | 417 | "return fail result with two fail checks" in { 418 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("invalid string"), 0) 419 | val r = Validated.Invalid(NonEmptyChain(".v1.v is invalid", ".v2 is invalid")) 420 | assert(v1.validate(ds) == r) 421 | } 422 | 423 | "return successful result for generic class" in { 424 | case class ValueDto[A](value: A) 425 | def v2[B: BasicValidator] = BasicValidator.derive[ValueDto[B]] 426 | val ds = ValueDto(1) 427 | assert(v2[Int].validate(ds) == Validated.validNec(ds)) 428 | } 429 | } 430 | } 431 | } 432 | -------------------------------------------------------------------------------- /core/test/src/dupin/ParserSpec.scala: -------------------------------------------------------------------------------- 1 | package dupin 2 | 3 | import cats.data.Chain 4 | import cats.data.Ior 5 | import cats.data.NonEmptyChain 6 | import cats.data.Validated 7 | import cats.implicits._ 8 | import dupin.basic._ 9 | import dupin.core.FieldPart 10 | import org.scalatest.freespec.AsyncFreeSpec 11 | import scala.concurrent.Future 12 | import scala.util.Try 13 | 14 | class ParserSpec extends AsyncFreeSpec { 15 | val stringC: String => Boolean = _ != "invalid string" 16 | val invalidStringM: BasicMessageBuilder[Any] = c => s"${c.path} is invalid" 17 | "Id parser" - { 18 | "created from root should" - { 19 | val vds = "valid string" 20 | val ivds = "invalid string" 21 | val p1 = BasicValidator.root(stringC, invalidStringM).toParser 22 | val p2 = BasicParser.idRoot[String](Option(_).filter(stringC), invalidStringM) 23 | 24 | "return success result" in { 25 | val r = Ior.right(vds) 26 | assert(p1.parse(vds) == r) 27 | assert(p2.parse(vds) == r) 28 | } 29 | 30 | "return fail result" in { 31 | val r = Ior.leftNec(". is invalid") 32 | assert(p1.parse(ivds) == r) 33 | assert(p2.parse(ivds) == r) 34 | } 35 | } 36 | "when created from simple type" - { 37 | "should be able to handle error" in { 38 | val p1 = BasicParser.idFailure[Int](_ => "failure") 39 | val p2 = p1.handleErrorWith(_ => BasicParser.idSuccess[Int]) 40 | assert(p1.parse(1) == Ior.leftNec("failure")) 41 | assert(p2.parse(1) == Ior.right(1)) 42 | } 43 | "should be able to map error" in { 44 | val p1 = BasicParser.idFailure[Int](_ => "failure") 45 | val p2 = p1.mapError(_ => "custom failure") 46 | assert(p1.parse(1) == Ior.leftNec("failure")) 47 | assert(p2.parse(1) == Ior.leftNec("custom failure")) 48 | } 49 | } 50 | "when created from list" - { 51 | "should return fail result" in { 52 | val p1 = BasicParser.idRoot[Int](Option(_).filter(_ < 2), c => s"${c.path} value `${c.value}` is wrong") 53 | val p2 = p1.liftToTraverseP[List] 54 | val p3 = p1.liftToTraverseCombiningP[List] 55 | 56 | assert(p1.parse(1) == Ior.right(1)) 57 | assert(p1.parse(3) == Ior.leftNec(". 
value `3` is wrong")) 58 | assert(p2.parse(List(1, 2, 3)) == Ior.left( 59 | NonEmptyChain( 60 | ".[1] value `2` is wrong", 61 | ".[2] value `3` is wrong", 62 | ) 63 | )) 64 | assert(p3.parse(List(1, 2, 3)) == Ior.both( 65 | NonEmptyChain( 66 | ".[1] value `2` is wrong", 67 | ".[2] value `3` is wrong", 68 | ), 69 | List(1), 70 | )) 71 | } 72 | "should not throw stack overflow exception" in { 73 | val p1 = BasicParser.idSuccess[Int] 74 | val p2 = p1.liftToTraverseP[List] 75 | val p3 = p1.liftToTraverseCombiningP[Chain] 76 | 77 | assert(p2.parse(List.fill(1000000)(1)).isRight) 78 | assert(p3.parse(Chain(List.fill(1000000)(1):_*)).isRight) 79 | } 80 | } 81 | } 82 | 83 | "One field parser when" - { 84 | case class FromOneFieldDataStructure(value: String) 85 | case class ToOneFieldDataStructure(value: String) 86 | val vfds = FromOneFieldDataStructure("valid string") 87 | val vtds = ToOneFieldDataStructure("valid string") 88 | val ivfds = FromOneFieldDataStructure("invalid string") 89 | 90 | "created from root should" - { 91 | val p = BasicParser.root[FromOneFieldDataStructure, ToOneFieldDataStructure]( 92 | Option(_).map(_.value).filter(stringC).map(ToOneFieldDataStructure.apply), 93 | invalidStringM, 94 | ) 95 | 96 | "return success result" in { 97 | val r = Ior.right(vtds) 98 | assert(p.parse(vfds) == r) 99 | } 100 | 101 | "return fail result" in { 102 | val r = Ior.leftNec(". is invalid") 103 | assert(p.parse(ivfds) == r) 104 | } 105 | } 106 | /* 107 | val c: String => Boolean = _ != "invalid string" 108 | "created from explicit field path should" - { 109 | val p = Path(FieldPart("value")) 110 | val bv = BasicValidator.root[String](c, m) 111 | val v1 = bv.comapPE[ToOneFieldDataStructure](p, _.value) 112 | val v2 = BasicValidator.success[ToOneFieldDataStructure].combinePE(p, _.value)(bv) 113 | 114 | "return success result" in { 115 | val r = Validated.validNec(vfds) 116 | assert(v1.validate(vfds) == r) 117 | assert(v2.validate(vfds) == r) 118 | } 119 | 120 | "return fail result" in { 121 | val r = Validated.invalidNec(".value is invalid") 122 | assert(v1.validate(ivfds) == r) 123 | assert(v2.validate(ivfds) == r) 124 | } 125 | } 126 | 127 | "created from macros field path should" - { 128 | val v1 = BasicValidator.success[ToOneFieldDataStructure].combinePR(_.value)(c, m) 129 | implicit val bv: BasicValidator[String] = BasicValidator.root(c, m) 130 | val v2 = bv.comapP[ToOneFieldDataStructure](_.value) 131 | val v3 = BasicValidator.success[ToOneFieldDataStructure].combineP(_.value)(bv) 132 | val v4 = BasicValidator.success[ToOneFieldDataStructure].combinePI(_.value) 133 | 134 | "return success result" in { 135 | val r = Validated.validNec(vfds) 136 | assert(v1.validate(vfds) == r) 137 | assert(v2.validate(vfds) == r) 138 | assert(v3.validate(vfds) == r) 139 | assert(v4.validate(vfds) == r) 140 | } 141 | 142 | "return fail result" in { 143 | val r = Validated.invalidNec(".value is invalid") 144 | assert(v1.validate(ivfds) == r) 145 | assert(v2.validate(ivfds) == r) 146 | assert(v3.validate(ivfds) == r) 147 | assert(v4.validate(ivfds) == r) 148 | } 149 | } 150 | 151 | "created from macros lifted field path should" - { 152 | case class OneLiftedFieldDataStructure(value: Option[String]) 153 | 154 | import cats.implicits._ 155 | implicit val bv: BasicValidator[String] = BasicValidator.root(c, m) 156 | val v1 = bv.liftToTraverseP[Option].comapP[OneLiftedFieldDataStructure](_.value) 157 | val v2 = BasicValidator.success[OneLiftedFieldDataStructure].combinePI(_.value) 158 | 159 | "return success result"
in { 160 | val ds1 = OneLiftedFieldDataStructure(Option("valid string")) 161 | val r1 = Validated.validNec(ds1) 162 | assert(v1.validate(ds1) == r1) 163 | assert(v2.validate(ds1) == r1) 164 | 165 | val ds2 = OneLiftedFieldDataStructure(None) 166 | val r2 = Validated.validNec(ds2) 167 | assert(v1.validate(ds2) == r2) 168 | assert(v2.validate(ds2) == r2) 169 | } 170 | 171 | "return fail result" in { 172 | val ds = OneLiftedFieldDataStructure(Option("invalid string")) 173 | val r = Validated.invalidNec(".value.[0] is invalid") 174 | assert(v1.validate(ds) == r) 175 | assert(v2.validate(ds) == r) 176 | assert(v2.validate(ds) == r) 177 | } 178 | } 179 | 180 | "effect should" - { 181 | type FutureValidator[A] = Validator[Future, String, A] 182 | val FutureValidator = Validator[Future, String] 183 | 184 | val c1 = c.andThen(Future.successful) 185 | implicit val bv: FutureValidator[String] = FutureValidator.rootF(c1, m) 186 | val v1 = bv.comapP[ToOneFieldDataStructure](_.value) 187 | val v2 = FutureValidator.success[String].combineRF(c1, m).comapP[ToOneFieldDataStructure](_.value) 188 | val v3 = FutureValidator.success[String].combineR(c, m).comapP[ToOneFieldDataStructure](_.value) 189 | val v4 = FutureValidator.success[ToOneFieldDataStructure].combinePRF(_.value)(c1, m) 190 | val v5 = FutureValidator.success[ToOneFieldDataStructure].combinePR(_.value)(c, m) 191 | 192 | "return success result" in { 193 | val er = Validated.validNec(vfds) 194 | ( 195 | v1.validate(vfds), v2.validate(vfds), v3.validate(vfds), v4.validate(vfds), v5.validate(vfds) 196 | ).mapN { (vr1, vr2, vr3, vr4, vr5) => 197 | assert(vr1 == er) 198 | assert(vr2 == er) 199 | assert(vr3 == er) 200 | assert(vr4 == er) 201 | assert(vr5 == er) 202 | } 203 | } 204 | 205 | "return fail result" in { 206 | val er = Validated.invalidNec(".value is invalid") 207 | ( 208 | v1.validate(ivfds), v2.validate(ivfds), v3.validate(ivfds), v4.validate(ivfds), v5.validate(ivfds) 209 | ).mapN { (vr1, vr2, vr3, vr4, vr5) => 210 | assert(vr1 == er) 211 | assert(vr2 == er) 212 | assert(vr3 == er) 213 | assert(vr4 == er) 214 | assert(vr5 == er) 215 | } 216 | } 217 | }*/ 218 | } 219 | 220 | /*"Two field validator when" - { 221 | case class TwoFieldDataStructure(v1: String, v2: Int) 222 | val m: BasicMessageBuilder[Any] = c => s"${c.path} is invalid" 223 | val c1: String => Boolean = _ != "invalid string" 224 | val c2: Int => Boolean = _ != 0 225 | 226 | "created from root should" - { 227 | val rc1: TwoFieldDataStructure => Boolean = c1.compose[TwoFieldDataStructure](_.v1) 228 | val rc2: TwoFieldDataStructure => Boolean = c2.compose[TwoFieldDataStructure](_.v2) 229 | val v1 = BasicValidator.root(rc1, m) combine BasicValidator.root(rc2, m) 230 | val v2 = BasicValidator.success[TwoFieldDataStructure].combineR(rc1, m).combineR(rc2, m) 231 | 232 | "return success result with two successful checks" in { 233 | val ds = TwoFieldDataStructure("valid string", 1) 234 | val r = Validated.validNec(ds) 235 | assert(v1.validate(ds) == r) 236 | assert(v2.validate(ds) == r) 237 | } 238 | 239 | "return fail result with first fail check" in { 240 | val ds = TwoFieldDataStructure("invalid string", 1) 241 | val r = Validated.invalidNec(". is invalid") 242 | assert(v1.validate(ds) == r) 243 | assert(v2.validate(ds) == r) 244 | } 245 | 246 | "return fail result with second fail check" in { 247 | val ds = TwoFieldDataStructure("valid string", 0) 248 | val r = Validated.invalidNec(". 
is invalid") 249 | assert(v1.validate(ds) == r) 250 | assert(v2.validate(ds) == r) 251 | } 252 | 253 | "return fail result with two fail checks" in { 254 | val ds = TwoFieldDataStructure("invalid string", 0) 255 | val r = Validated.Invalid(NonEmptyChain(". is invalid", ". is invalid")) 256 | assert(v1.validate(ds) == r) 257 | assert(v2.validate(ds) == r) 258 | } 259 | } 260 | 261 | "created from field path should" - { 262 | val v1 = BasicValidator.success[TwoFieldDataStructure] 263 | .combinePR(_.v1)(c1, m) 264 | .combinePR(_.v2)(c2, m) 265 | implicit val bv1: BasicValidator[String] = BasicValidator.root[String](c1, m) 266 | implicit val bv2: BasicValidator[Int] = BasicValidator.root[Int](c2, m) 267 | val v2 = bv1.comapP[TwoFieldDataStructure](_.v1) combine 268 | bv2.comapP[TwoFieldDataStructure](_.v2) 269 | val v3 = BasicValidator.success[TwoFieldDataStructure] 270 | .combineP(_.v1)(bv1) 271 | .combineP(_.v2)(bv2) 272 | val v4 = BasicValidator.success[TwoFieldDataStructure] 273 | .combinePI(_.v1) 274 | .combinePI(_.v2) 275 | 276 | "return success result with two successful checks" in { 277 | val ds = TwoFieldDataStructure("valid string", 1) 278 | val r = Validated.validNec(ds) 279 | assert(v1.validate(ds) == r) 280 | assert(v2.validate(ds) == r) 281 | assert(v3.validate(ds) == r) 282 | assert(v4.validate(ds) == r) 283 | } 284 | 285 | "return fail result with first fail check" in { 286 | val ds = TwoFieldDataStructure("invalid string", 1) 287 | val r = Validated.invalidNec(".v1 is invalid") 288 | assert(v1.validate(ds) == r) 289 | assert(v2.validate(ds) == r) 290 | assert(v3.validate(ds) == r) 291 | assert(v4.validate(ds) == r) 292 | } 293 | 294 | "return fail result with second fail check" in { 295 | val ds = TwoFieldDataStructure("valid string", 0) 296 | val r = Validated.invalidNec(".v2 is invalid") 297 | assert(v1.validate(ds) == r) 298 | assert(v2.validate(ds) == r) 299 | assert(v3.validate(ds) == r) 300 | assert(v4.validate(ds) == r) 301 | } 302 | 303 | "return fail result with two fail checks" in { 304 | val ds = TwoFieldDataStructure("invalid string", 0) 305 | val r = Validated.Invalid(NonEmptyChain(".v1 is invalid", ".v2 is invalid")) 306 | assert(v1.validate(ds) == r) 307 | assert(v2.validate(ds) == r) 308 | assert(v3.validate(ds) == r) 309 | assert(v4.validate(ds) == r) 310 | } 311 | } 312 | 313 | "created in two stages should" - { 314 | val v = BasicValidator.root(c1, m).comapP[TwoFieldDataStructure](_.v1) andThen 315 | BasicValidator.root(c2, m).comapP[TwoFieldDataStructure](_.v2) 316 | 317 | "return success result with two successful checks" in { 318 | val ds = TwoFieldDataStructure("valid string", 1) 319 | val r = Validated.validNec(ds) 320 | assert(v.validate(ds) == r) 321 | } 322 | 323 | "return fail result with first fail check" in { 324 | val ds = TwoFieldDataStructure("invalid string", 1) 325 | val r = Validated.invalidNec(".v1 is invalid") 326 | assert(v.validate(ds) == r) 327 | } 328 | 329 | "return fail result with second fail check" in { 330 | val ds = TwoFieldDataStructure("valid string", 0) 331 | val r = Validated.invalidNec(".v2 is invalid") 332 | assert(v.validate(ds) == r) 333 | } 334 | 335 | "return only first fail result with two fail checks" in { 336 | val ds = TwoFieldDataStructure("invalid string", 0) 337 | val r = Validated.invalidNec(".v1 is invalid") 338 | assert(v.validate(ds) == r) 339 | } 340 | } 341 | } 342 | 343 | "Two layer validator when" - { 344 | case class SecondLayerDataStructure(v: String) 345 | case class FirstLayerDataStructure(v1: 
SecondLayerDataStructure, v2: Int) 346 | val m: BasicMessageBuilder[Any] = c => s"${c.path} is invalid" 347 | 348 | val c1: String => Boolean = _ != "invalid string" 349 | val c2: Int => Boolean = _ != 0 350 | implicit val vi1: BasicValidator[SecondLayerDataStructure] = BasicValidator 351 | .success[SecondLayerDataStructure] 352 | .combinePR(_.v)(c1, m) 353 | 354 | "created from field path should" - { 355 | val v1 = BasicValidator.success[FirstLayerDataStructure] 356 | .combinePI(_.v1) 357 | .combinePR(_.v2)(c2, m) 358 | val v2 = BasicValidator.success[FirstLayerDataStructure] 359 | .combinePR(_.v1.v)(c1, m) 360 | .combinePR(_.v2)(c2, m) 361 | 362 | "return success result with two successful checks" in { 363 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("valid string"), 1) 364 | val r = Validated.validNec(ds) 365 | assert(v1.validate(ds) == r) 366 | assert(v2.validate(ds) == r) 367 | } 368 | 369 | "return fail result with first fail check" in { 370 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("invalid string"), 1) 371 | val r = Validated.invalidNec(".v1.v is invalid") 372 | assert(v1.validate(ds) == r) 373 | assert(v2.validate(ds) == r) 374 | } 375 | 376 | "return fail result with second fail check" in { 377 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("valid string"), 0) 378 | val r = Validated.invalidNec(".v2 is invalid") 379 | assert(v1.validate(ds) == r) 380 | assert(v2.validate(ds) == r) 381 | } 382 | 383 | "return fail result with two fail checks" in { 384 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("invalid string"), 0) 385 | val r = Validated.Invalid(NonEmptyChain(".v1.v is invalid", ".v2 is invalid")) 386 | assert(v1.validate(ds) == r) 387 | assert(v2.validate(ds) == r) 388 | } 389 | } 390 | 391 | "created from context should" - { 392 | val c3: Int => String => Boolean = fv => sv => !Try(sv.toInt).toOption.contains(fv) 393 | val m1: BasicMessageBuilder[Any] = c => s"${c.path} cannot be equal to value from context" 394 | def vi2 = (c: FirstLayerDataStructure) => BasicValidator 395 | .success[SecondLayerDataStructure] 396 | .combinePR(_.v)(c3(c.v2), m1) 397 | 398 | val v1 = BasicValidator.context[FirstLayerDataStructure](vi2(_).comapP(_.v1)) 399 | val v2 = BasicValidator.success[FirstLayerDataStructure].combinePC(_.v1)(vi2) 400 | val v3 = BasicValidator.success[FirstLayerDataStructure].combineC(c => vi2(c).comapP(_.v1)) 401 | 402 | "return success result" in { 403 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("valid string"), 1) 404 | val r = Validated.validNec(ds) 405 | assert(v1.validate(ds) == r) 406 | assert(v2.validate(ds) == r) 407 | assert(v3.validate(ds) == r) 408 | } 409 | "return fail result" in { 410 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("1"), 1) 411 | val r = Validated.Invalid(NonEmptyChain(".v1.v cannot be equal to value from context")) 412 | assert(v1.validate(ds) == r) 413 | assert(v2.validate(ds) == r) 414 | assert(v3.validate(ds) == r) 415 | } 416 | } 417 | 418 | "derived from validator type should" - { 419 | implicit val vi2: BasicValidator[Int] = BasicValidator.root[Int](c2, m) 420 | 421 | val v1 = BasicValidator.derive[FirstLayerDataStructure] 422 | 423 | "return success result with two successful checks" in { 424 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("validdd string"), 1) 425 | val r = Validated.validNec(ds) 426 | assert(v1.validate(ds) == r) 427 | } 428 | 429 | "return fail result with first fail check" in { 430 | val ds = 
FirstLayerDataStructure(SecondLayerDataStructure("invalid string"), 1) 431 | val r = Validated.invalidNec(".v1.v is invalid") 432 | assert(v1.validate(ds) == r) 433 | } 434 | 435 | "return fail result with second fail check" in { 436 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("valid string"), 0) 437 | val r = Validated.invalidNec(".v2 is invalid") 438 | assert(v1.validate(ds) == r) 439 | } 440 | 441 | "return fail result with two fail checks" in { 442 | val ds = FirstLayerDataStructure(SecondLayerDataStructure("invalid string"), 0) 443 | val r = Validated.Invalid(NonEmptyChain(".v1.v is invalid", ".v2 is invalid")) 444 | assert(v1.validate(ds) == r) 445 | } 446 | } 447 | }*/ 448 | } 449 | --------------------------------------------------------------------------------
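A minimal usage sketch distilled from the specs above, assuming only the API those specs exercise (`BasicValidator.success`, `combinePR`, `validate`, `BasicMessageBuilder`); the `User` case class, the check functions, and the object name are illustrative and not part of the repository:

    import dupin.basic._

    object BasicValidatorSketch {
      // Domain type used only for this illustration.
      case class User(name: String, age: Int)

      // Checks and message builder written in the same style as the specs above.
      val nonEmptyName: String => Boolean = _.nonEmpty
      val nonNegativeAge: Int => Boolean = _ >= 0
      val message: BasicMessageBuilder[Any] = c => s"${c.path} is invalid"

      // Field-level checks combined with macro-derived path prefixes, as in ValidatorSpec.
      val userValidator: BasicValidator[User] = BasicValidator.success[User]
        .combinePR(_.name)(nonEmptyName, message)
        .combinePR(_.age)(nonNegativeAge, message)

      def main(args: Array[String]): Unit = {
        // Valid input: accumulating result wraps the original value.
        println(userValidator.validate(User("Ada", 36)))
        // Invalid input: errors ".name is invalid" and ".age is invalid" are accumulated.
        println(userValidator.validate(User("", -1)))
      }
    }

As the Id-parser tests at the top of ParserSpec suggest, such a validator could also be turned into a parser via `toParser` when a parsed value rather than a validation result is needed.
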