├── project
│   ├── build.properties
│   └── plugins.sbt
├── .idea
│   ├── .gitignore
│   └── codeStyles
│       ├── codeStyleConfig.xml
│       └── Project.xml
├── screenshot_code_completion.png
├── .scalafmt.conf
├── src
│   ├── main
│   │   └── scala
│   │       ├── internal
│   │       │   ├── InspectTree.scala
│   │       │   ├── Utils.scala
│   │       │   └── MacroUtils.scala
│   │       ├── HkdUtils.scala
│   │       ├── BasicTypeclasses.scala
│   │       └── HkdFor.scala
│   └── test
│       └── scala
│           ├── TypeclassesTest.scala
│           └── MacroHkdTest.scala
├── LICENSE.MD
├── .gitignore
├── .github
│   └── workflows
│       └── ci.yml
└── README.MD
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version = 1.10.5
2 |
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 |
--------------------------------------------------------------------------------
/screenshot_code_completion.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tschuchortdev/hkd4s/HEAD/screenshot_code_completion.png
--------------------------------------------------------------------------------
/.idea/codeStyles/codeStyleConfig.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/.scalafmt.conf:
--------------------------------------------------------------------------------
1 | version = 3.8.1
2 | runner.dialect = scala3
3 | maxColumn = 130
4 | lineEndings = unix
5 | danglingParentheses.defnSite = false
6 | danglingParentheses.callSite = false
7 | #danglingParentheses.bracketDefnSite = false
8 | #danglingParentheses.bracketCallSite = false
9 | align.openParenDefnSite = true
10 | align.openParenCallSite = true
11 | align.tokens = some
12 | assumeStandardLibraryStripMargin = true
13 | align.stripMargin = true
14 | indent.callSite = 2
15 | newlines.source=keep
16 |
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("org.jetbrains.scala" % "sbt-ide-settings" % "1.1.2")
2 | addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21")
3 | addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1")
4 | addSbtPlugin("nl.gn0s1s" % "sbt-dotenv" % "3.0.0")
5 |
6 | // Uses Aether to resolve maven dependencies (might be useful for resolving SNAPSHOTs) and generates the
7 | // maven-metadata.xml correctly, which is needed by tooling like ScalaSteward. sbt-aether-deploy would be an alternative
8 | // with better logging that only changes deployment, not dependency resolution, but unfortunately does not work with sbt-sonatype
9 | addSbtPlugin("org.scala-sbt" % "sbt-maven-resolver" % "0.1.0")
10 |
11 | addSbtPlugin("com.github.sbt" % "sbt-dynver" % "5.0.1")
--------------------------------------------------------------------------------
/src/main/scala/internal/InspectTree.scala:
--------------------------------------------------------------------------------
1 | package com.tschuchort.hkd
2 | package internal
3 |
4 | import scala.quoted.*
5 | import scala.annotation.MacroAnnotation
6 | import scala.annotation.experimental
7 |
8 | @experimental
9 | protected[tschuchort] class InspectTree extends MacroAnnotation {
10 | override def transform(using q: Quotes)(tree: q.reflect.Definition): List[q.reflect.Definition] =
11 | import q.reflect.{*, given}
12 | // printTastyTypeRepr(tree.symbol.owner.info.memberType(tree.symbol))
13 | println("-----------------------------------------------------")
14 | println(Printer.TreeShortCode.show(tree))
15 | printTastyTree(tree)
16 | List(tree)
17 |
18 | }
19 |
20 | protected[tschuchort] transparent inline def inspectTree(inline expr: Any): Any = ${ inspectTreeImpl('expr) }
21 |
22 | private def inspectTreeImpl(expr: Expr[Any])(using q: Quotes): Expr[Any] =
23 | import q.reflect.*
24 | printTastyTree(expr.asTerm)
25 | expr
26 |
--------------------------------------------------------------------------------
/LICENSE.MD:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Thilo Schuchort
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/.idea/codeStyles/Project.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/src/main/scala/HkdUtils.scala:
--------------------------------------------------------------------------------
1 | package com.tschuchort.hkd
2 |
3 | import cats.data.Const
4 | import com.tschuchort.hkd.internal.{ArrayProduct, `*`, `.`}
5 | import shapeless3.deriving.{K11, summonAsArray}
6 |
7 | import scala.deriving.Mirror
8 |
9 | /** Summon [[HkdFieldInstances]] to obtain instances for every possible field of a higher-kinded data type [[D]]. */
10 | trait HkdFieldInstances[D[_[_]], F[_], I[_]] {
11 | def zip[G[_]](df: D[G]): D[[A] =>> (G[A], I[F[A]])]
12 |
13 | def instancesFor[G[_]](d: D[G])(using FunctorK[D]): D[I `.` F] =
14 | zip[G](d).mapK[I `.` F]([A] => (fieldAndInstance: (G[A], I[F[A]])) => fieldAndInstance._2)
15 | }
16 | object HkdFieldInstances {
17 | class OfProduct[D[_[_]], F[_], I[_]](private val fieldInstances: D[I `.` F])(using
18 | m: Mirror.Product { type MirroredType[G[_]] = D[G]; type MirroredElemTypes[G[_]] <: Tuple }
19 | ) extends HkdFieldInstances[D, F, I] {
20 | override def zip[G[_]](df: D[G]): D[[A] =>> (G[A], I[F[A]])] = {
21 | val fieldsArr = Tuple.fromProduct(df.asInstanceOf[Product]).toArray
22 | val instancesArr = Tuple.fromProduct(fieldInstances.asInstanceOf[Product]).toArray
23 | m.fromProduct(ArrayProduct(fieldsArr.zip(instancesArr).asInstanceOf[Array[Any]])).asInstanceOf[D[G * (I `.` F)]]
24 | }
25 | }
26 |
27 | class OfSum[D[_[_]], F[_], I[_]](private val casesInstances: Array[HkdFieldInstances[D, F, I]])(using
28 | m: Mirror.Sum { type MirroredType[G[_]] = D[G]; type MirroredElemTypes[G[_]] <: Tuple }
29 | ) extends HkdFieldInstances[D, F, I] {
30 | override def zip[G[_]](df: D[G]): D[[A] =>> (G[A], I[F[A]])] = {
31 | casesInstances(m.ordinal(df.asInstanceOf[m.MirroredMonoType])).zip(df)
32 | }
33 | }
34 |
35 | inline given [D[_[_]], F[_], I[_]](using
36 | m: Mirror.Product { type MirroredType[G[_]] = D[G]; type MirroredElemTypes[G[_]] <: Tuple }
37 | ): HkdFieldInstances.OfProduct[D, F, I] =
38 | HkdFieldInstances.OfProduct(
39 | m.fromProduct(ArrayProduct(summonAsArray[m.MirroredElemTypes[I `.` F]])).asInstanceOf[D[I `.` F]]
40 | )
41 |
42 | inline given [D[_[_]], F[_], I[_]](using
43 | m: Mirror.Sum { type MirroredType[G[_]] = D[G]; type MirroredElemTypes[G[_]] <: Tuple }
44 | ): HkdFieldInstances.OfSum[D, F, I] =
45 | HkdFieldInstances.OfSum(
46 | summonAsArray[K11.LiftP[[C[_[_]]] =>> HkdFieldInstances[C, F, I], m.MirroredElemTypes]]
47 | .asInstanceOf[Array[HkdFieldInstances[D, F, I]]]
48 | )
49 | }
50 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ### Intellij template
2 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
3 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
4 |
5 | # User-specific stuff
6 | .idea/**/workspace.xml
7 | .idea/**/tasks.xml
8 | .idea/**/usage.statistics.xml
9 | .idea/**/shelf
10 | .idea/vcs.xml
11 | .idea/jsLibraryMappings.xml
12 | .idea/misc.xml
13 | .idea/scala_settings.xml
14 |
15 | # AWS User-specific
16 | .idea/**/aws.xml
17 |
18 | # Generated files
19 | .idea/**/contentModel.xml
20 |
21 | # Sensitive or high-churn files
22 | .idea/**/dataSources/
23 | .idea/**/dataSources.ids
24 | .idea/**/dataSources.local.xml
25 | .idea/**/sqlDataSources.xml
26 | .idea/**/dynamic.xml
27 | .idea/**/uiDesigner.xml
28 | .idea/**/dbnavigator.xml
29 | .idea/scala_compiler.xml
30 | .idea/sbt.xml
31 |
32 | # Gradle
33 | .idea/**/gradle.xml
34 | .idea/**/libraries
35 |
36 | # Gradle and Maven with auto-import
37 | # When using Gradle or Maven with auto-import, you should exclude module files,
38 | # since they will be recreated, and may cause churn. Uncomment if using
39 | # auto-import.
40 | .idea/artifacts
41 | .idea/compiler.xml
42 | .idea/jarRepositories.xml
43 | .idea/modules.xml
44 | .idea/*.iml
45 | .idea/modules
46 | *.iml
47 | *.ipr
48 |
49 | # CMake
50 | cmake-build-*/
51 |
52 | # Mongo Explorer plugin
53 | .idea/**/mongoSettings.xml
54 |
55 | # File-based project format
56 | *.iws
57 |
58 | # IntelliJ
59 | out/
60 |
61 | # mpeltonen/sbt-idea plugin
62 | .idea_modules/
63 |
64 | # JIRA plugin
65 | atlassian-ide-plugin.xml
66 |
67 | # Cursive Clojure plugin
68 | .idea/replstate.xml
69 |
70 | # SonarLint plugin
71 | .idea/sonarlint/
72 |
73 | # Crashlytics plugin (for Android Studio and IntelliJ)
74 | com_crashlytics_export_strings.xml
75 | crashlytics.properties
76 | crashlytics-build.properties
77 | fabric.properties
78 |
79 | # Editor-based Rest Client
80 | .idea/httpRequests
81 |
82 | # Android studio 3.1+ serialized cache file
83 | .idea/caches/build_file_checksums.ser
84 |
85 | ### JEnv template
86 | # JEnv local Java version configuration file
87 | .java-version
88 |
89 | # Used by previous versions of JEnv
90 | .jenv-version
91 |
92 | ### Java template
93 | # Compiled class file
94 | *.class
95 |
96 | # Log file
97 | *.log
98 |
99 | # BlueJ files
100 | *.ctxt
101 |
102 | # Mobile Tools for Java (J2ME)
103 | .mtj.tmp/
104 |
105 | # Package Files #
106 | *.jar
107 | *.war
108 | *.nar
109 | *.ear
110 | *.zip
111 | *.tar.gz
112 | *.rar
113 |
114 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
115 | hs_err_pid*
116 | replay_pid*
117 |
118 | ### Scala template
119 |
120 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
121 |
122 | /project/.bloop/hkd-playground-build/
123 |
124 | # Generated Metals (Scala Language Server) files
125 | # Reference: https://scalameta.org/metals/
126 | .metals/
127 | .bloop/
128 | project/metals.sbt
129 |
130 | **/target
131 |
132 | project/boot/
133 | project/plugins/project/
134 | **/.bloop
135 | .bsp/
136 | .ammonite/
137 | metals.sbt
138 | metals/project/
139 | coursier
140 | .vscode/
141 | .scala-build/
142 |
143 | local.*
144 |
145 | .DS_Store
146 |
147 | # Scala-IDE specific
148 | .scala_dependencies
149 | .worksheet
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: Continuous Integration
2 |
3 | on:
4 | push:
5 | tags:
6 | - 'v[0-9]+.[0-9]+.[0-9]+'
7 | branches:
8 | - '**'
9 | create:
10 | tags:
11 | - 'v[0-9]+.[0-9]+.[0-9]+'
12 | pull_request:
13 | workflow_dispatch:
14 |
15 | permissions:
16 | contents: write
17 |
18 | jobs:
19 | test:
20 | runs-on: ubuntu-latest
21 | steps:
22 | - uses: actions/checkout@v4
23 | with:
24 | fetch-depth: 0 # needed for sbt-dynver to load all tags
25 | fetch-tags: true
26 | - name: Set up JDK
27 | uses: actions/setup-java@v4
28 | with:
29 | java-version: '17'
30 | distribution: 'temurin'
31 | cache: 'sbt'
32 | - uses: sbt/setup-sbt@v1
33 | - name: Run tests
34 | run: sbt test
35 |
36 | # Optional: This step uploads information to the GitHub dependency graph, unblocking Dependabot alerts for the repository
37 | - name: Upload dependency graph
38 | uses: scalacenter/sbt-dependency-submission@ab086b50c947c9774b70f39fc7f6e20ca2706c91
39 | publish:
40 | name: Publish
41 | runs-on: ubuntu-latest
42 | needs: [ test ]
43 | if: (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags')) &&
44 | (github.event_name == 'push' || github.event_name == 'create') &&
45 | github.repository == 'tschuchortdev/hkd4s'
46 | concurrency:
47 | group: publishing
48 | cancel-in-progress: true
49 | env:
50 | SONATYPE_NEXUS_USERNAME: ${{ secrets.SONATYPE_NEXUS_USERNAME }}
51 | SONATYPE_NEXUS_PASSWORD: ${{ secrets.SONATYPE_NEXUS_PASSWORD }}
52 | PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }}
53 | PGP_SECRET_KEY: ${{ secrets.PGP_SECRET_KEY }}
54 | permissions:
55 | contents: write
56 | steps:
57 | - uses: actions/checkout@v4
58 | with:
59 | fetch-depth: 0 # needed for sbt-dynver to load all tags
60 | fetch-tags: true
61 | - name: Set up JDK
62 | uses: actions/setup-java@v4
63 | with:
64 | java-version: '17'
65 | distribution: 'temurin'
66 | cache: 'sbt'
67 | - uses: sbt/setup-sbt@v1
68 | - name: Check if snapshot version
69 | shell: bash
70 | run: |
71 | VERSION_NAME=$(sbt "print version" --error -Dsbt.log.noformat=true | tail -n 1)
72 | echo "VERSION_NAME=$VERSION_NAME"
73 | echo "VERSION_NAME=$VERSION_NAME" >> "$GITHUB_ENV"
74 | IS_SNAPSHOT_VERSION=$(sbt "print isSnapshot" --error -Dsbt.log.noformat=true | tail -n 1)
75 | echo "IS_SNAPSHOT_VERSION=$IS_SNAPSHOT_VERSION"
76 | echo "IS_SNAPSHOT_VERSION=$IS_SNAPSHOT_VERSION" >> "$GITHUB_ENV"
77 | - name: Publish to Sonatype Nexus
78 | shell: bash
79 | run: |
80 | echo -n "$PGP_SECRET_KEY" | gpg --batch --import # batch option suppresses password prompt on import
81 | echo "public keys:"
82 | gpg --list-keys
83 | echo "secret keys:"
84 | gpg --list-secret-keys
85 | sbt doRelease
86 | ARTIFACT_PATHS=$(sbt "print signedArtifactPaths" --error -Dsbt.log.noformat=true)
87 | echo ARTIFACT_PATHS="$ARTIFACT_PATHS"
88 | {
89 | echo 'ARTIFACT_PATHS<<EOF'
90 | echo "$ARTIFACT_PATHS"
91 | echo 'EOF'
92 | } >> "$GITHUB_ENV"
93 | - name: Make release on Github
94 | uses: softprops/action-gh-release@v2
95 | if: ${{ env.IS_SNAPSHOT_VERSION == 'false' }}
96 | with:
97 | name: ${{ env.VERSION_NAME }}
98 | tag_name: v${{ env.VERSION_NAME }}
99 | files: |
100 | ${{ env.ARTIFACT_PATHS }}
101 | fail_on_unmatched_files: true
102 |
--------------------------------------------------------------------------------
/src/test/scala/TypeclassesTest.scala:
--------------------------------------------------------------------------------
1 | package com.tschuchort.hkd
2 |
3 | import cats.Show
4 | import cats.data.Const
5 | import cats.effect.IO
6 | import internal.`.`
7 | import cats.syntax.all
8 | import cats.effect.unsafe.implicits.global
9 |
10 | class TypeclassesTest extends munit.FunSuite {
11 | sealed trait FooHK[F[_]]
12 | case class Foo1HK[F[_]](a: F[Int]) extends FooHK[F]
13 | sealed trait Foo2HK[F[_]] extends FooHK[F] { val b: F[Int] }
14 | case class Foo21HK[F[_]](b: F[Int], c1: F[Int]) extends Foo2HK[F]
15 | case class Foo22HK[F[_]](b: F[Int], c2: F[Int]) extends Foo2HK[F]
16 |
17 | test("FunctorK maps product") {
18 | val foo = Foo1HK[Option](Some(1))
19 | assertEquals(
20 | FunctorK[Foo1HK].mapK(foo)([A] => (a: Option[A]) => a.toList),
21 | Foo1HK(List(1))
22 | )
23 | }
24 |
25 | test("FunctorK maps sum") {
26 | val foo = Foo21HK[Option](b = Some(1), c1 = None)
27 | assertEquals(
28 | FunctorK[Foo2HK].mapK(foo)([A] => (a: Option[A]) => a.toList),
29 | Foo21HK(List(1), List())
30 | )
31 | }
32 |
33 | test("FunctorK maps nested sum") {
34 | val foo: FooHK[Option] = Foo21HK[Option](b = Some(1), c1 = None)
35 | assertEquals(
36 | FunctorK[FooHK].mapK(foo)([A] => (a: Option[A]) => a.toList),
37 | Foo21HK(List(1), List())
38 | )
39 | }
40 |
41 | test("FunctorK maps phantom typed") {
42 | case class BarHK[F[_]](a: Int)
43 | val bar = BarHK[Option](1)
44 | assertEquals(
45 | FunctorK[BarHK].mapK(bar)([A] => (a: Option[A]) => a.toList),
46 | BarHK[List](1)
47 | )
48 | }
49 |
50 | test("FunctorK maps product of functions") {
51 | case class BarHK[F[_]](f: Int => F[Int])
52 | val bar = BarHK[Option]((x: Int) => Some(x))
53 | val mapped = FunctorK[BarHK].mapK(bar)([A] => (a: Option[A]) => a.toList)
54 | assertEquals(
55 | mapped.f(1),
56 | bar.f(1).toList
57 | )
58 | }
59 |
60 | test("FunctorK maps product of functions with multiple args") {
61 | case class BarHK[F[_]](f: Int => Int => Int => F[String])
62 | val bar = BarHK[Option]((x1: Int) => (x2: Int) => (x3: Int) => Some(s"$x1$x2$x3"))
63 | val mapped = FunctorK[BarHK].mapK(bar)([A] => (a: Option[A]) => a.toList)
64 | assertEquals(
65 | mapped.f(1)(2)(3),
66 | bar.f(1)(2)(3).toList
67 | )
68 | }
69 |
70 | test("FunctorK maps recursive ADT") {
71 | sealed trait BarHK[F[_]]
72 | case class BarHKLeaf[F[_]](a: F[Int]) extends BarHK[F]
73 | case class BarHKBranch[F[_]](b: F[Int], tree: BarHK[F]) extends BarHK[F]
74 |
75 | val bar = BarHKBranch[Option](Some(1), BarHKLeaf(Some(2)))
76 | val mapped = FunctorK[BarHK].mapK(bar)([A] => (a: Option[A]) => a.toList)
77 | assertEquals(mapped, BarHKBranch[List](List(1), BarHKLeaf(List(2))))
78 | }
79 |
80 | test("FunctorK maps recursive wrapped ADT") {
81 | case class BarHK[F[_]](a: F[Int], tree: Option[BarHK[F]])
82 |
83 | val bar = BarHK[Option](Some(1), Some(BarHK(Some(2), None)))
84 | val mapped = FunctorK[BarHK].mapK(bar)([A] => (a: Option[A]) => a.toList)
85 | assertEquals(mapped, BarHK[List](List(1), Some(BarHK(List(2), None))))
86 | }
87 |
88 | test("FunctorK maps product with givens") {
89 | val foo = Foo1HK[Option](Some(1))
90 | val mapped = FunctorK[Foo1HK].mapKGiven(foo)[Show]([T] =>
91 | (field: Option[T]) => (s: Show[Option[T]]) ?=> Const[String, T](Show[Option[T]].show(field)))
92 |
93 | assertEquals(mapped, Foo1HK(Const("Some(1)")))
94 | }
95 |
96 | test("FunctorK maps nested sum with givens") {
97 | val foo = Foo22HK[Option](Some(1), Some(2))
98 | val mapped = FunctorK[Foo22HK].mapKGiven(foo)[Show]([T] =>
99 | (field: Option[T]) => (s: Show[Option[T]]) ?=> Const[String, T](Show[Option[T]].show(field)))
100 |
101 | assertEquals(mapped, Foo22HK(Const("Some(1)"), Const("Some(2)")))
102 | }
103 |
104 | sealed trait ContraHK[F[_]]
105 | case class Contra1HK[F[_]](a: F[Int] => String) extends ContraHK[F]
106 | sealed trait Contra2HK[F[_]] extends ContraHK[F] { val b: F[Int] => String }
107 | case class Contra21HK[F[_]](b: F[Int] => String, c1: F[Int] => String) extends Contra2HK[F]
108 | case class Contra22HK[F[_]](b: F[Int] => String, c2: F[Int] => String) extends Contra2HK[F]
109 |
110 | test("ContravariantK maps product") {
111 | val contra = Contra1HK({ (x: List[Int]) => x.toString })
112 | val mapped = ContravariantK[Contra1HK].contramapK[List](contra)[Option]([A] => (x: Option[A]) => x.toList)
113 | assertEquals(mapped.a(Option(1)), contra.a(List(1)))
114 | }
115 |
116 | test("ContravariantK maps nested sum") {
117 | val contra = Contra21HK(b = { (x: List[Int]) => x.toString }, c1 = { (x: List[Int]) => x.toString })
118 | val mapped = ContravariantK[ContraHK].contramapK[List](contra)[Option]([A] => (x: Option[A]) => x.toList)
119 | mapped match
120 | case mapped: Contra21HK[Option] =>
121 | assertEquals(mapped.b(Option(1)), contra.b(List(1)))
122 | assertEquals(mapped.c1(Option(1)), contra.c1(List(1)))
123 | case _ => throw AssertionError(s"Expected Contra21HK, was: $mapped")
124 | }
125 |
126 | test("TraverseK sequences effects") {
127 | case class BarHK[F[_]](a: F[Int], b: F[Int], c: F[Int])
128 | var res = ""
129 | // Have to use IO here because Writer won't type-check for some reason
130 | val bar = BarHK[IO `.` Option](IO { res += "1"; Some(1) }, IO { res += "2"; Some(2) }, IO { res += "3"; Some(3) })
131 | val bar2 = TraverseK[BarHK].sequenceK(bar).unsafeRunSync()
132 | assertEquals(res, "123")
133 | assertEquals(bar2, BarHK[Option](Some(1), Some(2), Some(3)))
134 | }
135 | }
136 |
--------------------------------------------------------------------------------
/src/main/scala/internal/Utils.scala:
--------------------------------------------------------------------------------
1 | package com.tschuchort.hkd
2 | package internal
3 |
4 | import scala.util.chaining.*
5 | import scala.util.boundary
6 | import scala.annotation.targetName
7 | import scala.collection.IndexedSeqView
8 | import scala.collection.mutable
9 | import scala.deriving.Mirror
10 | import scala.quoted.*
11 | import cats.implicits.*
12 | import cats.Functor
13 | import scala.annotation.tailrec
14 |
15 | protected[tschuchort] class ArrayProduct(val elems: Array[Any]) extends Product:
16 | def canEqual(that: Any): Boolean = true
17 | def productElement(n: Int): Any = elems(n)
18 | def productArity: Int = elems.length
19 | override def productIterator: Iterator[Any] = elems.iterator
20 |
21 | /** Records the types of all leafs (case classes, case objects, enum cases) of a deep ADT hierarchy that may contain multiple
22 | * levels of sealed traits.
23 | */
24 | trait AdtHierarchyLeafs[T] { type MirroredLeafTypes <: Tuple }
25 |
26 | object AdtHierarchyLeafs:
27 | transparent inline given [T]: AdtHierarchyLeafs[T] = ${ deriveAdtLeafsImpl[T] }
28 |
29 | private def deriveAdtLeafsImpl[T: Type](using q: Quotes): Expr[AdtHierarchyLeafs[T]] =
30 | import q.reflect.{*, given}
31 |
32 | def gatherLeafs(s: TypeRepr): Seq[TypeRepr] = s.asType match
33 | case '[s] =>
34 | Expr.summonOrAbort[Mirror.Of[s]] match
35 | case '{ $m: Mirror.ProductOf[s] } =>
36 | Seq(TypeRepr.of[s])
37 | case '{
38 | type elems <: Tuple;
39 | $m: Mirror.SumOf[s] { type MirroredElemTypes = `elems` }
40 | } =>
41 | tupleToTypeReprs[elems].foldLeft(Seq.empty[TypeRepr]) { case (acc, elemTpr) =>
42 | acc ++ gatherLeafs(elemTpr)
43 | }
44 |
45 | type Leafs <: Tuple
46 | given Type[Leafs] = tupleOfTypeReprs(gatherLeafs(TypeRepr.of[T])).asType.asInstanceOf
47 |
48 | '{ new AdtHierarchyLeafs[T] { override type MirroredLeafTypes = Leafs } }
49 |
50 | extension (b1: Boolean)
51 | protected[tschuchort] infix def implies(b2: Boolean): Boolean = !b1 || b2
52 |
53 | extension [T](s: IndexedSeq[T])
54 | protected[tschuchort] def movingWindow(windowSize: Int): Seq[IndexedSeqView[T]] =
55 | require(windowSize >= 1)
56 | val sLength = s.length
57 | List.unfold(0) { currentIndex =>
58 | if currentIndex <= sLength - 1 then
59 | Some(
60 | (
61 | IndexedSeqView.Slice(s, currentIndex, Math.min(currentIndex + windowSize, sLength)),
62 | currentIndex + 1
63 | )
64 | )
65 | else None
66 | }
67 |
68 | extension [A](s: Seq[A])
69 | /** Checks whether all the elements in the sequence are distinct from each other by [[f]] */
70 | protected[tschuchort] def allDistinctBy[B](f: A => B): Boolean = s.distinctBy(f).length == s.length
71 |
72 | extension [A](aas: Seq[A])
73 | /** Matches the elements in [[aas]] with the elements in [[bs]] according to the key function. The order of [[aas]] will be
74 | * maintained, optionally appending an additional [[None]] element at the end of the list to contain all the elements in [[bs]]
75 | * that did not have a corresponding match in [[aas]]. Each sequence of [[B]] matches for an [[A]] is in the order that they
76 | * appeared in the [[bs]] list.
77 | */
78 | protected[tschuchort] def matchBy[B, K](bs: Seq[B])(byA: A => K)(byB: B => K): Seq[(Option[A], Seq[B])] =
79 | val unmatchedBs = new mutable.HashMap[K, mutable.ListBuffer[B]](
80 | bs match {
81 | case _: IndexedSeq[?] => bs.length // Only when indexed, to avoid traversing the Seq
82 | case _ => mutable.HashMap.defaultInitialCapacity
83 | },
84 | mutable.HashMap.defaultLoadFactor
85 | ).tap { m =>
86 | bs.foreach { b =>
87 | m.updateWith(key = byB(b)) {
88 | case Some(otherBsForKey) => Some(otherBsForKey.appended(b))
89 | case None => Some(mutable.ListBuffer(b))
90 | }
91 | }
92 | }
93 |
94 | (for (a <- aas)
95 | // Note: this MUST be executed before the concat operation, or else the result will be wrong because
96 | // [[unmatchedBs]] is mutable!
97 | yield (Some(a), unmatchedBs.remove(byA(a)).map(_.toList).getOrElse(Seq.empty)))
98 | ++ {
99 | val leftovers = unmatchedBs.iterator.flatMap { case (_, bsForKey) => bsForKey }.toSeq
100 | leftovers match {
101 | case Seq() => Seq.empty[(Option[A], Seq[B])]
102 | case leftovers => Seq((Option.empty[A], leftovers))
103 | }
104 | }
105 |
106 | extension [CC[_]: Functor, A](coll: CC[A])
107 | /** Applies the partial function to every element to narrow the type, but instead of dropping unmatched elements like
108 | * [[Seq.collect]], returns `None` for the entire list.
109 | */
110 | protected[tschuchort] def collectAllOrNone[B](pf: PartialFunction[A, B]): Option[CC[B]] =
111 | boundary {
112 | Some(coll.map { x =>
113 | pf.applyOrElse(x, { _ => boundary.break(None) })
114 | })
115 | }
116 |
117 | /** Functional composition of two type functions */
118 | @targetName("Compose")
119 | private[tschuchort] infix type `.`[F[_], G[_]] = [A] =>> F[G[A]]
120 |
121 | @targetName("Product")
122 | private[tschuchort] infix type `*`[F[_], G[_]] = [A] =>> (F[A], G[A])
123 |
124 | trait TypeName[T <: AnyKind]:
125 | val value: String
126 |
127 | object TypeName:
128 | transparent inline given [T <: AnyKind]: TypeName[T] = ${ givenTypeNameImpl[T] }
129 |
130 | private def givenTypeNameImpl[T <: AnyKind: Type](using q: Quotes): Expr[TypeName[T]] =
131 | import q.reflect.{*, given}
132 |
133 | // Removes redundant lambda expressions. ONLY SAFE FOR GETTING THE NAME. The resulting TypeRepr can not generally be used
134 | // in a position that expects * -> * kind.
135 | @tailrec def etaReduceForName(t: TypeRepr): TypeRepr =
136 | t match
137 | case tl @ TypeLambda(paramNames, paramBounds, AppliedType(applied, appliedArgs))
138 | if paramNames.size == appliedArgs.size
139 | && appliedArgs.zipWithIndex.forall { case (ParamRef(binder, paramIndex), i) =>
140 | binder == tl && paramIndex == i
141 | } =>
142 | etaReduceForName(applied)
143 | case t => t
144 |
145 | val name: String =
146 | Printer.TypeReprShortCode
147 | .show(
148 | etaReduceForName(TypeRepr.of[T]).widen.widenTermRefByName.simplified
149 | )
150 | .replace(" >: Nothing", "")
151 | .replace(" <: Any", "")
152 | .replaceAll("_\\$\\d+", "_") match
153 | case s"[$paramList] =>> $rhs" if paramList == rhs => rhs
154 | case s => s
155 |
156 | '{ new TypeName[T] { val value: String = ${ Expr(name) } } }
157 |
158 | object ImplicitsPriority:
159 | open class L1
160 | object L1 { given L1() }
161 |
162 | open class L2 extends L1
163 | object L2 { given L2() }
164 |
165 | open class L3 extends L2
166 | object L3 { given L3() }
167 |
168 | open class L4 extends L3
169 | object L4 { given L4() }
170 |
171 | open class L5 extends L4
172 | object L5 { given L5() }
173 |
--------------------------------------------------------------------------------
/README.MD:
--------------------------------------------------------------------------------
1 | # hkd4s: Higher Kinded Data in Scala 3
2 |
3 | -----------
4 |
5 | 
6 |
7 |
8 | hkd4s is a Scala 3 library that provides basic functions and typeclasses for working with higher kinded data. Higher kinded data is an idiom, most commonly known among Haskell library authors, where a data type is declared with a higher kinded type parameter `F[_]` and every field of the type is wrapped in `F[_]`:
9 |
10 | ```scala
11 | case class User(name: String, age: Int)
12 | case class UserOf[F[_]](name: F[String], age: F[Int])
13 |
14 | case class Config(host: String, port: Int, maxConnections: Int)
15 | case class ConfigOf[F[_]](host: F[String], port: F[Int], maxConnections: F[Int])
16 | ```
17 |
18 | The hkd4s library provides generically derived typeclasses for working with higher kinded data types and can even __automatically generate__ those types for case classes.
19 |
20 | ## Applications of higher kinded data
21 |
22 | The higher kinded data pattern allows us to abstract over the basic structure of the data type and declare many different versions of it without boilerplate. For example:
23 |
24 | - `ConfigOf[Option]` could be the result of parsing a config file where only some of the configuration options are overwritten:
25 |
26 | ```yaml
27 | config:
28 | - host: 127.0.0.1
29 | - port: 44
30 | ```
31 |
32 | parsed into
33 |
34 | ```scala
35 | ConfigOf[Option](
36 | host = Some("127.0.0.1"),
37 | port = Some(44),
38 | maxConnections = None)
39 | ```
40 | - `ConfigOf[List]` would then be the result of combining several sources of config options (config file, environment variables, defaults); see the sketch after this list.
41 | - `UserOf[Seq]` could be a data frame of users that is stored in a columnar format:
42 |
43 | ```scala
44 | val usersDataFrame = UserOf[Seq](
45 | name = Seq("Albert", "Benedict", "Christopher", "Diane"),
46 | age = Seq(59, 42, 22, 36))
47 |
48 | val meanAge = mean(usersDataFrame.age)
49 | ```
50 | - `UserOf[Validated]` could be the result of validating each form field in a sign-up form
51 | - When you want to send back error messages for the validated `User` form, each error message is a `String` regardless of the field type. `UserOf[Const[String, _]]` turns every field into a `String`:
52 |
53 | ```scala
54 | type FormError[A] = Const[String, A]
55 | val formErrors = UserOf[FormError](
56 | name = Const("name must not be empty"),
57 | age = Const("age must be between 0 and 99"))
58 | ```
59 | - `ConfigOf[Const[String, _]]` could also be documentation for the `Config` class: a doc string for every field:
60 | ```scala
61 | type DocString[A] = Const[String, A]
62 | val configDoc = ConfigOf[DocString](
63 | host = Const("host name of the server"),
64 | port = Const("the server will listen on this port for new connections"),
65 | maxConnections = Const("maximum simultaneous connections"))
66 | ```
67 | - `UserOf[Id]` is a plain `User` since `type Id[X] = X`
68 |
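To make the `ConfigOf[List]` item above concrete, here is a small sketch (the concrete values are made up for illustration):

```scala
// Each field collects the values from all sources, highest priority first (hypothetical values):
val configSources = ConfigOf[List](
  host = List("127.0.0.1"),     // from the config file only
  port = List(44, 8080),        // value from the file first, then the default
  maxConnections = List(100))   // default only
```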
69 | Higher kinded data appears in many places, if you know to look for it.
70 |
71 | ## Higher Kinded Data as Metadata
72 |
73 | Higher kinded data types, especially those with a `Const[X, _]` argument, can be used to hold well-typed metadata about an object's fields. In the `ConfigOf[DocString]` example above, the higher kinded data instance holds not config values, but descriptive information about every config field. Higher kinded data classes can thus be useful in any place where we need metadata at runtime; usually this sort of metadata is supplied by field annotations in JVM languages:
74 |
75 | ```scala
76 | case class Address(street: String,
77 | @JsValidatePositive()
78 | houseNumber: Int,
79 | @JsName("zip_code")
80 | zipCode: String)
81 | ```
82 |
83 | The point of those annotations is to overwrite default behaviour, so we can do the same thing by... overwriting fields in a record of default behaviours:
84 |
85 | ```scala
86 | val addressReaders: AddressOf[JsReads] = defaultJsReaders[Address]
87 |
88 | given JsReads[Address] = readsFromRecord(
89 | addressReaders.copy(
90 | houseNumber = addressReaders.houseNumber.map { (x: Int) =>
91 | require(x > 0)
92 | x
93 | },
94 | zipCode = addressReaders.zipCode.rename("zip_code")
95 | ))
96 | ```
97 |
98 | The advantage here is that we do not need to control the declaration of `Address` to be able to put annotations on the fields. Furthermore, we are much more flexible in the kind of changes that we can make. We no longer need a pre-defined `JsValidatePositive` annotation class. Any kind of validation imaginable can be done easily.
99 |
100 | ## Typeclasses
101 |
102 | The real power of higher-kinded data comes from generic functions that can apply logic to any arbitrary higher-kinded data type instead of laboriously writing out the same code for each field. A basic suite of typeclasses is provided with the hkd4s library to easily and generically manipulate higher-kinded data types. All of these typeclasses should be familiar to functional programmers (`Functor`, `Pure`, `Apply`/`Applicative`, `Traverse`, etc.), except that they're at a higher kind and work on the wrapper type `F[_]` instead of the field types. A few of the most important typeclasses will be shown below:
103 |
104 | ### FunctorK
105 |
106 | `FunctorK[D[_[_]]]` allows us to map the wrapper type from `F[_]` to `G[_]`.
107 |
108 | ```scala
109 | trait FunctorK[D[_[_]]] extends InvariantK[D]:
110 | extension [F[_]](df: D[F])
111 | def mapK[G[_]](fg: [A] => F[A] => G[A]): D[G]
112 | ```
113 |
114 | Note that `mapK` is a higher-rank function where `fg` must work with *any possible* `A`.
115 |
116 | `FunctorK` can be used when we want to execute a generic operation on the wrapper type. For example, let's say that we have a `ConfigOf[List]` object containing a list of prioritized values for every config option, coming from different sources (default, read from file, read from environment variable, etc.). To get the final `Config`, we must first take the highest-priority value of each list (if there is one):
117 |
118 | ```scala
119 | val prioritizedConfigValues: ConfigOf[List] = ???
120 | val highestPrioConfigValue: ConfigOf[Option] = FunctorK[ConfigOf].mapK(prioritizedConfigValues)(
121 | [A] => (a: List[A]) => a.headOption // the head has the highest priority
122 | )
123 | ```
124 |
125 | Furthermore, `FunctorK` can also `mapK` with implicit given instances for each field:
126 |
127 | ```scala
128 | val user = UserOf[Id](name = "Alice", age = 32)
129 | val userFieldStrings: UserOf[Const[String, _]] = FunctorK[UserOf].mapKGiven(user)[Show](
130 | [A] => (field: A) => (showInst: Show[A]) ?=>
131 | Const[String, A](summon[Show[A]].show(field))
132 | )
133 | ```
134 |
135 | In contrast to the `zipK`/`map2K` functions of the type class `ApplyK`, this `mapKGiven` function will also work with coproduct types.
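For example, here is a minimal sketch of `mapKGiven` on a hand-written coproduct (the `ShapeOf` hierarchy is hypothetical, and `cats.Show` instances for the wrapped fields are assumed to be in scope):

```scala
import cats.Show
import cats.data.Const
import cats.implicits.*

sealed trait ShapeOf[F[_]]
case class CircleOf[F[_]](radius: F[Int]) extends ShapeOf[F]
case class RectOf[F[_]](width: F[Int], height: F[Int]) extends ShapeOf[F]

val shape: ShapeOf[Option] = RectOf[Option](Some(3), Some(4))
// The requested instance is a Show for the wrapped field type Option[A]
val rendered = FunctorK[ShapeOf].mapKGiven(shape)[Show](
  [A] => (field: Option[A]) => (s: Show[Option[A]]) ?=> Const[String, A](s.show(field)))
// e.g. RectOf(Const("Some(3)"), Const("Some(4)"))
```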
136 |
137 | ### PureK
138 |
139 | `PureK[D[_[_]]]` is a typeclass that allows us to create instances of `D[F]` from nothing.
140 |
141 | ```scala
142 | trait PureK[D[_[_]]]:
143 | def pureK[F[_]](gen: [A] => () => F[A]): D[F]
144 | ```
145 |
146 | Again, `pureK` is a higher-rank function where `gen` must work for any arbitrary field type `A`. Since we do not know `A` in advance and it is impossible to write a sensible function `def absurd[T]: T` that would create a value of arbitrary type, the ability to create an "empty" value must come from `F[_]`. Thus, `PureK` is often most useful when `F[_]` is sort of monoidal:
147 |
148 | ```scala
149 | val x: UserOf[List] = PureK[UserOf].pureK([A] => () => List.empty[A])
150 | ```
151 |
152 | ### ApplyK
153 |
154 | `ApplyK[D[_[_]]]` primarily offers the ability to zip two higher-kinded data types together.
155 |
156 | ```scala
157 | trait ApplyK[D[_[_]]] extends FunctorK[D]:
158 | extension [F[_]](df: D[F]) {
159 | def zipK[G[_]](dg: D[G]): D[[A] =>> (F[A], G[A])]
160 | def map2K[G[_], H[_]](dg: D[G])(h: [A] => (F[A], G[A]) => H[A]): D[H]
161 | }
162 | ```
163 |
164 | Example:
165 |
166 | ```scala
167 | val person1 = UserOf[Id](name = "Alice", age = 32)
168 | val person1 = UserOf[Id](name = "Bob", age = 35)
169 | val couple: UserOf[[A] =>> (A, A)] = ApplyK[UserOf].zipK(person1)(person2)
170 | ```
171 |
172 | We can also use `ApplyK` to map a function of multiple parameters directly over multiple higher-kinded data objects:
173 |
174 | ```scala
175 | def foo[A](l: A, r: A): A = ???
176 | ApplyK[UserOf].map2K(person1)(person2)([A] => (l: Id[A], r: Id[A]) => foo(l, r))
177 | ```
178 |
179 | Since two objects must be guaranteed to have exactly the same fields to be zipped together, `ApplyK` can only be derived for product types.
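A common use of `map2K` is merging two partial objects field by field, for instance two partially parsed configs. A minimal sketch (assuming hypothetical `fileConfig` and `envConfig` values of type `ConfigOf[Option]`):

```scala
val fileConfig: ConfigOf[Option] = ???
val envConfig: ConfigOf[Option] = ???

// Prefer the value from the file; fall back to the environment variable
val merged: ConfigOf[Option] = ApplyK[ConfigOf].map2K(fileConfig)(envConfig)(
  [A] => (file: Option[A], env: Option[A]) => file.orElse(env))
```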
180 |
181 | ### TraverseK
182 |
183 | `TraverseK[D[_[_]]]` is surely one of the most important typeclasses, as it allows us to turn the composition of wrapper types "inside out", floating `F[_]` outside of the higher-kinded data type `D`:
184 |
185 | ```scala
186 | trait TraverseK[D[_[_]]] extends FunctorK[D]:
187 | extension [F[+_], G[_]](dfg: D[[A] =>> F[G[A]]])
188 | def sequenceK(using Applicative[F]): F[D[G]]
189 | ```
190 |
191 | Consider the config example from before. We have combined all our different config sources `ConfigOf[List]` into a single `ConfigOf[Option]` using `FunctorK.mapK`, but at some point we want to have the real `Config` (i.e. `ConfigOf[Id]`) with every field holding a _single_ value, no more, no less. `FunctorK` cannot do this, but `TraverseK` can:
192 |
193 | ```scala
194 | val highestPrioConfigValue: ConfigOf[Option] = ???
195 | val finalConfig: Option[ConfigOf[Id]] = TraverseK[ConfigOf].sequenceK[Option, Id](highestPrioConfigValue)
196 | ```
197 |
198 | Obviously, the result can only be `Option[Config]` (`=:= Option[ConfigOf[Id]]`) because fields in `ConfigOf[Option]` may be `None`, and it would then be impossible to assemble a `Config` from a missing field.
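Putting the config pipeline together end to end, here is a sketch combining the `mapK` and `sequenceK` calls from above:

```scala
val prioritized: ConfigOf[List] = ???

// Take the highest-priority value per field, then require that every field is present
val finalConfig: Option[ConfigOf[Id]] =
  TraverseK[ConfigOf].sequenceK[Option, Id](
    FunctorK[ConfigOf].mapK(prioritized)([A] => (a: List[A]) => a.headOption))
```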
199 |
200 | ## Automatic type generation for case classes
201 |
202 | hkd4s contains sophisticated mechanisms for automatically generating higher kinded data types for case classes. All you have to do is reference the type `HkdFor` and everything will work:
203 |
204 | ```scala
205 | case class User(name: String, age: Int)
206 | val partialUser: HkdFor[User, Option] = HkdFor[User, Option](name = Some("hello"), age = None)
207 | println(partialUser.name)
208 | ```
209 |
210 | Automatic higher kinded data types can be generated for algebraic data types (case classes and sealed traits) of any depth; recursive types are currently unsupported. The generated types have `apply` functions/constructors and `copy` methods with named and/or unnamed arguments, and they even work with generic typeclass derivation in third-party libraries via Shapeless or `scala.deriving.Mirror`.
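For example, `copy` behaves like a case-class copy; a quick sketch reusing the `partialUser` value from above:

```scala
// Fields not mentioned in the copy keep their previous values
val updatedUser: HkdFor[User, Option] = partialUser.copy(age = Some(33))
```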
211 |
212 | When using VSCode with Metals we even get code completion for fields:
213 |
214 | 
215 |
216 | For convenience, a type alias `HkdFor_[T] = [F[_]] =>> HkdFor[T, F]` is provided to allow easy partial application:
217 |
218 | ```scala
219 | FunctorK[HkdFor_[Foo]] == FunctorK[HkdFor[Foo, _]]
220 | ```
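For instance, the generated types can be mapped with the typeclasses from above; a sketch reusing `partialUser`:

```scala
val userFieldsAsLists: HkdFor[User, List] =
  FunctorK[HkdFor_[User]].mapK(partialUser)([A] => (a: Option[A]) => a.toList)
```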
221 |
222 | Generated higher kinded data types can be destructured and pattern matched on the first type parameter safely (except for wildcard types):
223 |
224 | ```scala
225 | sealed trait Foo
226 | case class Foo1(a: Int) extends Foo
227 | case class Foo2(b: String) extends Foo
228 |
229 | val foo: HkdFor[Foo, Option] = HkdFor[Foo2, Option]("hello")
230 | foo match
231 | case HkdFor[Foo1, Option](a) => a: Int
232 | case HkdFor[Foo2, Option](b) => b: String
233 | ```
234 |
235 | *(Note: IntelliJ is currently unable to highlight pattern matches with type parameters and will show errors, but they work)*
236 |
237 | When matching a single case on a `HkdFor[T, F]` with known `T` and known `F`, the type arguments on the case definition can be omitted:
238 |
239 | ```scala
240 | (partialUser: HkdFor[User, Option]) match
241 | case HkdFor(name, age) => println(name + " " + age)
242 | ```
243 |
244 | It is usually not necessary to specify the `F` parameter when pattern matching, as it can not be tested at runtime anyway:
245 |
246 | ```scala
247 | (x: HkdFor[Foo, Option]) match
248 | case y: HkdFor_[Foo1] => y: HkdFor[Foo1, Option]
249 | ```
250 |
251 | ## Installation
252 |
253 | Add this to your build.sbt:
254 |
255 | ```scala
256 | libraryDependencies ++= Seq(
257 | "com.github.tschuchortdev" %% "hkd4s" % ""
258 | )
259 | ```
260 |
261 | where `<version>` is the latest published release that you can find [here](https://github.com/tschuchortdev/hkd4s/releases).
262 |
263 |
--------------------------------------------------------------------------------
/src/test/scala/MacroHkdTest.scala:
--------------------------------------------------------------------------------
1 | package com.tschuchort.hkd
2 |
3 | import cats.Id
4 | import shapeless3.deriving.{K0, K11}
5 |
6 | import scala.compiletime.testing.{typeCheckErrors, typeChecks}
7 | import scala.deriving.Mirror
8 | import scala.annotation.experimental
9 |
10 | @experimental
11 | class MacroHkdTest extends munit.FunSuite {
12 | sealed trait Foo
13 | case class Foo1(a: Int) extends Foo
14 | sealed trait Foo2 extends Foo { val b: Int }
15 | case class Foo21(b: Int, c1: Int) extends Foo2
16 | case class Foo22(b: Int, c2: Int, d: Int) extends Foo2
17 |
18 | test("case class can be constructed") {
19 | val h = HkdFor[Foo1, Option](Some(1))
20 | assertEquals(h.a, Some(1))
21 | }
22 |
23 | test("case class can be constructed with mixed named and unnamed arguments") {
24 | val h = HkdFor[Foo22, Id](1, c2 = 2, d = 3)
25 | assertEquals(h.b, 1)
26 | assertEquals(h.c2, 2)
27 | assertEquals(h.d, 3)
28 | }
29 |
30 | test("case class can be constructed with out-of-order named arguments") {
31 | val h = HkdFor[Foo22, Id](c2 = 2, d = 3, b = 1)
32 | assertEquals(h.b, 1)
33 | assertEquals(h.c2, 2)
34 | assertEquals(h.d, 3)
35 | }
36 |
37 | test("constructor errors when argument missing") {
38 | val errors = typeCheckErrors("HkdFor[Foo21, Id](1)")
39 | assertEquals(errors.map(_.message), Seq("Missing argument for parameter 'c1'"))
40 | }
41 |
42 | test("constructor errors when too many arguments") {
43 | val errors = typeCheckErrors("HkdFor[Foo21, Id](1, 2, 3)")
44 | assertEquals(errors.map(_.message), Seq("Unexpected argument"))
45 | }
46 |
47 | test("constructor errors when wrong argument type") {
48 | val errors = typeCheckErrors("""HkdFor[Foo21, Id]("hello", 2)""")
49 | assertEquals(errors.map(_.message), Seq("Found: java.lang.String\nRequired: cats.Id[scala.Int]"))
50 | }
51 |
52 | test("can assign case class to superclass sum type") {
53 | val h: HkdFor[Foo, Id] = HkdFor[Foo1, Id](1)
54 | }
55 |
56 | test("can copy case class with positional arguments") {
57 | val h = HkdFor[Foo21, Id](1, 2)
58 | assertEquals(h.copy(3, 4), HkdFor[Foo21, Id](3, 4))
59 | }
60 |
61 | test("can copy case class with named arguments out-of-order") {
62 | val h = HkdFor[Foo21, Id](1, 2)
63 | assertEquals(h.copy(c1 = 4, b = 3), HkdFor[Foo21, Id](3, 4))
64 | }
65 |
66 | test("can copy case class with mixed named and unnamed arguments") {
67 | val h = HkdFor[Foo21, Id](1, 2)
68 | assertEquals(h.copy(3, c1 = 4), HkdFor[Foo21, Id](3, 4))
69 | }
70 |
71 | test("can copy case class with default parameters") {
72 | val h = HkdFor[Foo21, Id](1, 2)
73 | assertEquals(clue(h.copy(c1 = 3)), HkdFor[Foo21, Id](1, 3))
74 | }
75 |
76 | test("can derive fully applied mirror for products") {
77 | summon[Mirror.ProductOf[HkdFor[Foo1, Option]]]
78 | }
79 |
80 | test("can derive K0.ProductGeneric") {
81 | summon[K0.ProductGeneric[HkdFor[Foo1, Option]]]
82 | }
83 |
84 | test("can derive partially applied mirror for products") {
85 | summon[Mirror.Product { type MirroredType[F[_]] = HkdFor[Foo1, F] }]
86 | }
87 |
88 | test("can derive K11.ProductGeneric") {
89 | summon[K11.ProductGeneric[HkdFor_[Foo1]]]
90 | }
91 |
92 | test("can derive fully applied mirror for sums") {
93 | summon[Mirror.SumOf[HkdFor[Foo2, Option]]]
94 | }
95 |
96 | test("can derive K0.CoproductGeneric") {
97 | summon[K0.CoproductGeneric[HkdFor[Foo2, Option]]]
98 | }
99 |
100 | test("can derive partially applied mirror for sums") {
101 | summon[Mirror.Sum { type MirroredType[F[_]] = HkdFor[Foo2, F] }]
102 | }
103 |
104 | test("can derive K11.CoproductGeneric") {
105 | summon[K11.CoproductGeneric[HkdFor_[Foo2]]]
106 | }
107 |
108 | test("renders toString correctly") {
109 | val h: HkdFor[Foo, Id] = HkdFor[Foo22, Id](1, c2 = 2, d = 3)
110 | assertEquals(h.toString, "HkdFor[Foo22, ?](1, 2, 3)")
111 | }
112 |
113 | test("compares non-equal for nominally unrelated types with same fields") {
114 | case class A(a: Int)
115 | case class B(a: Int)
116 | val ha = HkdFor[A, Id](1)
117 | val hb = HkdFor[B, Id](1)
118 | assert(!ha.canEqual(hb))
119 | assert(!hb.canEqual(ha))
120 | assert(ha != hb)
121 | }
122 |
123 | test("canEqual HkdFor with same erased T") {
124 | val h1: HkdFor[Foo, Id] = HkdFor[Foo1, Id](1)
125 | val h2: HkdFor[Foo, Id] = HkdFor[Foo1, Id](2)
126 | assert(h1.canEqual(h2))
127 | assert(h2.canEqual(h1))
128 | }
129 |
130 | test("not canEqual HkdFor with different erased T") {
131 | val h1: HkdFor[Foo, Id] = HkdFor[Foo1, Id](1)
132 | val h2: HkdFor[Foo, Id] = HkdFor[Foo21, Id](2, 3)
133 | assert(!h1.canEqual(h2))
134 | assert(!h2.canEqual(h1))
135 | }
136 |
137 | test("canEqual does not depend on F") {
138 | val h1: HkdFor[Foo, Id] = HkdFor[Foo1, Id](1)
139 | val h2: HkdFor[Foo, Option] = HkdFor[Foo1, Option](Some(2))
140 | assert(h1.canEqual(h2))
141 | assert(h2.canEqual(h1))
142 | }
143 |
144 | test("implements Product") {
145 | val h: HkdFor[Foo, Id] = HkdFor[Foo21, Id](0, 1)
146 | assertEquals(h.productArity, 2)
147 | assertEquals(h.productElement(0), 0)
148 | assertEquals(h.productElement(1), 1)
149 | }
150 |
151 | test("throws compile-time error when selecting unknown field") {
152 | val errors = typeCheckErrors(
153 | """
154 | val h = HkdFor[Foo21, Id](1, 2)
155 | h.b
156 | h.x
157 | """)
158 | assertEquals(errors.map(_.lineContent.stripLeading().stripTrailing()), Seq("h.x"))
159 | }
160 |
161 | test("throws compile-time error when calling unknown method") {
162 | val errors = typeCheckErrors(
163 | """
164 | val h = HkdFor[Foo21, Id](1, 2)
165 | h.b
166 | h.f(123)
167 | """)
168 | assertEquals(errors.map(_.lineContent.stripLeading().stripTrailing()), Seq("h.f(123)"))
169 | }
170 |
171 | test("has covariant subtype relationship with simple F") {
172 | val h: HkdFor[Foo1, Option] = HkdFor[Foo1, Some](Some(1))
173 | }
174 |
175 | test("no contravariant subtype relationship with simple F") {
176 | val errors = typeCheckErrors("""val h: HkdFor[Foo1, Some] = HkdFor[Foo1, Option](Some(1))""")
177 | assert(clue(errors.size) == 1)
178 | assert(clue(errors.head.message).contains("Could not prove"))
179 | assert(clue(errors.head.message).contains("no implicit values were found that match type Option[Int] <:< Some[Int]"))
180 | }
181 |
182 | test("has covariant subtype relationship with complex F") {
183 | case class Bar(a: Int, b: String)
184 |
185 | type F1[X] = X match
186 | case Int => Int
187 | case String => String
188 | case Boolean => Unit
189 |
190 | type F2[X] = X match
191 | case String => String
192 | case Int => Int
193 | case Boolean => Nothing
194 |
195 | val h1: HkdFor[Bar, F1] = HkdFor[Bar, F2](1, "hello")
196 | val h2: HkdFor[Bar, F2] = HkdFor[Bar, F1](1, "hello")
197 | }
198 |
199 | test("no subtype relationship with incompatible F") {
200 | case class Bar(a: Int, b: String)
201 |
202 | type F1[X] = X match
203 | case Int => Int
204 | case String => String
205 |
206 | type F2[X] = X match
207 | case Int => Unit
208 | case String => Unit
209 |
210 | val errors = typeCheckErrors("""val h2: HkdFor[Bar, F2] = HkdFor[Bar, F1](1, "hello")""")
211 |
212 | assert(errors.size == 1)
213 | assert(clue(errors.head.message).contains("Could not prove"))
214 | assert(clue(errors.head.message).contains("no implicit values were found that match type F1[Int] <:< F2[Int]"))
215 | }
216 |
217 | test("has subtype relationship with erased F") {
218 | val h: HkdFor[Foo1, [_] =>> Any] = HkdFor[Foo1, Some](Some(1))
219 | }
220 |
221 | test("has subtype relationship with erased T and erased F") {
222 | val h: HkdFor[Any, [_] =>> Any] = HkdFor[Foo1, Some](Some(1))
223 | }
224 |
225 | test("type tests sealed subtype") {
226 | val h: HkdFor[Foo, Id] = HkdFor[Foo1, Id](1)
227 | h match
228 | case h: HkdFor[Foo2, Id] => fail("matched Foo2", clues(h))
229 | case h: HkdFor[Foo1, Id] => ()
230 | case _ => fail("matched fallback", clues(h))
231 | }
232 |
233 | test("type tests nested sealed subtype as supertype") {
234 | val h: HkdFor[Foo, Id] = HkdFor[Foo22, Id](1, 2, 3)
235 | h match
236 | case h: HkdFor[Foo1, Id] => fail("matched Foo1", clues(h))
237 | case h: HkdFor[Foo21, Id] => fail("matched Foo21", clues(h))
238 | case h: HkdFor[Foo2, Id] => ()
239 | case _ => fail("matched fallback", clues(h))
240 | }
241 |
242 | test("type tests and doesn't match unrelated T") {
243 | case class Bar()
244 | val h: HkdFor[Foo, Id] = HkdFor[Foo1, Id](1)
245 |
246 | val errors = typeCheckErrors(
247 | """
248 | h match
249 | case h: HkdFor[Bar, Id] => ()
250 | """)
251 |
252 | assert(errors.size == 1)
253 | assert(clue(errors.head.message).contains("case is unreachable"))
254 | assert(clue(errors.head.message).contains("Bar is not a subtype of Foo"))
255 | }
256 |
257 | test("type tests and doesn't match same T unrelated F") {
258 | val h = HkdFor[Foo1, Option](Some(1))
259 | val errors = typeCheckErrors(
260 | """
261 | h match
262 | case h: HkdFor[Foo1, List] => ()
263 | """)
264 |
265 | assertEquals(errors.length, 1)
266 | assert(clue(errors.head.message).contains("no implicit values were found that match type Option[Int] <:< List[Int]"))
267 | }
268 |
269 | test("type tests T subtype with simple F supertype") {
270 | val h: HkdFor[Foo, Some] = HkdFor[Foo1, Some](Some(1))
271 | h match
272 | case h: HkdFor[Foo2, Option] => fail("matched Foo2", clues(h))
273 | case h: HkdFor[Foo21, Option] => fail("matched Foo21", clues(h))
274 | case h: HkdFor[Foo1, Option] => ()
275 | case _ => fail("matched fallback", clues(h))
276 | }
277 |
278 | test("type tests T subtype with complex F supertype") {}
279 |
280 | /* WILDCARDS CAUSE A COMPILER CRASH
281 |
282 | test("type tests T wildcard with simple F super type") {
283 | val h: HkdFor[Foo, Some] = HkdFor[Foo1, Some](Some(1))
284 | h match
285 | case h: HkdFor[?, Option] => ()
286 | case _ => fail("matched fallback", clues(h))
287 | }
288 |
289 | test("type tests and matches T wildcard with unrelated F") {
290 | val h: HkdFor[Foo, Some] = HkdFor[Foo1, Some](Some(1))
291 | h match
292 | case h: HkdFor[?, List] => ()
293 | case _ => fail("matched fallback", clues(h))
294 | }
295 |
296 | test("type tests and doesn't match ADT bounded T wildcard with unrelated F") {
297 | val h: HkdFor[Foo, Some] = HkdFor[Foo1, Some](Some(1))
298 | h match
299 | case h: HkdFor[? <: Foo, List] => fail("matched unrelated F", clues(h))
300 | case _ => ()
301 | }*/
302 |
303 | test("pattern match extracts fields without type test") {
304 | val h = HkdFor[Foo22, Option](Some(1), Some(2), Some(3))
305 | h match {
306 | case HkdFor(a, b, c) =>
307 | summon[a.type <:< Option[Int]]
308 | summon[b.type <:< Option[Int]]
309 | summon[c.type <:< Option[Int]]
310 | assertEquals((a, b, c), (Some(1), Some(2), Some(3)))
311 | }
312 | }
313 |
314 | test("pattern match extracts fields with type test complex F") {
315 | case class Bar(a: Int, b: String)
316 |
317 | type F1[X] = X match
318 | case Int => Int
319 | case String => String
320 | case Boolean => Unit
321 |
322 | val h = HkdFor[Bar, F1](1, "hello")
323 | h match {
324 | case HkdFor(a, b) =>
325 | summon[a.type <:< Int]
326 | summon[b.type <:< String]
327 | assertEquals((a, b), (1, "hello"))
328 | }
329 | }
330 |
331 | test("pattern match extracts fields with inferred F, no type test") {
332 | val h = HkdFor[Foo22, Option](Some(1), Some(2), Some(3))
333 | h match {
334 | case HkdFor_[Foo22](a, b, c) => {
335 | summon[a.type <:< Option[Int]]
336 | summon[b.type <:< Option[Int]]
337 | summon[c.type <:< Option[Int]]
338 | assertEquals((a, b, c), (Some(1), Some(2), Some(3)))
339 | }
340 | }
341 | }
342 |
343 | test("pattern match extracts fields with inferred F, type test for subtype") {
344 | val h: HkdFor[Foo, Option] = HkdFor[Foo22, Option](Some(1), Some(2), Some(3))
345 | h match {
346 | case h: HkdFor[Foo1, Option] => fail("matched Foo1", clues(h))
347 | case HkdFor_[Foo22](a, b, c) => {
348 | summon[a.type <:< Option[Int]]
349 | summon[b.type <:< Option[Int]]
350 | summon[c.type <:< Option[Int]]
351 | assertEquals((a, b, c), (Some(1), Some(2), Some(3)))
352 | }
353 | }
354 | }
355 |
356 | test("calls type tests in matchExhaustively") {
357 | val h: HkdFor[Foo, Id] = HkdFor[Foo1, Id](1)
358 | h matchExhaustively {
359 | case h: HkdFor[Foo2, Id] => fail("matched Foo2", clues(h))
360 | case h: HkdFor[Foo1, Id] => ()
361 | case _ => fail("matched fallback", clues(h))
362 | }
363 | }
364 |
365 | // can only be checked manually by eye
366 | test("matchExhaustively warns when not exhaustive".ignore) {
367 | val h: HkdFor[Foo, Id] = HkdFor[Foo1, Id](1)
368 | h matchExhaustively {
369 | case _: HkdFor[Foo1, Id] => ()
370 | // no case for Foo2
371 | }
372 | }
373 |
374 | test("FunctorK maps product") {
375 | val h = HkdFor[Foo1, Option](Some(1))
376 | val mapped = FunctorK[HkdFor_[Foo1]].mapK(h)([A] => (a: Option[A]) => a.toList)
377 | assertEquals(mapped, HkdFor[Foo1, List](List(1)))
378 | }
379 |
380 | test("FunctorK maps sum") {
381 | val h: HkdFor[Foo2, Option] = HkdFor[Foo21, Option](Some(1), Some(2))
382 | val mapped = FunctorK[HkdFor_[Foo2]].mapK(h)([A] => (a: Option[A]) => a.toList)
383 | assertEquals(mapped, HkdFor[Foo21, List](List(1), List(2)))
384 | }
385 |
386 | test("FunctorK maps nested sum") {
387 | val h: HkdFor[Foo, Option] = HkdFor[Foo21, Option](Some(1), Some(2))
388 | val mapped = FunctorK[HkdFor_[Foo]].mapK(h)([A] => (a: Option[A]) => a.toList)
389 | assertEquals(mapped, HkdFor[Foo21, List](List(1), List(2)))
390 | }
391 |
392 | /* NOT IMPLEMENTED YET
393 |
394 | test("FunctorK maps recursive ADT") {
395 | sealed trait Bar
396 | case class BarLeaf(a: Int) extends Bar
397 | case class BarBranch(b: Int, tree: Bar) extends Bar
398 |
399 | val bar = HkdFor[BarBranch, Option](Some(1), HkdFor[BarLeaf, Option](Some(2)))
400 | val mapped = FunctorK[HkdFor_[Bar]].mapK(bar)([A] => (a: Option[A]) => a.toList)
401 | assertEquals(mapped, HkdFor[BarBranch, List](List(1), HkdFor[BarLeaf, List](List(2))))
402 | }*/
403 | }
404 |
--------------------------------------------------------------------------------
/src/main/scala/internal/MacroUtils.scala:
--------------------------------------------------------------------------------
1 | package com.tschuchort.hkd
2 | package internal
3 |
4 | import scala.annotation.{nowarn, targetName}
5 | import scala.compiletime.{constValueTuple, erasedValue}
6 | import scala.deriving.Mirror
7 | import scala.quoted.*
8 | import scala.quoted.runtime.StopMacroExpansion
9 | import scala.quoted.{Expr, Quotes, Type}
10 | import scala.util.chaining.*
11 |
12 | /** Dealiases the type and all its arguments, recursively. */
13 | protected[tschuchort] def dealiasNested(using q: Quotes)(tpe: q.reflect.TypeRepr): q.reflect.TypeRepr =
14 | import q.reflect.{*, given}
15 | tpe match
16 | case AppliedType(tycon, args) => AppliedType(dealiasNested(tycon), args.map(dealiasNested(_)))
17 | case _ => tpe.dealias
18 |
19 | private def printTypeDealiasedImpl[T: Type](using q: Quotes): Expr[Unit] = {
20 | import q.reflect.*
21 | println("dealiased: " + TypeRepr.of[T].dealias.show)
22 | '{ () }
23 | }
24 |
25 | protected[tschuchort] inline def printTypeDealiased[T]: Unit = ${ printTypeDealiasedImpl[T] }
26 |
27 | private def printTypeDefImpl[T: Type](using q: Quotes): Expr[Unit] = {
28 | import q.reflect.*
29 | println("Print type def: " + TypeRepr.of[T].typeSymbol.tree.show)
30 | '{ () }
31 | }
32 |
33 | protected[tschuchort] inline def printTypeDef[A]: Unit = ${ printTypeDefImpl[A] }
34 |
35 | protected[tschuchort] inline def labelsOf[A](using p: Mirror.ProductOf[A]): p.MirroredElemLabels =
36 | constValueTuple[p.MirroredElemLabels]
37 |
38 | @nowarn("msg=discarded expression")
39 | private def indentTreeStr(s: String): String = {
40 | val o = new StringBuilder()
41 | val indentSize = 2
42 | var indentLevel = 0
43 | var skipNextSpaces = false
44 |
45 | def newLine(): Unit =
46 | o.append('\n')
47 | o.appendAll(Array.fill[Char](indentSize * indentLevel)(' '))
48 |
49 | val lastIndex = s.length - 1
50 | for (i <- 0 to lastIndex)
51 | if s(i) == '(' && i < lastIndex && s(i + 1) != ')' then
52 | indentLevel += 1
53 | o.append(s(i))
54 | newLine()
55 | else if s(i) == ')' && i != 0 && s(i - 1) != '(' then
56 | indentLevel -= 1
57 | o.append(s(i))
58 | else if s(i) == ',' then
59 | o.append(',')
60 | newLine()
61 | skipNextSpaces = true
62 | else if s(i) == ' ' && skipNextSpaces then ()
63 | else
64 | o.append(s(i))
65 | skipNextSpaces = false
66 |
67 | o.result()
68 | }
69 |
70 | protected[tschuchort] def printTastyTree(using q: Quotes)(tree: q.reflect.Tree): Unit = {
71 | import q.reflect.*
72 | println(indentTreeStr(Printer.TreeStructure.show(tree)))
73 | }
74 |
75 | protected[tschuchort] def printTastyTypeRepr(using q: Quotes)(typeRepr: q.reflect.TypeRepr): Unit = {
76 | import q.reflect.*
77 | println(indentTreeStr(Printer.TypeReprStructure.show(typeRepr)))
78 | }
79 |
80 | extension (e: Expr.type)
81 | protected[tschuchort] def summonOrErrorMsg[T](using Type[T])(using q: Quotes): Either[String, Expr[T]] = {
82 | import q.reflect.*
83 | Implicits.search(TypeRepr.of[T]) match {
84 | case iss: ImplicitSearchSuccess => Right(iss.tree.asExpr.asInstanceOf[Expr[T]])
85 | case isf: ImplicitSearchFailure => Left(isf.explanation)
86 | }
87 | }
88 |
89 | extension (e: Expr.type)
90 | protected[tschuchort] def summonOrAbort[T](using Type[T])(using q: Quotes): Expr[T] =
91 | summonOrAbort(errPos = q.reflect.Position.ofMacroExpansion)
92 |
93 | protected[tschuchort] def summonOrAbort[T](using Type[T])(using q: Quotes)(errPos: q.reflect.Position): Expr[T] =
94 | import q.reflect.*
95 | Implicits.search(TypeRepr.of[T]) match {
96 | case iss: ImplicitSearchSuccess => iss.tree.asExpr.asInstanceOf[Expr[T]]
97 | case isf: ImplicitSearchFailure => report.errorAndAbort(isf.explanation, errPos)
98 | }
99 |
100 | extension (e: Expr.type)
101 | protected[tschuchort] inline def summonAllOrAbort[T <: Tuple](using Type[T])(using q: Quotes): Tuple.Map[T, Expr] =
102 | summonAllOrAbort(errPos = q.reflect.Position.ofMacroExpansion)
103 |
104 | protected[tschuchort] inline def summonAllOrAbort[T <: Tuple](using Type[T])(using q: Quotes)(
105 | errPos: q.reflect.Position
106 | ): Tuple.Map[T, Expr] =
107 | import q.reflect.{*, given}
108 |
109 | inline erasedValue[T] match
110 | case _: EmptyTuple => EmptyTuple
111 | case _: (t *: ts) =>
112 | Implicits.search(TypeRepr.of[t]) match
113 | case iss: ImplicitSearchSuccess =>
114 | iss.tree.asExpr.asExprOf[t] *: Expr.summonAllOrAbort[ts](errPos)
115 | case isf: ImplicitSearchFailure =>
116 | report.errorAndAbort(isf.explanation, errPos)
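// Usage sketch inside some macro implementation (the instance types are illustrative):
//   val (ordExpr, numExpr) = Expr.summonAllOrAbort[(Ordering[Int], Numeric[Long])]
// yields one Expr per tuple element, or aborts macro expansion at `errPos` with the
// explanation of the first failed implicit search.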
117 |
118 | protected[tschuchort] def typeIdentOf[T <: AnyKind](using q: Quotes)(using Type[T]) =
119 | import q.reflect.*
120 | TypeIdent(TypeRepr.of[T].typeSymbol)
121 |
122 | protected[tschuchort] def typeReprOf(using q: Quotes)(tpe: Type[?]): q.reflect.TypeRepr =
123 | tpe match { case '[tpe] => q.reflect.TypeRepr.of[tpe] }
124 |
125 | protected[tschuchort] def isTuple(using q: Quotes)(tpr: q.reflect.TypeRepr): Boolean =
126 | tpr.asType match {
127 | case '[EmptyTuple] => true
128 | case '[t *: ts] => true // '[_ *: _] causes compiler error but '[t *: ts] works
129 | case _ => false
130 | }
131 |
132 | protected[tschuchort] def tupleToTypeReprs[T <: Tuple: Type](using q: Quotes): Seq[q.reflect.TypeRepr] =
133 | import q.reflect.{*, given}
134 | Type.of[T] match
135 | case '[head *: tail] => TypeRepr.of[head] +: tupleToTypeReprs[tail]
136 | case '[EmptyTuple] => Seq.empty
137 |
138 | protected[tschuchort] def tupleToTypes[T <: Tuple: Type](using q: Quotes): Seq[Type[? <: Tuple.Union[T]]] =
139 | tupleToTypeReprs[T].map(_.asType.asInstanceOf[Type[? <: Tuple.Union[T]]])
140 |
141 | protected[tschuchort] def tupleOfTypes(using q: Quotes)(tpes: Seq[Type[?]]): Type[? <: Tuple] =
142 | import q.reflect.{*, given}
143 |
144 | tpes.foldRight[Type[? <: Tuple]](Type.of[EmptyTuple]) { case ('[tpe], acc) =>
145 | type acc <: Tuple
146 | given Type[acc] = acc.asInstanceOf[Type[acc]]
147 | Type.of[tpe *: acc] // foldRight --> prepend; foldLeft --> append
148 | }
149 |
150 | protected[tschuchort] def tupleOfTypeReprs(using q: Quotes)(tpes: Seq[q.reflect.TypeRepr]): q.reflect.TypeRepr =
151 | import q.reflect.{*, given}
152 | typeReprOf(tupleOfTypes(tpes.map(_.asType)))
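// Illustrative round trip (inside a Quotes context):
//   tupleToTypeReprs[(Int, String)]                               // Seq(TypeRepr.of[Int], TypeRepr.of[String])
//   tupleOfTypeReprs(Seq(TypeRepr.of[Int], TypeRepr.of[String]))  // TypeRepr of Int *: String *: EmptyTuple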
153 |
154 | protected[tschuchort] def noTypeBoundsRepr(using q: Quotes) =
155 | import q.reflect.*
156 | TypeBounds(TypeRepr.of[Nothing], TypeRepr.of[Any])
157 |
158 | protected[tschuchort] def noTypeBoundsTree(using q: Quotes) =
159 | import q.reflect.*
160 | TypeBoundsTree(Inferred(TypeRepr.of[Nothing]), Inferred(TypeRepr.of[Any]))
161 |
162 | protected[tschuchort] def typeBoundsTreeOf[Lower, Upper](using q: Quotes)(using Lower <:< Upper, Type[Lower], Type[Upper]) =
163 | import q.reflect.*
164 | TypeBoundsTree(TypeIdent(TypeRepr.of[Lower].typeSymbol), TypeIdent(TypeRepr.of[Upper].typeSymbol))
165 |
166 | protected[tschuchort] def lowerTypeBoundTree[Lower](using q: Quotes)(using Type[Lower]) =
167 | import q.reflect.*
168 | TypeBoundsTree(TypeIdent(TypeRepr.of[Lower].typeSymbol), Inferred(TypeRepr.of[Any]))
169 |
170 | protected[tschuchort] def upperTypeBoundTree[Upper](using q: Quotes)(using Type[Upper]) =
171 | import q.reflect.*
172 | TypeBoundsTree(Inferred(TypeRepr.of[Nothing]), TypeIdent(TypeRepr.of[Upper].typeSymbol))
173 |
174 | protected[tschuchort] def refinementOf(using q: Quotes)(baseType: q.reflect.TypeRepr, fields: (String, q.reflect.TypeRepr)*) =
175 | import q.reflect.*
176 | fields.foldLeft(baseType) { case (prev, (fieldName, fieldType)) =>
177 | Refinement(prev, fieldName, fieldType)
178 | }
179 |
180 | extension (using q: Quotes)(tpe: q.reflect.TypeRepr)
181 | /** Case class fields zipped with their global TypeRepr */
182 | protected[tschuchort] def caseFieldsWithTypes: List[(String, q.reflect.TypeRepr)] =
183 | import q.reflect.*
184 | tpe.typeSymbol.caseFields.map { symbol =>
185 | (symbol.name, tpe.memberType(symbol).typeSymbol.pipe(TypeIdent.apply).tpe)
186 | }
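// e.g. for a hypothetical `case class Person(name: String, age: Int)`:
//   TypeRepr.of[Person].caseFieldsWithTypes   // List(("name", <TypeRepr of String>), ("age", <TypeRepr of Int>))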
187 |
188 | extension (using q: Quotes)(tpe: q.reflect.TypeRepr)
189 | /** TypeReprs of child classes */
190 | @targetName("TypeReprChildrenTypes")
191 | protected[tschuchort] def childrenTypes: List[q.reflect.TypeRepr] =
192 | import q.reflect.*
193 | tpe.typeSymbol.children.map { cs => TypeTree.ref(cs).tpe }
194 |
195 | extension (using q: Quotes)(typeSymbol: q.reflect.Symbol)
196 | /** TypeReprs of child classes */
197 | @targetName("symbolChildrenTypes")
198 | protected[tschuchort] def childrenTypes: List[q.reflect.TypeRepr] =
199 | import q.reflect.*
200 | assert(typeSymbol.isType)
201 | typeSymbol.children.map { cs => TypeTree.ref(cs).tpe }
202 |
203 | protected[tschuchort] def requireDynamicMethodName(using q: Quotes)(
204 | expectedName: String,
205 | name: Expr[String],
206 | methodOwnerType: q.reflect.TypeRepr
207 | ): Unit =
208 | import q.reflect.*
209 | require(expectedName.nonEmpty)
210 | name.value match
211 | case Some(`expectedName`) => ()
212 | case Some(name) => report.errorAndAbort(s"'${name}' is not a member of ${methodOwnerType.widenTermRefByName.show}")
213 | case None => report.errorAndAbort(s"Invalid method invocation on ${methodOwnerType.widenTermRefByName.show}")
214 |
215 | protected[tschuchort] def parseDynamicArgsExpr(using q: Quotes)(
216 | argsExpr: Expr[Seq[Any | (String, Any)]],
217 | generalErrorPos: q.reflect.Position = q.reflect.Position.ofMacroExpansion
218 | ): Seq[(Option[String], q.reflect.TypeRepr, Expr[Any])] =
219 | import q.reflect.*
220 |
221 | val args = argsExpr match
222 | case Varargs(args) => args
223 | case _ => report.errorAndAbort("Macro internal error: Expected explicit varargs sequence", generalErrorPos)
224 |
225 | args.map {
226 | case '{ (${ labelExpr }: String, $valueExpr) } =>
227 | // Widen valueExpr TypeRepr to get rid of Singleton types of String expressions.
228 | // Note: In applyDynamic if some parameters have names and others do not, those
229 | // without names will have the name "" (empty String).
230 | (Some(labelExpr.valueOrAbort).filter(_.nonEmpty), valueExpr.asTerm.tpe.widen, valueExpr)
231 |
232 | // "label" -> value;
233 | case '{ ArrowAssoc(${ labelExpr }: String).->(${ valueExpr }) } =>
234 | // Widen valueExpr TypeRepr to get rid of Singleton types of String expressions
235 | (Some(labelExpr.valueOrAbort).filter(_.nonEmpty), valueExpr.asTerm.tpe.widen, valueExpr)
236 |
237 | case expr =>
238 | // Widen valueExpr TypeRepr to get rid of Singleton types of String expressions
239 | (None, expr.asTerm.tpe.widen, expr)
240 | }
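// Sketch of the parsed result (receiver and values are hypothetical): for a dynamic call like
//   obj.copy(1, b = "x")
// the compiler passes the arguments as a varargs Seq and this function returns roughly
//   Seq((None, <TypeRepr of Int>, '{ 1 }), (Some("b"), <TypeRepr of String>, '{ "x" }))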
241 |
242 | /** Checks that supplied parameters match the expected parameters in type and either name or position. The result is a normalized
243 |  * list of all expected arguments in the same order as [[expectedParamNamesTypesDefaults]], with name, widened argument type and
244 |  * argument expression (or default argument expression).
245 | *
246 | * @param expectedParamNamesTypesDefaults
247 | * Tuples of parameter's expected name, expected type and optional default argument expression.
248 | * @param paramNamesTypesValues
249 | * Tuples of argument's name (if present), argument type and argument expression
250 | * @param generalErrorPos
251 |  *   Source position to show in errors that can not be traced to a single argument expression, e.g. a missing argument. Default
252 | * is the position of macro expansion, which will usually highlight the entire call-site. Alternatively, you may set
253 | * {{{
254 | * generalErrorPos = argsExpr.asTerm.pos
255 | * }}}
256 | * where `argsExpr` is the `Expr[Seq[Any]]` you get from `applyDynamic`, to only highlight the argument list.
257 | * @return
258 | * Tuple of name, argument type, argument expression.
259 | */
260 | protected[tschuchort] def checkAndNormalizeParams(using q: Quotes)(
261 | expectedParamNamesTypesDefaults: Seq[(String, q.reflect.TypeRepr, Option[Expr[Any]])],
262 | paramNamesTypesValues: Seq[(Option[String], q.reflect.TypeRepr, Expr[Any])],
263 | generalErrorPos: q.reflect.Position = q.reflect.Position.ofMacroExpansion
264 | ): Seq[(String, q.reflect.TypeRepr, Expr[Any])] =
265 | import q.reflect.*
266 |
267 | require(expectedParamNamesTypesDefaults.allDistinctBy(_._1), "expected parameters must be distinct by name")
268 |
269 | paramNamesTypesValues.collectAllOrNone { case (Some(name), tpe, value) => (name, tpe, value) } match
270 | case Some(paramNamesTypesValues) =>
271 | // Case 1: All params have names and they match exactly (no leftovers) in name and type to the
272 | // expected params in any order.
273 | expectedParamNamesTypesDefaults
274 | .matchBy(paramNamesTypesValues)(_._1 /* name */ )(_._1 /* name */ )
275 | .map {
276 | // Parameters that appeared in the call but had no matching name in expected parameters
277 | case (None, unmatchedParams) =>
278 | assert(unmatchedParams.nonEmpty)
279 | unmatchedParams.foreach { case (name, _, expr) =>
280 | report.error(s"Method does not have a parameter '${name}'", expr)
281 | }
282 | throw StopMacroExpansion()
283 |
284 | // Expected param has no param with matching name
285 | case (Some((name, expectedType, default)), Seq()) =>
286 | default match
287 | case Some(default) => (name, expectedType, default)
288 | case None => report.errorAndAbort(s"Missing argument for parameter '$name'", generalErrorPos)
289 |
290 | // Exactly one param matching the name of expected param
291 | case (Some((expectedName, expectedType, default)), Seq(param @ (name: String, paramType, paramValue))) =>
292 | assert(expectedName == name)
293 | if paramType <:< expectedType
294 | then param
295 | else
296 | report.errorAndAbort(s"Found: ${Printer.TypeReprCode.show(paramType)}\n" +
297 | s"Required: ${Printer.TypeReprCode.show(expectedType)}",
298 | paramValue)
299 |
300 | // Note: Scala allows a name to appear multiple times in the parameter list!
301 | case (Some((name, tpe, default)), matchedParams) =>
302 | assert(matchedParams.length > 1)
303 | matchedParams.foreach { case (name, _, expr) =>
304 | report.error(s"Parameter '$name' may not appear more than once", expr)
305 | }
306 | throw StopMacroExpansion()
307 | }
308 |
309 | case None =>
310 |       // Case 2: Some or none of the params have names. All params match exactly (no leftovers) the type of the
311 |       // expected param at the same position. Those that do have names also match in name.
312 | expectedParamNamesTypesDefaults.map(Option(_)).zipAll(paramNamesTypesValues.map(Option(_)), None, None).map {
313 | case (None, None) => throw AssertionError("impossible match")
314 | case (Some((expectedName, expectedType, default)), None) =>
315 | default match
316 | case Some(default) => (expectedName, expectedType, default)
317 | case None => report.errorAndAbort(s"Missing argument for parameter '$expectedName'", generalErrorPos)
318 | case (None, Some((name, tpe, value))) =>
319 | report.errorAndAbort(s"Unexpected argument", value)
320 | case (Some((expectedName, expectedType, default)), Some((maybeName, tpe, value))) =>
321 | if !(tpe <:< expectedType) then
322 | report.errorAndAbort(s"Found: ${Printer.TypeReprCode.show(tpe)}\n" +
323 | s"Required: ${Printer.TypeReprCode.show(expectedType)}",
324 | value)
325 |
326 | maybeName match
327 | case Some(name) if name != expectedName =>
328 | report.errorAndAbort(s"Expected parameter of name '$expectedName'", value)
329 | case _ => ()
330 |
331 | (expectedName, tpe, value)
332 | }
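// Normalization sketch (names and types are hypothetical): with expected parameters
//   Seq(("a", <Int>, None), ("b", <String>, Some('{ "default" })))
// and supplied arguments Seq((Some("a"), <Int>, '{ 1 })), the result is
//   Seq(("a", <Int>, '{ 1 }), ("b", <String>, '{ "default" }))
// i.e. every expected parameter appears exactly once, in declaration order, with defaults filled in.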
333 |
334 | enum ShowTypeOptions:
335 | case ShortName, FqName, Widen, WidenByName, WidenTermRefByName, Simplified
336 |
337 | object ShowTypeOptions:
338 | given FromExpr[ShowTypeOptions] with
339 | def unapply(expr: Expr[ShowTypeOptions])(using Quotes): Option[ShowTypeOptions] =
340 | import quotes.reflect.*
341 | expr match
342 | case '{ ShowTypeOptions.ShortName } => Some(ShowTypeOptions.ShortName)
343 | case '{ ShowTypeOptions.FqName } => Some(ShowTypeOptions.FqName)
344 | case '{ ShowTypeOptions.Widen } => Some(ShowTypeOptions.Widen)
345 | case '{ ShowTypeOptions.WidenByName } => Some(ShowTypeOptions.WidenByName)
346 | case '{ ShowTypeOptions.WidenTermRefByName } => Some(ShowTypeOptions.WidenTermRefByName)
347 | case '{ ShowTypeOptions.Simplified } => Some(ShowTypeOptions.Simplified)
348 | case _ => None
349 |
350 | protected[tschuchort] inline def showType[T](inline options: ShowTypeOptions*): String & scala.Singleton =
351 | ${ showTypeImpl[T]('options) }
352 |
353 | inline def showType[T]: String & scala.Singleton =
354 | ${ showTypeImpl[T]('{ Seq(ShowTypeOptions.ShortName) }) }
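// Usage sketch: `showType[List[Int]]` renders the short name (e.g. "List[Int]"), while
// `showType[List[Int]](ShowTypeOptions.FqName, ShowTypeOptions.Simplified)` renders the fully qualified form;
// the exact string is whatever Printer.TypeReprShortCode / TypeReprCode produce.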
355 |
356 | private def showTypeImpl[T: Type](optionsExpr: Expr[Seq[ShowTypeOptions]])(using q: Quotes): Expr[String & scala.Singleton] =
357 | import q.reflect.{*, given}
358 | import ShowTypeOptions.{*, given}
359 |
360 | val options = optionsExpr.valueOrAbort
361 |
362 | def requireExactlyOneOption(subset: Seq[ShowTypeOptions]): Unit =
363 | require(options.count(subset.contains(_)) == 1,
364 | s"'options' must contain exactly one of ${subset.map { _.getClass.getSimpleName }.mkString(", ")}")
365 |
366 | requireExactlyOneOption(Seq(ShortName, FqName))
367 |
368 | def applyOption(tpe: TypeRepr, opt: ShowTypeOptions) = opt match
369 | case ShortName => tpe
370 | case FqName => tpe
371 | case Widen => tpe.widen
372 | case WidenByName => tpe.widenByName
373 | case WidenTermRefByName => tpe.widenTermRefByName
374 | case Simplified => tpe.simplified
375 |
376 | val modifiedTpe = options.foldLeft(TypeRepr.of[T])(applyOption)
377 |
378 | if options.contains(ShortName) then Expr(Printer.TypeReprShortCode.show(modifiedTpe).asInstanceOf[String & scala.Singleton])
379 | else if options.contains(FqName) then Expr(Printer.TypeReprCode.show(modifiedTpe).asInstanceOf[String & scala.Singleton])
380 | else throw AssertionError("At least one name option must be given!")
381 |
--------------------------------------------------------------------------------
/src/main/scala/BasicTypeclasses.scala:
--------------------------------------------------------------------------------
1 | package com.tschuchort.hkd
2 |
3 | import cats.{Applicative, Functor, Id}
4 | import shapeless3.deriving.{K0, K11, summonAsArray}
5 |
6 | import scala.compiletime.{constValue, summonInline}
7 | import internal.{ArrayProduct, `*`, `.`}
8 |
9 | import alleycats.Pure
10 | import FunctorK.{MapKGiven2Helper, MapKGiven3Helper, MapKGiven4Helper, MapKGivenHelper}
11 |
12 | import scala.deriving.Mirror
13 | import scala.util.chaining.*
14 |
15 | trait InvariantK[D[_[_]]] {
16 | extension [F[_]](df: D[F]) def imapK[G[_]](fg: [A] => F[A] => G[A])(gf: [A] => G[A] => F[A]): D[G]
17 | }
18 |
19 | trait FunctorK[D[_[_]]] extends InvariantK[D] {
20 | extension [F[_]](df: D[F]) {
21 | def mapK[G[_]](fg: [A] => F[A] => G[A]): D[G]
22 |
23 | override def imapK[G[_]](fg: [A] => F[A] => G[A])(gf: [A] => G[A] => F[A]): D[G] = mapK(fg)
24 |
25 | def mapKGiven[I[_]] = new MapKGivenHelper[I, D, F](df)(using this)
26 | def mapKGiven2[I1[_], I2[_]] = new MapKGiven2Helper[I1, I2, D, F](df)(using this)
27 | def mapKGiven3[I1[_], I2[_], I3[_]] = new MapKGiven3Helper[I1, I2, I3, D, F](df)(using this)
28 | def mapKGiven4[I1[_], I2[_], I3[_], I4[_]] = new MapKGiven4Helper[I1, I2, I3, I4, D, F](df)(using this)
29 | }
30 | }
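// Sketch using the function instance defined below (mirrors the summon-then-call pattern in the test sources):
//   val f: Int => Option[String] = i => Some(i.toString)
//   val g: Int => List[String] =
//     FunctorK[[F[_]] =>> Int => F[String]].mapK(f)([A] => (o: Option[A]) => o.toList)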
31 |
32 | //noinspection DuplicatedCode
33 | object FunctorK {
34 | inline def derived[D[_[_]]]: FunctorK[D] = apply
35 | inline def apply[D[_[_]]]: FunctorK[D] = summonInline
36 |
37 | given monoFunctorK[A]: FunctorK[[F[_]] =>> F[A]] with {
38 | extension [F[_]](df: F[A]) override def mapK[G[_]](fg: [B] => F[B] => G[B]): G[A] = fg(df)
39 | }
40 |
41 | given functionFunctorK[A, P]: FunctorK[[F[_]] =>> P => F[A]] with {
42 | extension [F[_]](df: P => F[A])
43 | override def mapK[G[_]](fg: [B] => F[B] => G[B]): P => G[A] =
44 | p => fg(df(p))
45 | }
46 |
47 | given function2FunctorK[A, P1, P2]: FunctorK[[F[_]] =>> P1 => P2 => F[A]] with {
48 | extension [F[_]](df: P1 => P2 => F[A])
49 | override def mapK[G[_]](fg: [B] => F[B] => G[B]): P1 => P2 => G[A] =
50 | p1 => p2 => fg(df(p1)(p2))
51 | }
52 |
53 | given function3FunctorK[A, P1, P2, P3]: FunctorK[[F[_]] =>> P1 => P2 => P3 => F[A]] with {
54 | extension [F[_]](df: P1 => P2 => P3 => F[A])
55 | override def mapK[G[_]](fg: [B] => F[B] => G[B]): P1 => P2 => P3 => G[A] =
56 | p1 => p2 => p3 => fg(df(p1)(p2)(p3))
57 | }
58 |
59 | given function4FunctorK[A, P1, P2, P3, P4]: FunctorK[[F[_]] =>> P1 => P2 => P3 => P4 => F[A]] with {
60 | extension [F[_]](df: P1 => P2 => P3 => P4 => F[A])
61 | override def mapK[G[_]](fg: [B] => F[B] => G[B]): P1 => P2 => P3 => P4 => G[A] =
62 | p1 => p2 => p3 => p4 => fg(df(p1)(p2)(p3)(p4))
63 | }
64 |
65 | given function5FunctorK[A, P1, P2, P3, P4, P5]: FunctorK[[F[_]] =>> P1 => P2 => P3 => P4 => P5 => F[A]] with {
66 | extension [F[_]](df: P1 => P2 => P3 => P4 => P5 => F[A])
67 | override def mapK[G[_]](fg: [B] => F[B] => G[B]): P1 => P2 => P3 => P4 => P5 => G[A] =
68 | p1 => p2 => p3 => p4 => p5 => fg(df(p1)(p2)(p3)(p4)(p5))
69 | }
70 |
71 | given function6FunctorK[A, P1, P2, P3, P4, P5, P6]: FunctorK[[F[_]] =>> P1 => P2 => P3 => P4 => P5 => P6 => F[A]] with {
72 | extension [F[_]](df: P1 => P2 => P3 => P4 => P5 => P6 => F[A])
73 | override def mapK[G[_]](fg: [B] => F[B] => G[B]): P1 => P2 => P3 => P4 => P5 => P6 => G[A] =
74 | p1 => p2 => p3 => p4 => p5 => p6 => fg(df(p1)(p2)(p3)(p4)(p5)(p6))
75 | }
76 |
77 | given function7FunctorK[A, P1, P2, P3, P4, P5, P6, P7]: FunctorK[[F[_]] =>> P1 => P2 => P3 => P4 => P5 => P6 => P7 => F[A]]
78 | with {
79 | extension [F[_]](df: P1 => P2 => P3 => P4 => P5 => P6 => P7 => F[A])
80 | override def mapK[G[_]](fg: [B] => F[B] => G[B]): P1 => P2 => P3 => P4 => P5 => P6 => P7 => G[A] =
81 | p1 => p2 => p3 => p4 => p5 => p6 => p7 => fg(df(p1)(p2)(p3)(p4)(p5)(p6)(p7))
82 | }
83 |
84 | given function8FunctorK[A, P1, P2, P3, P4, P5, P6, P7, P8]
85 | : FunctorK[[F[_]] =>> P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => F[A]] with {
86 | extension [F[_]](df: P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => F[A])
87 | override def mapK[G[_]](fg: [B] => F[B] => G[B]): P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => G[A] =
88 | p1 => p2 => p3 => p4 => p5 => p6 => p7 => p8 => fg(df(p1)(p2)(p3)(p4)(p5)(p6)(p7)(p8))
89 | }
90 |
91 | given function9FunctorK[A, P1, P2, P3, P4, P5, P6, P7, P8, P9]
92 | : FunctorK[[F[_]] =>> P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => F[A]] with {
93 | extension [F[_]](df: P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => F[A])
94 | override def mapK[G[_]](fg: [B] => F[B] => G[B]): P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => G[A] =
95 | p1 => p2 => p3 => p4 => p5 => p6 => p7 => p8 => p9 => fg(df(p1)(p2)(p3)(p4)(p5)(p6)(p7)(p8)(p9))
96 | }
97 |
98 | given function10FunctorK[A, P1, P2, P3, P4, P5, P6, P7, P8, P9, P10]
99 | : FunctorK[[F[_]] =>> P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => P10 => F[A]] with {
100 | extension [F[_]](df: P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => P10 => F[A])
101 | override def mapK[G[_]](fg: [B] => F[B] => G[B]): P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => P10 => G[A] =
102 | p1 => p2 => p3 => p4 => p5 => p6 => p7 => p8 => p9 => p10 => fg(df(p1)(p2)(p3)(p4)(p5)(p6)(p7)(p8)(p9)(p10))
103 | }
104 |
105 | given function11FunctorK[A, P1, P2, P3, P4, P5, P6, P7, P8, P9, P10, P11]
106 | : FunctorK[[F[_]] =>> P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => P10 => P11 => F[A]] with {
107 | extension [F[_]](df: P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => P10 => P11 => F[A])
108 | override def mapK[G[_]](fg: [B] => F[B] => G[B]): P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => P10 => P11 => G[A] =
109 | p1 => p2 => p3 => p4 => p5 => p6 => p7 => p8 => p9 => p10 => p11 => fg(df(p1)(p2)(p3)(p4)(p5)(p6)(p7)(p8)(p9)(p10)(p11))
110 | }
111 |
112 | given function12FunctorK[A, P1, P2, P3, P4, P5, P6, P7, P8, P9, P10, P11, P12]: FunctorK[
113 | [F[_]] =>> P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => P10 => P11 => P12 => F[A]
114 | ] with {
115 | extension [F[_]](df: P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => P10 => P11 => P12 => F[A])
116 | override def mapK[G[_]](fg: [B] => F[B] => G[B])
117 | : P1 => P2 => P3 => P4 => P5 => P6 => P7 => P8 => P9 => P10 => P11 => P12 => G[A] =
118 | p1 =>
119 | p2 =>
120 | p3 =>
121 | p4 => p5 => p6 => p7 => p8 => p9 => p10 => p11 => p12 => fg(df(p1)(p2)(p3)(p4)(p5)(p6)(p7)(p8)(p9)(p10)(p11)(p12))
122 | }
123 |
124 | given phantomFunctorK[A]: FunctorK[[_[_]] =>> A] with {
125 | extension [F[_]](df: A) override def mapK[G[_]](fg: [B] => F[B] => G[B]): A = df
126 | }
127 |
128 | given adtFunctorK[D[_[_]]](using inst: => K11.Instances[FunctorK, D]): FunctorK[D] with {
129 | extension [F[_]](df: D[F])
130 | override def mapK[G[_]](fg: [A] => F[A] => G[A]): D[G] =
131 | inst.map(df)(
132 | [t[_[_]]] => (fieldFunctorK: FunctorK[t], field: t[F]) => fieldFunctorK.mapK(field)(fg)
133 | )
134 | }
135 |
136 | given wrappedFunctorK[D[_[_]], H[_]](using fkd: FunctorK[D], functorH: Functor[H]): FunctorK[[F[_]] =>> H[D[F]]] with {
137 | extension [F[_]](hdf: H[D[F]])
138 | override def mapK[G[_]](fg: [A] => F[A] => G[A]): H[D[G]] =
139 | functorH.map(hdf) { (df: D[F]) => fkd.mapK(df)(fg) }
140 | }
141 |
142 | class MapKGivenHelper[I[_], D[_[_]], F[_]](df: D[F])(using FunctorK[D]) {
143 | def apply[G[_]](fg: [A] => F[A] => I[F[A]] ?=> G[A])(using instances: HkdFieldInstances[D, F, I]): D[G] =
144 | instances.zip(df).mapK[G](
145 | [A] => (fieldAndInstance: (F[A], I[F[A]])) => fg(fieldAndInstance._1)(using fieldAndInstance._2)
146 | )
147 | }
148 |
149 | class MapKGiven2Helper[I1[_], I2[_], D[_[_]], F[_]](df: D[F])(using FunctorK[D]) {
150 | def apply[G[_]](fg: [A] => F[A] => (I1[F[A]], I2[F[A]]) ?=> G[A])(using
151 | instances1: HkdFieldInstances[D, F, I1],
152 | instances2: HkdFieldInstances[D, F, I2]
153 | ): D[G] = instances2.zip(instances1.zip(df)).mapK[G](
154 | [A] => (x: ((F[A], I1[F[A]]), I2[F[A]])) => fg(x._1._1)(using x._1._2, x._2)
155 | )
156 | }
157 |
158 | class MapKGiven3Helper[I1[_], I2[_], I3[_], D[_[_]], F[_]](df: D[F])(using FunctorK[D]) {
159 | def apply[G[_]](fg: [A] => F[A] => (I1[F[A]], I2[F[A]], I3[F[A]]) ?=> G[A])(using
160 | instances1: HkdFieldInstances[D, F, I1],
161 | instances2: HkdFieldInstances[D, F, I2],
162 | instances3: HkdFieldInstances[D, F, I3]
163 | ): D[G] = instances3.zip(instances2.zip(instances1.zip(df))).mapK[G](
164 | [A] => (x: (((F[A], I1[F[A]]), I2[F[A]]), I3[F[A]])) => fg(x._1._1._1)(using x._1._1._2, x._1._2, x._2)
165 | )
166 | }
167 |
168 | class MapKGiven4Helper[I1[_], I2[_], I3[_], I4[_], D[_[_]], F[_]](df: D[F])(using FunctorK[D]) {
169 | def apply[G[_]](fg: [A] => F[A] => (I1[F[A]], I2[F[A]], I3[F[A]], I4[F[A]]) ?=> G[A])(using
170 | instances1: HkdFieldInstances[D, F, I1],
171 | instances2: HkdFieldInstances[D, F, I2],
172 | instances3: HkdFieldInstances[D, F, I3],
173 | instances4: HkdFieldInstances[D, F, I4]
174 | ): D[G] = instances4.zip(instances3.zip(instances2.zip(instances1.zip(df)))).mapK[G](
175 | [A] =>
176 | (x: ((((F[A], I1[F[A]]), I2[F[A]]), I3[F[A]]), I4[F[A]])) =>
177 | fg(x._1._1._1._1)(using x._1._1._1._2, x._1._1._2, x._1._2, x._2)
178 | )
179 | }
180 | }
181 |
182 | trait ContravariantK[D[_[_]]] extends InvariantK[D] {
183 | extension [F[_]](df: D[F])
184 | def contramapK[G[_]](gf: [A] => G[A] => F[A]): D[G]
185 | override def imapK[G[_]](fg: [A] => F[A] => G[A])(gf: [A] => G[A] => F[A]): D[G] = contramapK(gf)
186 | }
187 |
188 | object ContravariantK {
189 | inline def derived[D[_[_]]]: ContravariantK[D] = apply
190 | inline def apply[D[_[_]]]: ContravariantK[D] = summonInline
191 |
192 | given functionContravariantK[A, B]: ContravariantK[[F[_]] =>> F[A] => B] with {
193 | extension [F[_]](df: F[A] => B)
194 | override def contramapK[G[_]](gf: [C] => G[C] => F[C]): G[A] => B = { ga => df(gf(ga)) }
195 | }
196 |
197 | given phantomContravariantK[A]: ContravariantK[[_[_]] =>> A] with {
198 | extension [F[_]](df: A)
199 | override def contramapK[G[_]](gf: [B] => G[B] => F[B]): A = df
200 | }
201 |
202 | given adtContravariantK[D[_[_]]](using inst: => K11.Instances[ContravariantK, D]): ContravariantK[D] with {
203 | extension [F[_]](df: D[F])
204 | override def contramapK[G[_]](gf: [A] => G[A] => F[A]): D[G] =
205 | inst.map(df)(
206 | [t[_[_]]] =>
207 | (fieldContravariantK: ContravariantK[t], field: t[F]) =>
208 | fieldContravariantK.contramapK(field)[G](gf)
209 | )
210 | }
211 |
212 | given wrappedContravariantK[D[_[_]], H[_]](using
213 | contrad: ContravariantK[D],
214 | functorH: Functor[H]
215 | ): ContravariantK[[F[_]] =>> H[D[F]]] with {
216 | extension [F[_]](hdf: H[D[F]])
217 | override def contramapK[G[_]](gf: [A] => G[A] => F[A]): H[D[G]] =
218 | functorH.map(hdf) { (df: D[F]) => contrad.contramapK(df)[G](gf) }
219 | }
220 | }
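// Sketch using functionContravariantK above: contramapK adapts the input wrapper, e.g.
//   val renderList: List[Int] => String = _.mkString(",")
//   val renderOption: Option[Int] => String =
//     ContravariantK[[F[_]] =>> F[Int] => String].contramapK(renderList)([A] => (o: Option[A]) => o.toList)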
221 |
222 | trait ApplyK[D[_[_]]] extends FunctorK[D]:
223 | extension [F[_]](df: D[F]) {
224 | def map2K[G[_], H[_]](dg: D[G])(h: [A] => (F[A], G[A]) => H[A]): D[H]
225 |
226 | /** Alias for [[map2K]] */
227 | def zipWithK[G[_], H[_]](dg: D[G])(h: [A] => (F[A], G[A]) => H[A]): D[H] = map2K(dg)(h)
228 |
229 | def zipK[G[_]](dg: D[G]): D[F * G] = map2K(dg)([A] => (fa: F[A], ga: G[A]) => (fa, ga))
230 |
231 | def zip2K[G[_], H[_]](dg: D[G], dh: D[H]): D[[A] =>> (F[A], G[A], H[A])] =
232 | zipK(dg).zipK(dh).mapK([A] => (x: ((F[A], G[A]), H[A])) => (x._1._1, x._1._2, x._2))
233 |
234 | def zip3K[G[_], H[_], I[_]](dg: D[G], dh: D[H], di: D[I]): D[[A] =>> (F[A], G[A], H[A], I[A])] =
235 | zip2K(dg, dh).zipK(di).mapK([A] => (x: ((F[A], G[A], H[A]), I[A])) => (x._1._1, x._1._2, x._1._3, x._2))
236 |
237 | def zip4K[G[_], H[_], I[_], J[_]](dg: D[G], dh: D[H], di: D[I], dj: D[J]): D[[A] =>> (F[A], G[A], H[A], I[A], J[A])] =
238 | zip3K(dg, dh, di).zipK(dj).mapK([A] => (x: ((F[A], G[A], H[A], I[A]), J[A])) => (x._1._1, x._1._2, x._1._3, x._1._4, x._2))
239 |
240 | def unzipK[G[_]](dfg: D[F * G]): (D[F], D[G]) =
241 | (
242 | mapK(dfg)([A] => (pair: (F[A], G[A])) => pair._1),
243 | mapK(dfg)([A] => (pair: (F[A], G[A])) => pair._2)
244 | )
245 |
246 | def unzip3K[G[_], H[_]](dfgh: D[[A] =>> (F[A], G[A], H[A])]): (D[F], D[G], D[H]) =
247 | (
248 | mapK(dfgh)([A] => (pair: (F[A], G[A], H[A])) => pair._1),
249 | mapK(dfgh)([A] => (pair: (F[A], G[A], H[A])) => pair._2),
250 | mapK(dfgh)([A] => (pair: (F[A], G[A], H[A])) => pair._3)
251 | )
252 |
253 | def unzip4K[G[_], H[_], I[_]](dfghi: D[[A] =>> (F[A], G[A], H[A], I[A])]): (D[F], D[G], D[H], D[I]) =
254 | (
255 | mapK(dfghi)([A] => (pair: (F[A], G[A], H[A], I[A])) => pair._1),
256 | mapK(dfghi)([A] => (pair: (F[A], G[A], H[A], I[A])) => pair._2),
257 | mapK(dfghi)([A] => (pair: (F[A], G[A], H[A], I[A])) => pair._3),
258 | mapK(dfghi)([A] => (pair: (F[A], G[A], H[A], I[A])) => pair._4)
259 | )
260 | }
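// Sketch using the mono instance from the companion below: zipK pairs the two wrappers field-wise, e.g.
//   ApplyK[[F[_]] =>> F[Int]].zipK(Option(1))(List(2, 3))   // == (Some(1), List(2, 3)), of type (Option * List)[Int]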
261 |
262 | object ApplyK {
263 | inline def derived[D[_[_]]]: ApplyK[D] = apply
264 | inline def apply[D[_[_]]]: ApplyK[D] = summonInline
265 |
266 | given monoApplyK[X](using functorK: FunctorK[[F[_]] =>> F[X]]): ApplyK[[F[_]] =>> F[X]] with {
267 | export functorK.*
268 |
269 | extension [F[_]](df: F[X])
270 | override def map2K[G[_], H[_]](dg: G[X])(h: [A] => (F[A], G[A]) => H[A]): H[X] = h(df, dg)
271 | }
272 |
273 | given productApplyK[D[_[_]]](using functorK: FunctorK[D], pInst: K11.ProductInstances[ApplyK, D]): ApplyK[D] with {
274 | export functorK.*
275 |
276 | extension [F[_]](df: D[F])
277 | override def map2K[G[_], H[_]](dg: D[G])(h: [A] => (F[A], G[A]) => H[A]): D[H] =
278 | pInst.map2(df, dg)(
279 | [t[_[_]]] =>
280 | (fieldApplyK: ApplyK[t], fieldF: t[F], fieldG: t[G]) => fieldApplyK.map2K(fieldF)(fieldG)(h)
281 | )
282 | }
283 | }
284 |
285 | trait PureK[D[_[_]]] {
286 | def pureK[F[_]](gen: [A] => () => F[A]): D[F]
287 | }
288 | object PureK {
289 | inline def derived[D[_[_]]]: PureK[D] = apply
290 | inline def apply[D[_[_]]]: PureK[D] = summonInline
291 |
292 | given monoPureK[X]: PureK[[F[_]] =>> F[X]] with {
293 | override def pureK[F[_]](gen: [A] => () => F[A]): F[X] = gen()
294 | }
295 |
296 | given functionPureK[X, P]: PureK[[F[_]] =>> P => F[X]] with {
297 | override def pureK[F[_]](gen: [A] => () => F[A]): P => F[X] = { (p: P) => gen() }
298 | }
299 |
300 | given productPureK[D[_[_]]](using pInst: K11.ProductInstances[PureK, D]): PureK[D] with {
301 | override def pureK[F[_]](gen: [A] => () => F[A]): D[F] =
302 | pInst.construct(
303 | [t[_[_]]] => (fieldPureK: PureK[t]) => fieldPureK.pureK(gen)
304 | )
305 | }
306 |
307 | given wrappedPureK[D[_[_]], H[_]](using pd: PureK[D], pureH: Pure[H]): PureK[[F[_]] =>> H[D[F]]] with {
308 | def pureK[F[_]](gen: [A] => () => F[A]): H[D[F]] = pureH.pure(pd.pureK(gen))
309 | }
310 | }
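// Sketch using monoPureK above: pureK runs the generator once per field, e.g.
//   PureK[[F[_]] =>> F[Int]].pureK([A] => () => (None: Option[A]))   // == None, an Option[Int]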
311 |
312 | trait ApplicativeK[D[_[_]]] extends ApplyK[D] with PureK[D]:
313 | extension [F[_]](df: D[F])
314 | override def mapK[G[_]](fg: [A] => F[A] => G[A]): D[G] =
315 | df.map2K[[_] =>> Unit, G](pureK([_] => () => ()))([A] => (x: F[A], _: Unit) => fg(x))
316 |
317 | object ApplicativeK {
318 | inline def derived[D[_[_]]]: ApplicativeK[D] = apply
319 | inline def apply[D[_[_]]]: ApplicativeK[D] = summonInline
320 |
321 | /*given monoApplicativeK[A]: ApplicativeK[[F[_]] =>> F[A]] with {
322 | // noinspection TypeParameterShadow
323 | override def pureK[F[_]](gen: [A] => () => F[A]): F[A] = gen()
324 |
325 | extension [F[_]](df: F[A])
326 | //
327 | // noinspection TypeParameterShadow
328 | override def map2K[G[_], H[_]](dg: G[A])(h: [A] => (F[A], G[A]) => H[A]): H[A] =
329 | h(df, dg)
330 | }
331 |
332 | given productApplicativeK[D[_[_]]](using pInst: K11.ProductInstances[ApplicativeK, D]): ApplicativeK[D] with {
333 | override def pureK[F[_]](gen: [A] => () => F[A]): D[F] =
334 | pInst.construct(
335 | [t[_[_]]] => (fieldApplicativeK: ApplicativeK[t]) => fieldApplicativeK.pureK(gen)
336 | )
337 |
338 | extension [F[_]](df: D[F])
339 | override def map2K[G[_], H[_]](dg: D[G])(h: [A] => (F[A], G[A]) => H[A]): D[H] =
340 | pInst.map2(df, dg)(
341 | [t[_[_]]] =>
342 | (fieldApplicativeK: ApplicativeK[t], fieldF: t[F], fieldG: t[G]) => fieldApplicativeK.map2K(fieldF)(fieldG)(h)
343 | )
344 | }*/
345 |
346 | given applicativeKFromPureKAndApplyK[D[_[_]]](using p: PureK[D], a: ApplyK[D]): ApplicativeK[D] with {
347 | export p.*
348 | export a.{mapK as _, *}
349 | }
350 | }
351 |
352 | trait TraverseK[D[_[_]]] extends FunctorK[D] {
353 | extension [F[_]](df: D[F])
354 | def traverseK[G[+_], H[_]](f: [A] => F[A] => G[H[A]])(using Applicative[G]): G[D[H]]
355 |
356 | extension [F[+_], G[_]](dfg: D[F `.` G])
357 | def sequenceK(using Applicative[F]): F[D[G]] = dfg.traverseK([A] => (a: F[G[A]]) => a)
358 |
359 | extension [F[_]](df: D[F])
360 | override def mapK[G[_]](fg: [A] => F[A] => G[A]): D[G] = df.traverseK[cats.Id, G](fg)
361 | }
362 |
363 | object TraverseK {
364 | inline def derived[D[_[_]]]: TraverseK[D] = apply
365 | inline def apply[D[_[_]]]: TraverseK[D] = summonInline
366 |
367 | given monoTraverseK[X]: TraverseK[[F[_]] =>> F[X]] with {
368 | extension [F[_]](df: F[X])
369 | override def traverseK[G[+_], H[_]](f: [A] => F[A] => G[H[A]])(using Applicative[G]): G[H[X]] = f(df)
370 | }
371 |
372 | given adtTraverseK[D[_[_]]](using pInst: K11.Instances[TraverseK, D]): TraverseK[D] with {
373 | extension [F[_]](df: D[F])
374 | override def traverseK[G[+_], H[_]](f: [A] => F[A] => G[H[A]])(using Applicative[G]): G[D[H]] =
375 | pInst.traverse(df)(
376 | ([A, B] => (ga: G[A], ab: A => B) => Applicative[G].map(ga)(ab)).asInstanceOf[shapeless3.deriving.MapF[G]]
377 | )(
378 | [A] => (a: A) => Applicative[G].pure(a)
379 | )(
380 | [A, B] => (gg: G[A => B], ga: G[A]) => Applicative[G].ap(gg)(ga)
381 | )(
382 | [t[_[_]]] => (fieldTraversableK: TraverseK[t], field: t[F]) => fieldTraversableK.traverseK(field)(f)
383 | )
384 | }
385 |
386 | given wrappedTraverseK[D[_[_]], J[_]](using
387 | traversableD: TraverseK[D],
388 | traversableJ: cats.Traverse[J]
389 | ): TraverseK[[F[_]] =>> J[D[F]]] with {
390 | extension [F[_]](jdf: J[D[F]])
391 | def traverseK[G[+_], H[_]](f: [A] => F[A] => G[H[A]])(using Applicative[G]): G[J[D[H]]] =
392 | traversableJ.traverse(jdf) { (df: D[F]) => traversableD.traverseK(df)(f) }
393 | }
394 |
395 | }
396 |
397 | /*
398 | /** The categorical dual of TraverseK. */
399 | trait DistributiveK[D[_[_]]] extends FunctorK[D]:
400 | extension [F[_]: Functor, G[_]](fdg: F[D[G]])
401 | /** Distributes the effect [[F]] over the fields of the higher-kinded data type [[D]]. In other words, it turns an
402 | * [[F]]-effectful way of creating a `D[G]` into a pure `D[F . G]` with the effect [[F]] now wrapped around every field.
403 | */
404 | def distributeK: D[F `.` G]
405 |
406 | extension [F[_]: Functor, G[_]](fdb: F[D[G]])
407 | def cotraverseK(ct: [A] => F[G[A]] => F[A]): D[F] = mapK(distributeK(fdb))(ct)
408 |
409 | extension [A](ad: A => D[Id])
410 | def decomposeK: D[[R] =>> A => R] = distributeK(ad)
411 |
412 | extension [A](df: D[[R] =>> A => R])
413 | def recomposeK: A => D[Id] = { (a: A) => mapK(df)[Id]([B] => (f: A => B) => f(a)) }
414 |
415 | object DistributiveK {
416 | inline def derived[D[_[_]]]: DistributiveK[D] = apply
417 | inline def apply[D[_[_]]]: DistributiveK[D] = summonInline
418 |
419 | given monoDistributiveK[X](using functorK: FunctorK[[F[_]] =>> F[X]]): DistributiveK[[F[_]] =>> F[X]] with {
420 | export functorK.*
421 | extension [F[_]: Functor, G[_]](fdg: F[G[X]])
422 | def distributeK(): ([H[_]] =>> H[X])[F `.` G] = fdg
423 | }
424 |
425 | inline given adtDistributiveK[D[_[_]]](using
426 | m: Mirror.Product { type MirroredType[f[_]] = D[f] },
427 | inst: K11.ProductInstances[DistributiveK, D]
428 | ): DistributiveK[D] = new DistributiveK[D] {
429 | val fieldCount: Int = constValue[Tuple.Size[m.MirroredElemLabels]]
430 |
431 | extension [F[_]: Functor, G[_]](fdg: F[D[G]])
432 | override def distributeK(): D[F `.` G] = ??? // m.fromProduct( Seq.range(0, inst.))
433 |
434 | extension [F[_]](df: D[F])
435 | override def mapK[G[_]](fg: [A] => F[A] => G[A]): D[G] = ???
436 | }
437 |
438 | }*/
439 |
--------------------------------------------------------------------------------
/src/main/scala/HkdFor.scala:
--------------------------------------------------------------------------------
1 | package com.tschuchort.hkd
2 |
3 | import scala.compiletime.*
4 | import scala.deriving.Mirror
5 | import scala.util.chaining.*
6 | import scala.quoted.*
7 | import scala.quoted.runtime.StopMacroExpansion
8 | import scala.collection.MapView.Id
9 | import scala.annotation.tailrec
10 | import shapeless3.deriving.K11
11 | import shapeless3.deriving.K0
12 | import scala.reflect.ClassTag
13 | import scala.reflect.TypeTest
14 | import scala.annotation.targetName
15 | import izumi.reflect.Tag
16 | import shapeless3.deriving.internals.ErasedInstances
17 | import scala.util.NotGiven
18 | import scala.util.boundary
19 | import scala.annotation.experimental
20 | import scala.annotation.static
21 | import scala.runtime.Tuples
22 | import scala.runtime.TupleXXL
23 | import internal.{*, given}
24 |
25 | type HkdFor_[T] = [F[_]] =>> HkdFor[T, F]
26 |
27 | object HkdFor_ :
28 | def apply[T] = new PartialApplHelper[T]
29 |
30 | class PartialApplHelper[T] extends Dynamic:
31 | // noinspection TypeAnnotation
32 | inline def applyDynamic[F[_]](methodName: "apply")(inline args: Any*) =
33 | HkdFor.applyDynamic[T, F](methodName)(args*)
34 |
35 | // noinspection TypeAnnotation
36 | inline def applyDynamicNamed[F[_]](methodName: "apply")(inline args: (String, Any)*) =
37 | HkdFor.applyDynamicNamed[T, F](methodName)(args*)
38 |
39 | def unapply[F[_]](h: HkdFor_[T][F])(using m: Mirror.ProductOf[T]): Tuple.Map[m.MirroredElemTypes, F] =
40 | HkdFor.unapply[T, F](h)
41 |
42 | /** When an unapply method like this is called and `F` is not explicitly given, then `F` can not be inferred correctly:
43 | * {{{
44 | * HkdFor_[Foo][Option](???) match
45 | * case HkdFor_[Foo](a, b, c) => ??? // F is inferred as some unknown F$1 instead of Option
46 | * }}}
47 |     * This happens because a type test introduces a level of indirection and the inference algorithm cannot "carry" the F through
48 |     * multiple levels of indirection. We need to apply the type test ourselves, so that we remain in control of type inference.
49 | */
50 | transparent inline def unapply[S, F[_]](using inline m: Mirror.ProductOf[T])(using
51 | tt: TypeTest[com.tschuchort.hkd.HkdFor$package.HkdFor[S, F], com.tschuchort.hkd.HkdFor$package.HkdFor[T, F]])(
52 | h: com.tschuchort.hkd.HkdFor$package.HkdFor[S, F]
53 | ): Option[Any] | Boolean = ${ unapplyWithTypeTestImpl[T, S, F]('h, 'tt, 'm) }
54 |
55 | private def unapplyWithTypeTestImpl[T: Type, S: Type, F[_]: Type](using q: Quotes)(
56 | h: Expr[com.tschuchort.hkd.HkdFor$package.HkdFor[S, F]],
57 | tt: Expr[TypeTest[com.tschuchort.hkd.HkdFor$package.HkdFor[S, F], com.tschuchort.hkd.HkdFor$package.HkdFor[T, F]]],
58 | m: Expr[Mirror.ProductOf[T]]
59 | ): Expr[Option[Any]] | Expr[Boolean] =
60 | import q.reflect.{*, given}
61 |
62 | val fieldTypes = m match
63 | case '{
64 | type elems <: Tuple;
65 | $m: Mirror.Product { type MirroredElemTypes = `elems` }
66 | } =>
67 | tupleToTypes[elems].map { case '[field] => TypeRepr.of[F[field]] }
68 |
69 | fieldTypes.length match
70 | case 0 => '{ ${ tt }.unapply($h).isDefined }
71 | case l if l <= Tuples.MaxSpecialized =>
72 | type TupleL <: Tuple
73 | given Type[TupleL] = Symbol.classSymbol("scala.Tuple" + l).typeRef
74 | .appliedTo(fieldTypes.toList).asType.asInstanceOf
75 |
76 | '{
77 | ${ tt }.unapply($h).map { (x: com.tschuchort.hkd.HkdFor$package.HkdFor[T, F]) =>
78 | Tuples.fromProduct(x).asInstanceOf[TupleL]
79 | }: Option[TupleL]
80 | }
81 |
82 | case _ => report.errorAndAbort(s"Only types with 0 to ${Tuples.MaxSpecialized} fields are supported by this extractor", h)
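// Usage sketch (Foo21 is one of the case classes used in the test sources): HkdFor_ fixes the data type
// and leaves only the wrapper F open, which is convenient for summoning typeclass instances:
//   val h: HkdFor[Foo21, Option] = HkdFor_[Foo21][Option](Some(1), Some(2))
//   val fk = FunctorK[HkdFor_[Foo21]]   // instance for the partially applied HKD, as in the tests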
83 |
84 | opaque type HkdFor[T, F[_]] <: Dynamic & Product = HkdForImpl[T]
85 | // ^ opaque type can not have `Product & Selectable` bounds or the warning for "type test can not be checked at runtime" will not
86 | // appear for some reason, despite the fact that those interfaces are not `Matchable`. Subtype relations should instead be supplied
87 | // by given <:< instances or implicit conversions.
88 |
89 | extension [T, F[_]](self: HkdFor[T, F]) {
90 | // noinspection TypeAnnotation
91 | inline def applyDynamic(methodName: String)(inline args: Any*) =
92 | ${ copyDynamicNamedImpl[T, F]('self, 'methodName, 'args) }
93 |
94 | // noinspection TypeAnnotation
95 | inline def applyDynamicNamed(methodName: String)(inline args: (String, Any)*) =
96 | ${ copyDynamicNamedImpl[T, F]('self, 'methodName, 'args) }
97 | }
98 |
99 | private def copyDynamicNamedImpl[T: Type, F[_]: Type](
100 | thisExpr: Expr[HkdFor[T, F]],
101 | methodNameExpr: Expr[String],
102 | argsExpr: Expr[Seq[Any | (String, Any)]]
103 | )(using q: Quotes) =
104 | import q.reflect.{*, given}
105 | requireDynamicMethodName(expectedName = "copy", name = methodNameExpr, methodOwnerType = TypeRepr.of[HkdFor[T, F]])
106 |
107 | val paramNamesTypesValues: Seq[(Option[String], TypeRepr, Expr[Any])] = parseDynamicArgsExpr(argsExpr)
108 |
109 | val tMirror = Expr.summonOrAbort[Mirror.ProductOf[T]]
110 |
111 | val normalizedParams = checkAndNormalizeParams(
112 | expectedParamNamesTypesDefaults =
113 | TypeRepr.of[T].caseFieldsWithTypes.map { case (name, typeRep) =>
114 | val default = '{ $thisExpr.selectDynamic(${ Expr(name) })(using $tMirror) }
115 |
116 | typeRep.asType match
117 | case '[fieldType] => (name, TypeRepr.of[F[fieldType]], Some(default))
118 | },
119 | paramNamesTypesValues
120 | )
121 |
122 | val ctorArgs = Expr.ofSeq(normalizedParams.map { case (name, _, expr) => Expr.ofTuple((Expr(name), expr)) })
123 |
124 | val (tClass, tName, fName) =
125 | Expr.summonAllOrAbort[(HkdFor.TypeTag[T], TypeName[T], TypeName[F])]
126 |
127 | '{
128 | new HkdForImpl[T](${ ctorArgs }*)(using $tMirror, $tName, $tClass)
129 | .asInstanceOf[com.tschuchort.hkd.HkdFor$package.HkdFor[T, F]]
130 | }
131 |
132 | object HkdFor extends Dynamic:
133 |
134 | /*given typeTestStaticUpcast[T, S <: T, F[_], G[_]](using ImplicitsPriority.L4)(using
135 | HkdFor[S, F] <:< HkdFor[T, G]
136 | ): TypeTest[HkdFor[S, F], HkdFor[T, G]] with {
137 | override def unapply(x: HkdFor[S, F]): Some[x.type & HkdFor[T, G]] =
138 | println("typeTestStaticUpcast")
139 | Some(x.asInstanceOf[x.type & HkdFor[T, G]])
140 | }*/
141 |
142 | given typeTestDowncastDynamicTComplexF[T, S <: T, F[_], G[_]](using ImplicitsPriority.L3)(using sClass: TypeTag[S])(using
143 | com.tschuchort.hkd.HkdFor$package.HkdFor[S, G] <:< com.tschuchort.hkd.HkdFor$package.HkdFor[S, F]
144 | ): TypeTest[HkdFor[T, G], HkdFor[S, F]] with {
145 | override def unapply(x: HkdFor[T, G]): Option[x.type & HkdFor[S, G]] =
146 | x match // in this scope we know that HkdFor =:= (HkdForImpl <: Matchable)
147 | case _x: (x.type & HkdForImpl[?]) if (_x.tClass <:< sClass) =>
148 | Some(_x.asInstanceOf[x.type & HkdFor[S, F]])
149 | case _ => None
150 | }
151 |
152 | given typeTestDowncastDynamicTSimpleF[T, S <: T, F[_], G <: [A] =>> F[A]](using ImplicitsPriority.L4)(using
153 | sClass: TypeTag[S]
154 | ): TypeTest[com.tschuchort.hkd.HkdFor$package.HkdFor[T, G], com.tschuchort.hkd.HkdFor$package.HkdFor[S, F]]
155 | with {
156 | override def unapply(x: com.tschuchort.hkd.HkdFor$package.HkdFor[T, G])
157 | : Option[x.type & com.tschuchort.hkd.HkdFor$package.HkdFor[S, F]] =
158 | x match // in this scope we know that HkdFor =:= (HkdForImpl <: Matchable)
159 | case _x: (x.type & HkdForImpl[?]) if (_x.tClass <:< sClass) =>
160 | Some(_x.asInstanceOf[x.type & com.tschuchort.hkd.HkdFor$package.HkdFor[S, F]])
161 | case _ => None
162 | }
163 |
164 | given typeTestHkdForErasedF[T](using ImplicitsPriority.L2)(using
165 | tClass: TypeTag[T]
166 | ): TypeTest[HkdFor[Any, [_] =>> Any], HkdFor[T, [_] =>> Any]] with {
167 | override def unapply(x: HkdFor[Any, [_] =>> Any]): Option[x.type & HkdFor[T, [_] =>> Any]] =
168 | x match // in this scope we know that HkdFor =:= (HkdForImpl <: Matchable)
169 | case _x: (x.type & HkdForImpl[?]) if (_x.tClass <:< tClass) =>
170 | Some(_x.asInstanceOf[x.type & HkdFor[T, [_] =>> Any]])
171 | case _ => None
172 | }
173 |
174 | given typeTestHkdForErasedTErasedF(using ImplicitsPriority.L1): TypeTest[Matchable, HkdFor[Any, [_] =>> Any]] with {
175 | override def unapply(x: Matchable): Option[x.type & HkdFor[Any, [_] =>> Any]] =
176 | x match
177 | case _x: (x.type & HkdForImpl[?]) =>
178 | Some(_x.asInstanceOf[x.type & HkdFor[Any, [_] =>> Any]])
179 | case _ => None
180 | }
181 |
182 | inline given typeTestFallbackUnrelatedClasses[T, S, F[_], G[_]](using ImplicitsPriority.L1)(
183 | using
184 | NotGiven[T <:< S],
185 | NotGiven[S <:< T])
186 | : TypeTest[com.tschuchort.hkd.HkdFor$package.HkdFor[S, F], com.tschuchort.hkd.HkdFor$package.HkdFor[T, G]] =
187 | error(
188 | "this case is unreachable since " + showType[com.tschuchort.hkd.HkdFor$package.HkdFor[S, F]] + " and " + showType[
189 | com.tschuchort.hkd.HkdFor$package.HkdFor[T, G]] +
190 | " are unrelated. " + showType[T] + " is not a subtype of " + showType[S]
191 | )
192 |
193 | given unionOfSubtypesEqualsParent[T, F[_]](using ImplicitsPriority.L3)(
194 | using m: Mirror.SumOf[HkdFor[T, F]]): (HkdFor[T, F] =:= Tuple.Union[m.MirroredElemTypes]) =
195 | scala.<:<.refl.asInstanceOf
196 |
197 |   /** Subtype relationship where the type functions are simple subtypes of each other: for example, forall `A`, `Some[A] <:
198 |     * Option[A]` by definition.
199 | */
200 | given subtypeWithObviousF[T, S <: T, G[_], F <: [A] =>> G[A]](using ImplicitsPriority.L2): (HkdFor[S, F] <:< HkdFor[T, G]) =
201 | scala.<:<.refl.asInstanceOf
202 |
203 | /** Subtype relationship where the type functions are not related to each other by definition, for example two match type
204 | * families that happen to have the same result for all fields in [[S]], despite being different:
205 | * {{{
206 | * type F1[X] = X match
207 | * String => String
208 | * Int => Int
209 | * Boolean => Any
210 | *
211 | * type F2[X] = X match
212 | * String => String
213 | * Int => Int
214 | * Boolean => Nothing
215 | *
216 | * case class Foo(a: Int, b: String)
217 | * }}}
218 |     * Here, forall `A` in `Mirror.ProductOf[Foo].MirroredElemTypes = (Int, String)`, `F1[A] <: F2[A]`, but not `F1[Boolean] <:
219 |     * F2[Boolean]`.
220 | */
221 | inline given subtypeWithComplexF[T, S <: T, F[_], G[_]](using ImplicitsPriority.L1): (HkdFor[S, F] <:< HkdFor[T, G]) =
222 | // It seems that implicit parameters in general just mess up the type inference for the type parameters
223 | // of <:< givens. Thus, the summoning of all needed implicits has to be deferred to the function body with
224 |     // `summonInline` or `Expr.summon` so that type parameters are inferred independently. If no given
225 | // instance is found, there will be a compile-error and the compiler will automatically skip this given definition.
226 | ${ subtypeWithComplexFImpl[T, S, F, G] }
227 |
228 | private def subtypeWithComplexFImpl[T: Type, S <: T: Type, F[_]: Type, G[_]: Type](using
229 | q: Quotes
230 | ): Expr[HkdFor[S, F] <:< HkdFor[T, G]] =
231 | import q.reflect.{*, given}
232 |
233 | val allLeafs: Seq[Type[?]] = Expr.summonOrAbort[AdtHierarchyLeafs[S]] match
234 | case '{
235 | type leafTypes <: Tuple;
236 | $al: AdtHierarchyLeafs[S] { type MirroredLeafTypes = `leafTypes` }
237 | } =>
238 | tupleToTypes[leafTypes]
239 |
240 | allLeafs.foreach { case '[leafType] =>
241 | Expr.summonOrAbort[Mirror.ProductOf[leafType]] match
242 | case '{
243 | type elems <: Tuple;
244 | $m: Mirror.ProductOf[s] { type MirroredElemTypes = `elems` }
245 | } =>
246 | tupleToTypes[elems].foreach { case '[field] =>
247 | Implicits.search(TypeRepr.of[F[field] <:< G[field]]) match
248 | case iss: ImplicitSearchSuccess => ()
249 | case isf: ImplicitSearchFailure =>
250 | report.errorAndAbort(
251 | s"Could not prove that HkdFor[${Type.show[S]}, ${Type.show[F]}]" +
252 | s" <: HkdFor[${Type.show[T]}, ${Type.show[G]}] because ${Type.show[leafType]}, " +
253 | s"a possible subtype of ${Type.show[S]}, has a field of type ${Type.show[field]}." +
254 | "\n" + "-".repeat(30) + "\n" +
255 | isf.explanation +
256 | "\n" + "-".repeat(30) + "\n" +
257 | s"Hint: You may want to match HkdFor[${Type.show[S]}, ${Type.show[[_] =>> Any]}] and/or cast instead."
258 | )
259 | }
260 | }
261 |
262 | '{ scala.<:<.refl.asInstanceOf[HkdFor[S, F] <:< HkdFor[T, G]] }
263 |
264 | extension [T, F[_]](self: HkdFor[T, F])
265 | @experimental
266 | transparent inline infix def matchExhaustively(using m: Mirror.SumOf[HkdFor[T, F]])(
267 | inline matchExpression: HkdFor[T, F] => Any
268 | ) = ${ matchExhaustivelyImpl[T, F]('self, 'matchExpression, 'm) }
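// Usage sketch (Foo and its children are the test fixtures; case bodies elided): the argument must be a
// lambda whose body is a top-level match, e.g.
//   h matchExhaustively {
//     case x: HkdFor[Foo2, [_] =>> Any] => ???
//   }
// which emits a "Match may not be exhaustive" warning at compile time if some child of Foo is not covered.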
269 |
270 | private def matchExhaustivelyImpl[T: Type, F[_]: Type](
271 | self: Expr[HkdFor[T, F]],
272 | expr: Expr[HkdFor[T, F] => Any],
273 | m: Expr[Mirror.Of[HkdFor[T, F]]]
274 | )(using q: Quotes): Expr[Any] =
275 | import q.reflect.{*, given}
276 | val diagnosticPosition = Position(
277 | self.asTerm.pos.sourceFile,
278 | start = expr.asTerm.pos.start - ("matchExhaustively".length + 1),
279 | end = expr.asTerm.pos.start + 1)
280 |
281 | val expectedCases = m match
282 | case '{ $m: Mirror.ProductOf[s] } => Seq(TypeRepr.of[com.tschuchort.hkd.HkdFor$package.HkdFor[T, F]])
283 | case '{
284 | type elems <: Tuple;
285 | $m: Mirror.SumOf[s] { type MirroredElemTypes = `elems` }
286 | } =>
287 | tupleToTypeReprs[elems]
288 |
289 | val caseDefs = expr.asTerm match
290 | case Inlined(_,
291 | _,
292 | TypeApply(
293 | Select(
294 | Block(
295 | List(
296 | DefDef(
297 | lambdaName,
298 | List(TermParamClause(List(ValDef(lambdaParamName, lambdaParamType, _)))),
299 | _,
300 | Some(Match(matchVar @ Ident(matchVarName), cases))
301 | )
302 | ),
303 | Closure(Ident(closureName), _)
304 | ),
305 | "$asInstanceOf$"
306 | ),
307 | _
308 | ))
309 | if closureName == lambdaName && matchVarName == lambdaParamName =>
310 | cases
311 |
312 | case _ => report.errorAndAbort("Must be a lambda with top-level match expression", expr)
313 |
314 | def computeMatchedType(caseDefPattern: Tree): Seq[TypeRepr] =
315 | try
316 | caseDefPattern match
317 | case Wildcard() => List(TypeRepr.of[Any])
318 |
319 | case Alternatives(patterns) => patterns.flatMap(computeMatchedType)
320 |
321 | case TypedOrTest(_, tpt) =>
322 | assert(tpt.symbol.isType)
323 | List(tpt.tpe)
324 |
325 | case Bind(bindName, tr) =>
326 | assert(tr.symbol.isType)
327 | List(tr.symbol.typeRef.widenByName)
328 |
329 | case Unapply(fun /*@ Select(Apply(TypeApply(_, typeArgs), _), "unapply")*/, implicits, bindPatterns) =>
330 | fun.tpe.widenTermRefByName match
331 | // A MethodType is a regular method taking term parameters, a PolyType is a method taking type parameters,
332 | // a TypeLambda is a method returning a type and not a value. Unapply's type should be a function with no
333 | // type parameters, with a single value parameter (the match scrutinee) and with an Option[?] return type
334 | // (no curried function), thus it should be a MethodType.
335 | case methodType: MethodType =>
336 | methodType.resType.asType match
337 | // Also matches Some[] and None in an easy way
338 | case '[Option[tpe]] => TypeRepr.of[tpe] match
339 | case AndType(left, right)
340 | if methodType.paramTypes.nonEmpty && left =:= methodType.param(0) => List(right)
341 |
342 | case AndType(left, right)
343 | if methodType.paramTypes.nonEmpty && right =:= methodType.param(0) => List(left)
344 |
345 | case tpe => List(tpe)
346 |
347 | case '[tpe] => List(TypeRepr.of[tpe])
348 |
349 | case tpe: TypeRepr => report.errorAndAbort(
350 | s"Expected type of Unapply function to be MethodType. Was: ${Printer.TypeReprStructure.show(tpe)}"
351 | )
352 |
353 | case pattern =>
354 | report.errorAndAbort(s"Expected pattern of CaseDef to be either Alternative, TypedOrTest, Bind or Unapply. " +
355 | s"Was: ${Printer.TreeStructure.show(pattern)}")
356 | catch
357 | // Better error message for compiler bug. As usual the compiler is leaking internal implementation classes of Type
358 | // and then failing to match on them. This bug occurs when a CaseDef has a type error.
359 | case e: MatchError if e.getMessage().contains("dotty.tools.dotc.core.Types$PreviousErrorType") =>
360 | report.errorAndAbort("Macro could not be executed due to a previous error " +
361 | "in the match expression. Fix other errors first.",
362 | diagnosticPosition)
363 |
364 | val caseDefTypes = caseDefs.flatMap { caseDef =>
365 | if caseDef.guard.isDefined then List()
366 | else computeMatchedType(caseDef.pattern)
367 | }
368 |
369 | val uncoveredCases = expectedCases.map(_.asType).filterNot { case '[expectedCase] =>
370 | caseDefTypes.map(_.asType).exists { case '[caseDefType] =>
371 | (TypeRepr.of[expectedCase] <:< TypeRepr.of[caseDefType])
372 | || Expr.summon[expectedCase <:< caseDefType].isDefined
373 | }
374 | }
375 |
376 | if uncoveredCases.nonEmpty then
377 | val casesString = uncoveredCases.map { t =>
378 | "_: " + Printer.TypeReprCode.show(typeReprOf(t))
379 | }.mkString(", ")
380 |
381 | report.warning(
382 | s"Match may not be exhaustive.\n\nIt would fail on case: $casesString",
383 | diagnosticPosition
384 | )
385 |
386 | '{ $expr($self) }
387 |
388 | def unapply[T, F[_]](h: HkdFor[T, F])(using m: Mirror.ProductOf[T]): Tuple.Map[m.MirroredElemTypes, F] =
389 | Tuple.fromProduct(h).asInstanceOf[Tuple.Map[m.MirroredElemTypes, F]]
390 |
391 | // noinspection TypeAnnotation
392 | inline def applyDynamic[T, F[_]](methodName: "apply")(inline args: Any*) =
393 | ${ applyDynamicNamedImpl[T, F]('methodName, 'args) }
394 |
395 | // noinspection TypeAnnotation
396 | inline def applyDynamicNamed[T, F[_]](methodName: "apply")(inline args: (String, Any)*) =
397 | ${ applyDynamicNamedImpl[T, F]('methodName, 'args) }
398 |
399 | private def applyDynamicNamedImpl[T: Type, F[_]: Type](
400 | methodNameExpr: Expr[String],
401 | argsExpr: Expr[Seq[Any | (String, Any)]]
402 | )(using q: Quotes): Expr[com.tschuchort.hkd.HkdFor$package.HkdFor[T, F]] =
403 | import q.reflect.*
404 | import q.reflect.given // superfluous import helps IntelliJ code completion
405 |
406 | requireDynamicMethodName("apply", methodNameExpr, methodOwnerType = TypeRepr.of[this.type])
407 |
408 | val paramNamesTypesValues: Seq[(Option[String], TypeRepr, Expr[Any])] = parseDynamicArgsExpr(argsExpr)
409 |
410 | val expectedParamNamesWithTypes = TypeRepr.of[T].caseFieldsWithTypes.map { case (name, typeRep) =>
411 | typeRep.asType match
412 | case '[fieldType] => (name, TypeRepr.of[F[fieldType]])
413 | }
414 |
415 | val normalizedParams = checkAndNormalizeParams(
416 | expectedParamNamesWithTypes.map { case (name, tpe) => (name, tpe, /* default */ None) },
417 | paramNamesTypesValues
418 | )
419 |
420 | val ctorArgs = Expr.ofSeq(normalizedParams.map { case (name, _, expr) => Expr.ofTuple((Expr(name), expr)) })
421 |
422 | val (tMirror, tClass, tName, fName) =
423 | Expr.summonAllOrAbort[(Mirror.ProductOf[T], TypeTag[T], TypeName[T], TypeName[F])]
424 |
425 | '{
426 | new HkdForImpl[T](${ ctorArgs }*)(using $tMirror, $tName, $tClass)
427 | .asInstanceOf[com.tschuchort.hkd.HkdFor$package.HkdFor[T, F]]
428 | // ^ When referencing the fully qualified name of an opaque type, the compiler does not seem to resolve it immediately to
429 | // the RHS even if the opaque type is transparent in this scope. The fully qualified type is "as seen from outside the package".
430 | // Still, the RHS is inferred at the callsite of a transparent def returning an opaque type, but at least with this trick it
431 | // will be recognized as =:= to the opaque type.
432 | }
433 |
434 | class RefinementHelper[T] private ():
435 | // ^ Note: must be a class and not trait, so that it can be instantiated and cast in quoted code without generating
436 | // an anonymous class with its own Symbol that would be remembered by the instance.
437 |
438 | /** Contains the refined type generated by the macro for parameters [[T]], [[F]] */
439 | type Out[F[_]] <: HkdFor[T, F]
440 |
441 | object RefinementHelper:
442 | /** This function serves to introduce a level of indirection because quoted code can not call the private constructor of
443 | * [[RefinementHelper]], whereas calling private functions seems to work fine.
444 | */
445 | private def indirectlyCallCtor[T]() = new RefinementHelper[T]
446 |
447 | /** This given computes the field type information at compile time through a macro and saves it in [[RefinementHelper.Out]] as
448 | * a refinement of the structural type [[HkdForImpl]] which is the runtime representation of a generated HKD. The refinement
449 | * type inside [[RefinementHelper.Out]] is then applied to the runtime type [[HkdForImpl]] by an implicit conversion at the
450 | * use-site.
451 | */
452 | transparent inline given [T]: RefinementHelper[T] = ${ givenRefinementHelperImpl[T] }
453 |
454 | private def givenRefinementHelperImpl[T: Type](using q: Quotes): Expr[RefinementHelper[T]] =
455 | import q.reflect.*
456 | import q.reflect.given // superfluous import helps IntelliJ code completion
457 | try
458 | val outTypeRepr = TypeLambda(
459 | List("F"),
460 | boundsFn = _ =>
461 | List(
462 | TypeBounds.upper(TypeLambda(
463 | List("_"), // Symbol.freshName? Unfortunately still experimental, so I'll hope for the best.
464 | boundsFn = _ => List(TypeBounds.empty),
465 | bodyFn = _ => TypeRepr.of[Any]
466 | ))),
467 | bodyFn = lambdaF =>
468 | type F[_]
469 | given Type[F] = lambdaF.param(0).asType.asInstanceOf
470 |
471 | // Add refinements for case class properties
472 | TypeRepr.of[T].caseFieldsWithTypes.foldLeft(TypeRepr.of[HkdFor[T, F]]) {
473 | case (refinementTypeRepr, (fieldName, fieldTypeRepr)) =>
474 | fieldTypeRepr.asType match
475 | case '[fieldType] => Refinement(refinementTypeRepr, fieldName, TypeRepr.of[F[fieldType]])
476 | }
477 | )
478 |
479 | type OutGen[F[_]]
480 | given Type[OutGen] = outTypeRepr.asType.asInstanceOf
481 |
482 | // For some reason quoted code can not call private ctors, but private functions are a-ok!
483 | '{
484 | RefinementHelper.indirectlyCallCtor[T]().asInstanceOf[RefinementHelper[T] { type Out[F[_]] = OutGen[F] }]
485 | }
486 |
487 | catch
488 | case ex: Throwable =>
489 |         // Exceptions thrown during the expansion of given-macros are swallowed and the given is silently ignored unless the
490 |         // right compiler flags are set, so we always print the exception ourselves before rethrowing.
491 | Console.err.println(ex)
492 | throw ex
493 |
494 | /** An implicit conversion to apply the refinement and thus make field type information available.
495 | */
496 | // noinspection ConvertExpressionToSAM
497 | given applyRefinement[T, F[_]](using
498 | refinementHelper: RefinementHelper[T]
499 | ): Conversion[HkdFor[T, F], refinementHelper.Out[F]] = new Conversion:
500 | override def apply(x: HkdFor[T, F]): refinementHelper.Out[F] = x.asInstanceOf[refinementHelper.Out[F]]
501 |
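// Illustrative effect of the conversion above (hypothetical `Person` as before):
//
//   val p: HkdFor[Person, Option] = HkdFor[Person, Option](name = Some("Alice"), age = Some(42))
//   val n: Option[String] = p.name // compiles because `p` is first converted to the refined type
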
502 | /** Implicit conversion to apply the refinement to "indirect" subtypes of HkdFor[T, F], such as intersection types that are the
503 | * result of type tests. Of course HkdFor[T, F] has no real subtypes; the subtype relationships are established with <:<
504 | * instances, which is why the [[applyRefinement]] conversion can not be chosen automatically and we need this.
505 | */
506 | given applyRefinementIndirectSubtype[T, F[_], H](using c: H <:< HkdFor[T, F])(using
507 | refinementHelper: RefinementHelper[T]
508 | ): Conversion[H, refinementHelper.Out[F]] = new Conversion:
509 | override def apply(x: H): refinementHelper.Out[F] = c(x).asInstanceOf[refinementHelper.Out[F]]
510 |
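// Rough illustration for the given above (hypothetical `sealed trait Animal` with `case class Dog(...) extends Animal`):
// after a pattern match such as `case d: HkdFor[Dog, Option] => ...`, `d` may be typed as an intersection like
// `HkdFor[Animal, Option] & HkdFor[Dog, Option]`; this given applies the refinement to such a type via its
// `<:< HkdFor[Dog, Option]` evidence.
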
511 | /** The compiler complains that `makeFullyAppliedProductMirror.MirroredElemTypes` is not a constant type if we do not use this
512 |  * indirection to move the `Tuple.Map[m.MirroredElemTypes, F]` out of the given, even though the MirroredElemTypes _are_
513 |  * statically known. The reason is unclear.
514 | */
515 | trait FullyAppliedProductMirrorHelper[T, F[_]]:
516 | type MirroredElemTypes <: Tuple
517 | type FieldCount <: Int
518 | val fieldCount: FieldCount
519 |
520 | transparent inline given [T, F[_]](using m: Mirror.ProductOf[T]): FullyAppliedProductMirrorHelper[T, F] =
521 | new FullyAppliedProductMirrorHelper[T, F]:
522 | override type MirroredElemTypes = Tuple.Map[m.MirroredElemTypes, F]
523 | override type FieldCount = Tuple.Size[m.MirroredElemTypes]
524 | override val fieldCount: FieldCount = constValue[FieldCount]
525 |
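// For the hypothetical `Person` above, `FullyAppliedProductMirrorHelper[Person, Option]` resolves roughly to:
//
//   MirroredElemTypes = (Option[String], Option[Int])
//   FieldCount        = 2
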
526 | inline given makeFullyAppliedProductMirror[T, F[_]](using
527 | m: Mirror.ProductOf[T],
528 | h: FullyAppliedProductMirrorHelper[T, F],
529 | tClass: ClassTag[T]
530 | ): (K0.ProductGeneric[HkdFor[T, F]] & Mirror.ProductOf[HkdFor[T, F]] {
531 | type Kind = K0.type
532 | type MirroredElemTypes = h.MirroredElemTypes
533 | type MirroredElemLabels = m.MirroredElemLabels
534 | type MirroredLabel = "HkdFor"
535 | type MirroredType = HkdFor[T, F]
536 | type MirroredMonoType = HkdFor[T, F]
537 | }) = new Mirror.Product {
538 | type Kind = K0.type
539 | type MirroredType = HkdFor[T, F]
540 | type MirroredMonoType = HkdFor[T, F]
541 | type MirroredElemTypes = h.MirroredElemTypes
542 | type MirroredElemLabels = m.MirroredElemLabels
543 | type MirroredLabel = "HkdFor"
544 |
545 | override def fromProduct(p: Product): HkdFor[T, F] =
546 | new HkdForImpl[T](
547 |       Seq.range(0, h.fieldCount).map { i => (p.productElementName(i), p.productElement(i)) }*
548 | )
549 | }
550 |
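// Usage sketch for `makeFullyAppliedProductMirror` above (hypothetical `Person` as before):
//
//   val m = summon[Mirror.ProductOf[HkdFor[Person, Option]]]
//   val p = m.fromProduct((Some("Alice"), Some(42))) // rebuilds an HkdFor[Person, Option] from a tuple
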
551 | inline given makePartiallyAppliedProductMirror[T](using
552 | m: Mirror.ProductOf[T],
553 | tClass: ClassTag[T]
554 | ): (K11.ProductGeneric[HkdFor_[T]] & Mirror.Product {
555 |   // All of the expected types, particularly MirroredElemTypes and MirroredElemLabels, need to appear in the
556 |   // given's public type declaration; otherwise client libraries (e.g. shapeless3) cannot match the tuples because
557 |   // the type is not a compile-time static type. `transparent inline` cannot fix this problem, because the derivation
558 |   // methods that use this Mirror are themselves inline and thus do not benefit from the type narrowing of transparent defs.
559 |   type Kind = K11.type
560 |   type MirroredType[F[_]] = HkdFor_[T][F]
561 |   type MirroredMonoType = HkdFor_[T][[_] =>> Any]
562 | type MirroredElemTypes[F[_]] = Tuple.Map[m.MirroredElemTypes, F]
563 | type MirroredLabel = "HkdFor"
564 | type MirroredElemLabels = m.MirroredElemLabels
565 | }) = new Mirror.Product {
566 |   type Kind = K11.type
567 |   type MirroredType[F[_]] = HkdFor_[T][F]
568 |   type MirroredMonoType = HkdFor_[T][[_] =>> Any]
569 | type MirroredElemTypes[F[_]] = Tuple.Map[m.MirroredElemTypes, F]
570 | type MirroredLabel = "HkdFor"
571 | type MirroredElemLabels = m.MirroredElemLabels
572 | override def fromProduct(p: Product): HkdFor_[T][[_] =>> Any] =
573 | new HkdForImpl[T](
574 | Seq.range(0, constValue[Tuple.Size[m.MirroredElemLabels]])
575 |         .map { i => (p.productElementName(i), p.productElement(i)) }*
576 | )
577 | }
578 |
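// With `makePartiallyAppliedProductMirror` above in scope, shapeless3-style K11 derivation over the partially
// applied HkdFor_[T] becomes possible. A minimal sketch (hypothetical `Person` as before):
//
//   summon[K11.ProductGeneric[HkdFor_[Person]]] // resolvable thanks to the given above
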
579 | trait FullyAppliedCoproductMirrorHelper[T, F[_]]:
580 | type MirroredElemTypes <: Tuple
581 | type CasesCount <: Int
582 | val casesCount: CasesCount
583 |
584 | transparent inline given [T, F[_]](using m: Mirror.SumOf[T]): FullyAppliedCoproductMirrorHelper[T, F] =
585 | new FullyAppliedCoproductMirrorHelper[T, F]:
586 | override type MirroredElemTypes = Tuple.Map[m.MirroredElemTypes, [A] =>> com.tschuchort.hkd.HkdFor$package.HkdFor[A, F]]
587 | // ^ must use fully qualified name here to prevent dealiasing because we're in a transparent method
588 |
589 | override type CasesCount = Tuple.Size[m.MirroredElemTypes]
590 | override val casesCount: CasesCount = constValue[CasesCount]
591 |
592 | inline given makeFullyAppliedCoproductMirror[T, F[_]](using
593 | m: Mirror.SumOf[T],
594 | h: FullyAppliedCoproductMirrorHelper[T, F],
595 | casesClassTags: K0.CoproductInstances[TypeTag, T]
596 | ): (K0.CoproductGeneric[HkdFor[T, F]] & Mirror.SumOf[HkdFor[T, F]] {
597 | type Kind = K0.type
598 | type MirroredElemTypes = h.MirroredElemTypes
599 | type MirroredElemLabels = m.MirroredElemLabels
600 | type MirroredLabel = "HkdFor"
601 | type MirroredType = HkdFor[T, F]
602 | type MirroredMonoType = HkdFor[T, F]
603 | }) = new Mirror.Sum {
604 | type Kind = K0.type
605 | type MirroredType = HkdFor[T, F]
606 | type MirroredMonoType = HkdFor[T, F]
607 | type MirroredElemTypes = h.MirroredElemTypes
608 | type MirroredElemLabels = m.MirroredElemLabels
609 | type MirroredLabel = "HkdFor"
610 |
611 | override def ordinal(x: MirroredMonoType): Int =
612 | boundary {
613 | Seq.range(0, h.casesCount)
614 | .foreach { i =>
615 | val caseClassTag: TypeTag[?] = casesClassTags.inject(i)([t] => (classTag: TypeTag[t]) => classTag)
616 | if x.tClass <:< caseClassTag then
617 | boundary.break(i)
618 | }
619 |
620 | throw AssertionError(s"Could not match runtime type of value '${x.toString}'. " +
621 | s"The case types that I considered were ${showType[m.MirroredElemTypes]}")
622 | }
623 | }
624 |
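// Sketch of `ordinal` in `makeFullyAppliedCoproductMirror` above (hypothetical `sealed trait Animal` with cases
// `Dog` and `Cat`): for a value `x: HkdFor[Animal, Option]` that was created as an HkdFor[Dog, Option],
// `ordinal(x)` compares `x.tClass` against the TypeTag of each case and returns the index of `Dog` among
// `Animal`'s cases.
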
625 | inline given makePartiallyAppliedCoproductMirror[T](using
626 | m: Mirror.SumOf[T],
627 | casesClassTags: K0.CoproductInstances[TypeTag, T]
628 | ): (K11.CoproductGeneric[HkdFor_[T]] & Mirror.Sum {
629 |   // All of the expected types, particularly MirroredElemTypes and MirroredElemLabels, need to appear in the
630 |   // given's public type declaration; otherwise client libraries (e.g. shapeless3) cannot match the tuples because
631 |   // the type is not a compile-time static type. `transparent inline` cannot fix this problem, because the derivation
632 |   // methods that use this Mirror are themselves inline and thus do not benefit from the type narrowing of transparent defs.
633 |   type Kind = K11.type
634 |   type MirroredType[F[_]] = HkdFor_[T][F]
635 |   type MirroredMonoType = HkdFor_[T][[_] =>> Any]
636 | type MirroredElemTypes[F[_]] = Tuple.Map[m.MirroredElemTypes, [A] =>> com.tschuchort.hkd.HkdFor$package.HkdFor[A, F]]
637 | type MirroredLabel = "HkdFor"
638 | type MirroredElemLabels = m.MirroredElemLabels
639 | }) = new Mirror.Sum {
640 |   type Kind = K11.type
641 |   type MirroredType[F[_]] = HkdFor_[T][F]
642 |   type MirroredMonoType = HkdFor_[T][[_] =>> Any]
643 | type MirroredElemTypes[F[_]] = Tuple.Map[m.MirroredElemTypes, [A] =>> com.tschuchort.hkd.HkdFor$package.HkdFor[A, F]]
644 | type MirroredLabel = "HkdFor"
645 | type MirroredElemLabels = m.MirroredElemLabels
646 | override def ordinal(x: MirroredMonoType): Int =
647 | boundary {
648 | Seq.range(0, constValue[Tuple.Size[m.MirroredElemLabels]])
649 | .foreach { i =>
650 | val caseClassTag: TypeTag[?] = casesClassTags.inject(i)([t] => (classTag: TypeTag[t]) => classTag)
651 | if x.tClass <:< caseClassTag then
652 | boundary.break(i)
653 | }
654 |
655 | throw AssertionError(s"Could not match runtime type of value '${x.toString}'. " +
656 | s"The case types that I considered were ${showType[m.MirroredElemTypes]}")
657 | }
658 | }
659 |
660 | /** TypeTag used for runtime type checks; it wraps the underlying implementation to keep it hidden for future compatibility.
661 |  * (Cannot be an opaque type alias because we need wildcards like `TypeTag[?]`.)
662 | */
663 | class TypeTag[T] private (private val impl: izumi.reflect.Tag[T]) extends AnyVal:
664 | infix def <:<(tt: TypeTag[?]): Boolean = impl <:< tt.impl
665 |
666 | object TypeTag:
667 |   /** Indirection method to work around a restriction: an inline method cannot directly call a private constructor. */
668 | private def indirectlyCallCtor[T](impl: izumi.reflect.Tag[T]) = TypeTag(impl)
669 | inline given [T]: com.tschuchort.hkd.HkdFor.TypeTag[T] = indirectlyCallCtor(izumi.reflect.Tag.apply[T])
670 |
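// Sketch of a runtime conformance check with `TypeTag` above (hypothetical `sealed trait Animal` with
// `case class Dog(name: String) extends Animal`):
//
//   summon[TypeTag[Dog]] <:< summon[TypeTag[Animal]] // true
//   summon[TypeTag[Animal]] <:< summon[TypeTag[Dog]] // false
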
671 | /** The runtime representation of a generated HKD. On its own, this structural type carries no compile-time information about
672 |  * the fields it contains at runtime. That information is added later through an implicit conversion to a refinement type of
673 |  * [[HkdForImpl]], with the help of a transparent inline given that computes the refinement type at compile time via a macro.
674 | */
675 | transparent private class HkdForImpl[+T](elems: (String, Any)*)(using
676 | m: Mirror.ProductOf[T],
677 | tName: TypeName[T],
678 | val tClass: HkdFor.TypeTag[? <: T]
679 | ) extends Selectable, Product, Dynamic:
680 | type Underlying <: T
681 | private type UnderlyingFields = m.MirroredElemTypes
682 | private type UnderlyingLabels = m.MirroredElemLabels
683 |
684 | private val fields = Array.from(elems.map(_._2))
685 |
686 | override def canEqual(that: Any): Boolean = that match
687 | case that: HkdForImpl[?] => (that.tClass == this.tClass)
688 | case _ => false
689 |
690 | override def equals(that: Any): Boolean = that match
691 | case that: HkdForImpl[?] => (that.tClass == this.tClass) && (that.fields sameElements this.fields)
692 | case _ => false
693 |
694 | override def productArity: Int = fields.length
695 | override def productElement(n: Int): Any = fields(n)
696 |
697 | override def toString = s"HkdFor[${tName.value}, ?](${fields.mkString(", ")})"
698 |
699 | object HkdForImpl:
700 | extension [T](self: HkdForImpl[T])
701 |     // This has to be an extension method and cannot be an instance method due to compiler bug https://github.com/scala/scala3/issues/15413
702 | inline def selectDynamic(name: String)(using m: Mirror.ProductOf[T]): Any = ${ selectDynamicImpl[T]('self, 'name, 'm) }
703 |
704 | private def selectDynamicImpl[T: Type](using
705 | q: Quotes)(self: Expr[HkdForImpl[T]], name: Expr[String], m: Expr[Mirror.ProductOf[T]]): Expr[Any] =
706 | import q.reflect.{*, given}
707 |
708 | val fieldNames = m match
709 | case '{
710 | type elems <: Tuple;
711 | $m: Mirror.ProductOf[s] { type MirroredElemLabels = `elems` }
712 | } =>
713 | tupleToTypeReprs[elems].map {
714 | case ConstantType(StringConstant(fieldName)) => fieldName
715 | case t => throw AssertionError(s"Expected MirroredElemLabel of ${showType[T]} to be a String constant. " +
716 | s"Was: ${Printer.TypeReprStructure.show(t)}")
717 | }
718 |
719 | fieldNames.indexOf(name.valueOrAbort) match
720 |       case -1 => report.errorAndAbort(s"No such field: ${name.valueOrAbort}", name)
721 | case i => '{ $self.fields(${ Expr(i) }) }
722 |
--------------------------------------------------------------------------------