├── schema.txt
├── project
│   ├── build.properties
│   └── plugins.sbt
├── README.md
├── .gitignore
└── src
    └── main
        └── scala
            ├── playground.scala
            ├── etc
            │   ├── optimization.scala
            │   ├── inspections.scala
            │   └── polynomial.scala
            ├── welcome.scala
            ├── answers
            │   ├── optimizer.scala
            │   ├── derivation.scala
            │   ├── records.scala
            │   ├── units.scala
            │   └── staging.scala
            ├── 07-plugins.scala
            ├── 05-annotations.scala
            ├── 06-derivations.scala
            ├── 04-staging.scala
            ├── 01-inlining.scala
            ├── 03-reflection.scala
            └── 02-quotes.scala
/schema.txt: -------------------------------------------------------------------------------- 1 | name: String 2 | age: Int -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.8.2 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jdegoes/scala3-macros/HEAD/README.md -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("org.spire-math" % "sbt-javap" % "0.0.1") 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | project/target 3 | .bsp 4 | .bloop 5 | .metals 6 | .vscode 7 | project/project 8 | metals.sbt -------------------------------------------------------------------------------- /src/main/scala/playground.scala: -------------------------------------------------------------------------------- 1 | package playground 2 | /** 3 | * This file is a place where we can experiment with macros defined elsewhere. This is necessary 4 | * because we can't invoke a macro in the file it is defined in. 5 | * 6 | * To try a method from `sbt`, call: 7 | * sbt> runMain playground.run 8 | */ 9 | 10 | @main 11 | def run(): Unit = 12 | println("Hello world") 13 | 14 | object experimental: 15 | val _ = () -------------------------------------------------------------------------------- /src/main/scala/etc/optimization.scala: -------------------------------------------------------------------------------- 1 | package optimizingMatch 2 | 3 | import reflectionAndTrees.* 4 | 5 | @main 6 | def optimize(): Unit = 7 | val partialFn: String => Int = exercise4.optimize: 8 | case "alpha" => 1 9 | case "beta" => 2 10 | case "gamma" => 3 11 | case "delta" => 4 12 | case "epsilon" => 5 13 | case "zeta" => 6 14 | case "eta" => 7 15 | case "theta" => 8 16 | 17 | println(partialFn("zeta")) 18 | -------------------------------------------------------------------------------- /src/main/scala/welcome.scala: -------------------------------------------------------------------------------- 1 | object welcome 2 | // Please join and say hello: 3 | // 4 | // CHAT ROOM: https://discord.gg/bc62QxMWqd 5 | // 6 | // Please git clone and build: 7 | // 8 | // REPOSITORY: https://github.com/jdegoes/scala3-macros 9 | // 10 | // Daily Schedule: 11 | // 12 | // START : 10:00 AM Eastern Time 13 | // BREAK : 11:45 AM Eastern Time 14 | // RESUME : 12:15 Noon Eastern Time 15 | // END : 2:00 PM Eastern Time 16 | // 17 | // NOTE: Different schedule on Tuesday TBA! 
-------------------------------------------------------------------------------- /src/main/scala/etc/inspections.scala: -------------------------------------------------------------------------------- 1 | package treeInspections 2 | 3 | import reflectionAndTrees.* 4 | 5 | @main 6 | def test(): Unit = 7 | import exercise3.* 8 | var x = 10 9 | List( 10 | // inspect("hello"), 11 | // inspect(x), 12 | // inspect(42), 13 | // inspect("?"*3), 14 | // inspect(x*x), 15 | // inspect(()), 16 | // inspect(12: Double), 17 | // inspect("hello world".substring(6)), 18 | // inspect(List(1, 2, 3)), 19 | // inspect(List[Int](1, 2, 3)), 20 | // inspect(List[AnyVal](1, 2, 3)), 21 | // inspect { (x: Int) => x + 1 }, 22 | // inspect { (x => x + 1): (Int => Int) }, 23 | // inspect(Right[1, 2](2)), 24 | // inspect(for i <- 1 to 10 do println(i)), 25 | // inspect((1 to 10).foreach(println)), 26 | // inspect(x match { case 10 => true; case _ => false }), 27 | // inspect { 28 | // object Y { final val x: Int = 42 } 29 | // Y.x 30 | // } 31 | ).foreach(println(_)) 32 | -------------------------------------------------------------------------------- /src/main/scala/answers/optimizer.scala: -------------------------------------------------------------------------------- 1 | package reflectionAndTrees 2 | 3 | import scala.quoted.* 4 | 5 | object Optimizer: 6 | def optimizeMacro[T: Type](fn: Expr[PartialFunction[Any, T]])(using Quotes): Expr[Function[String, T]] = 7 | import quotes.*, reflect.* 8 | val (n, newPartialFunction) = fn.asTerm match 9 | case Inlined(_, _, Block(List(defDef), term)) => defDef match 10 | case DefDef(a, b, c, Some(Match(matchId, caseDefs))) => 11 | val cases: Map[String, Term] = 12 | caseDefs.map: 13 | case CaseDef(Literal(StringConstant(str)), None, block) => str -> block 14 | .to(Map) 15 | 16 | val n: Int = 17 | LazyList.from(cases.size).find: i => 18 | cases.map { c => math.abs(c(0).hashCode)%i }.to(Set).size == cases.size 19 | .get 20 | 21 | val newCaseDefs = cases.to(List).map: (str, block) => 22 | CaseDef(Literal(IntConstant(math.abs(str.hashCode)%n)), None, block) 23 | 24 | val newDefDef = DefDef.copy(defDef)(a, b, c, Some(Match(matchId, newCaseDefs))) 25 | n -> Block(List(newDefDef), term) 26 | 27 | '{${newPartialFunction.asExprOf[PartialFunction[Any, T]]}.compose { s => math.abs(s.hashCode)%${Expr(n)} } } -------------------------------------------------------------------------------- /src/main/scala/answers/derivation.scala: -------------------------------------------------------------------------------- 1 | package genericDerivation 2 | 3 | import scala.deriving.* 4 | import compiletime.* 5 | 6 | trait Show[-T]: 7 | def show(value: T): String 8 | 9 | extension [T: Show](value: T) def show: String = summon[Show[T]].show(value) 10 | 11 | object Show: 12 | given Show[Int] = _.toString 13 | given Show[String] = identity(_) 14 | 15 | private transparent inline def deriveProduct[Labels <: Tuple](tuple: Tuple): List[String] = 16 | inline tuple match 17 | case EmptyTuple => Nil 18 | case cons: (_ *: _) => cons match 19 | case (head *: tail) => inline erasedValue[Labels] match 20 | case _: (headLabel *: tailLabels) => inline valueOf[headLabel].asMatchable match 21 | case label: String => 22 | val value = summonInline[Show[head.type]].show(head) 23 | s"$label=$value" :: deriveProduct[tailLabels](tail) 24 | 25 | private transparent inline def deriveSum[Elements <: Tuple, Labels <: Tuple](ordinal: Int, value: Matchable): String = 26 | inline erasedValue[Elements] match 27 | case _: (headType *: 
tailType) => inline erasedValue[Labels] match 28 | case _: (headLabel *: tailLabels) => inline valueOf[headLabel].asMatchable match 29 | case label: String => 30 | if ordinal == 0 then 31 | value match 32 | case value: `headType` => summonInline[Show[headType]].show(value) 33 | else deriveSum[tailType, tailLabels](ordinal - 1, value) 34 | 35 | inline given derived[P](using mirror: Mirror.Of[P]): Show[P] = inline mirror match 36 | case given Mirror.ProductOf[P & Product] => 37 | (value: P) => value.asMatchable match 38 | case value: Product => 39 | val elements = deriveProduct[mirror.MirroredElemLabels](Tuple.fromProductTyped(value)) 40 | val typeName = valueOf[mirror.MirroredLabel] 41 | typeName+elements.mkString("(", ", ", ")") 42 | 43 | case given Mirror.SumOf[P] => 44 | (value: P) => 45 | ??? 46 | // val typeName = valueOf[mirror.MirroredLabel] 47 | // deriveSum[mirror.MirroredElemTypes, mirror.MirroredElemLabels](summon[Mirror.SumOf[P]].ordinal(value), value) -------------------------------------------------------------------------------- /src/main/scala/answers/records.scala: -------------------------------------------------------------------------------- 1 | package recordTypes 2 | 3 | import scala.quoted.* 4 | 5 | case class Rec(map: Map[String, Any]) extends Selectable: 6 | def selectDynamic(name: String): Any = map(name) 7 | 8 | type Person = Rec { 9 | def name: String 10 | def age: Int 11 | } 12 | 13 | val personRec = Rec(Map("name" -> "Jack Smith", "age" -> 71)).asInstanceOf[Person] 14 | 15 | class Schema[RecType <: Rec](schema: Map[String, String]): 16 | def make(values: Map[String, Any]): Option[RecType] = 17 | if schema.keys.forall(values.contains) then Some(Rec(values).asInstanceOf[RecType]) else None 18 | 19 | class SchemaType[+Name <: String & Singleton, Type]() 20 | object SchemaType: 21 | given SchemaType["String", String]() 22 | given SchemaType["Int", Int]() 23 | 24 | object Schema: 25 | def readSchema(schema: List[String], map: Map[String, String] = Map()): Map[String, String] = 26 | schema match 27 | case s"$key: $typ" :: tail => readSchema(tail, map.updated(key, typ)) 28 | case _ :: tail => readSchema(tail, map) 29 | case Nil => map 30 | 31 | transparent inline def parse(filename: String): Schema[?] 
= ${parseMacro('filename)} 32 | 33 | def parseMacro(filename: Expr[String])(using Quotes): Expr[Schema[?]] = 34 | import quotes.reflect.* 35 | val map = readSchema(scala.io.Source.fromFile(filename.valueOrAbort).getLines.to(List)) 36 | 37 | def mkType(keyTypes: List[(String, String)], tpe: TypeRepr): TypeRepr = 38 | keyTypes match 39 | case Nil => tpe 40 | case (name, typeName) :: tail => 41 | val typeNameRepr = ConstantType(StringConstant(typeName)).asType match 42 | case '[str] => 43 | Expr.summon[SchemaType[str & String & scala.Singleton, ?]] match 44 | case Some('{ ${_}: schemaType }) => Type.of[schemaType] match 45 | case '[ SchemaType[?, fieldType] ] => TypeRepr.of[fieldType] 46 | case other => report.errorAndAbort(s"Unrecognized field type: ${other}") 47 | case other => report.errorAndAbort(s"Unrecognized field type: ${other}") 48 | 49 | mkType(tail, Refinement(tpe, name, typeNameRepr)) 50 | 51 | mkType(map.to(List), TypeRepr.of[Rec]).asType match 52 | case '[rec] => '{Schema[rec & Rec](${Expr(map)})} -------------------------------------------------------------------------------- /src/main/scala/answers/units.scala: -------------------------------------------------------------------------------- 1 | package units 2 | 3 | import scala.quoted.* 4 | 5 | trait UnitType 6 | trait Metre[N <: Int] extends UnitType 7 | trait Kilogram[N <: Int] extends UnitType 8 | trait Second[N <: Int] extends UnitType 9 | 10 | extension (d: Double) 11 | def *[U <: UnitType](quantity: Quantity[U]) = Quantity[U](quantity.value*d) 12 | def /[U <: UnitType](quantity: Quantity[U]) = Quantity[U](quantity.value/d) 13 | 14 | case class Quantity[U <: UnitType](value: Double): 15 | def +(right: Quantity[U]): Quantity[U] = Quantity(value + right.value) 16 | def -(right: Quantity[U]): Quantity[U] = Quantity(value - right.value) 17 | 18 | transparent inline def *[V <: UnitType](right: Quantity[V]): Any = 19 | ${Quantity.multiply[U, V]('this, 'right, false)} 20 | 21 | transparent inline def /[V <: UnitType](right: Quantity[V]): Any = 22 | ${Quantity.multiply[U, V]('this, 'right, true)} 23 | 24 | object Quantity: 25 | def multiply[U <: UnitType: Type, V <: UnitType: Type](left: Expr[Quantity[U]], right: Expr[Quantity[V]], divide: Boolean)(using Quotes): Expr[Any] = 26 | import quotes.reflect.* 27 | 28 | def typeToMap(tpe: TypeRepr, parts: Map[TypeRepr, Int] = Map()): Map[TypeRepr, Int] = tpe match 29 | case AppliedType(repr, List(ConstantType(IntConstant(n)))) => 30 | Map(repr -> n) 31 | case AndType(left, right) => 32 | typeToMap(left) ++ typeToMap(right) 33 | 34 | def add(left: Map[TypeRepr, Int], right: Map[TypeRepr, Int]): Map[TypeRepr, Int] = 35 | right.headOption match 36 | case None => 37 | left 38 | case Some((repr, n)) => 39 | add(left.updated(repr, left.getOrElse(repr, 0) + (if divide then -n else n)), right.tail) 40 | 41 | def mkType(parts: List[(TypeRepr, Int)], acc: TypeRepr = TypeRepr.of[UnitType]): TypeRepr = 42 | parts match 43 | case Nil => 44 | acc 45 | case (repr, n) :: tail => 46 | mkType(tail, AndType(acc, AppliedType(repr, List(ConstantType(IntConstant(n)))))) 47 | 48 | val resultUnits = add(typeToMap(TypeRepr.of[U]), typeToMap(TypeRepr.of[V])).filter(_(1) != 0) 49 | val newValue = if divide then '{$left.value/$right.value} else '{$left.value*$right.value} 50 | 51 | if resultUnits.isEmpty then newValue else 52 | mkType(resultUnits.to(List)).asType match 53 | case '[ q ] => '{Quantity[q & UnitType]($newValue)} -------------------------------------------------------------------------------- 
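// Illustrative sketch, not a file from the repository: one way the `Quantity` arithmetic defined
// in units.scala above might be exercised from another file (macros can't be invoked in the file
// that defines them). The names `unitsDemo`, `distance` and `time` are assumptions made for this
// example; the transparent inline `*` and `/` macros compute the combined unit type at compile time.
//
// import units.*
//
// @main def unitsDemo(): Unit =
//   val distance = Quantity[Metre[1]](100.0)
//   val time     = Quantity[Second[1]](9.58)
//   val speed    = distance / time        // result type carries Metre[1] & Second[-1]
//   val area     = distance * distance    // result type carries Metre[2]
//   println(speed.value)                  // ≈ 10.44
//   // distance + time                    // would not compile: `+` requires matching units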
/src/main/scala/answers/staging.scala: -------------------------------------------------------------------------------- 1 | package answers.multistageProgramming 2 | 3 | import polynomials.* 4 | import scala.quoted.* 5 | 6 | given staging.Compiler = staging.Compiler.make(getClass.getClassLoader) 7 | 8 | enum Op: 9 | case Raise 10 | case Acc(k: Double) 11 | 12 | extension (poly: Polynomial) 13 | def ops: List[Op] = 14 | import Op.* 15 | 16 | def recur(n: Int, terms: List[(Int, Double)], ops: List[Op]): List[Op] = terms match 17 | case Nil => 18 | ops.reverse 19 | case (c, k) :: tail => 20 | if n == c then recur(n + 1, tail, Raise :: Acc(k) :: ops) 21 | else recur(n + 1, terms, Raise :: ops) 22 | 23 | recur(0, poly.terms.to(List).sortBy(_(0)), Nil) 24 | 25 | def runtimeCalc: (Double => Double) = staging.run: 26 | '{ 27 | (x: Double) => 28 | var a: Double = 0.0 29 | var p: Double = 1.0 30 | ${ 31 | poly.ops.foldLeft('{()}): 32 | case (insts, Op.Raise) => '{ $insts; p = p * x } 33 | case (insts, Op.Acc(k)) => '{ $insts; a = a + ${Expr(k)}*p } 34 | } 35 | a 36 | } 37 | 38 | def *(double: Double): Polynomial = new Polynomial(poly.terms.mapValues(_*double).to(Map)) 39 | 40 | def integrate(from: Double, to: Double, steps: Int): Double = 41 | val epsilon = (to - from)/steps 42 | val calc: (Double => Double) = (poly*epsilon).runtimeCalc 43 | //val calc: (Double => Double) = (poly*epsilon).apply(_) // Slow version, for comparison 44 | var sum: Double = 0.0 45 | var i = 0 46 | 47 | while i < steps do 48 | sum += calc(epsilon*i + from) 49 | i += 1 50 | 51 | sum 52 | 53 | object Macro 54 | // inline def calc(inline ops: List[Op]): Double => Double = ${calcMacro('ops)} 55 | 56 | // given FromExpr[Op] with 57 | // def unapply(expr: Expr[Op])(using Quotes): Option[Op] = expr match 58 | // case '{ Op.Raise } => Some(Op.Raise) 59 | // case '{ Op.Acc($k) } => k.value.map(Op.Acc(_)) 60 | // case _ => None 61 | 62 | // def calcMacro(ops: Expr[List[Op]])(using Quotes): Expr[Double => Double] = 63 | // '{ 64 | // (x: Double) => 65 | // var a: Double = 0.0 66 | // var p: Double = 1.0 67 | // ${ 68 | // ops.valueOrAbort.foldLeft('{()}): 69 | // case (insts, Op.Raise) => '{ $insts; p = p * x } 70 | // case (insts, Op.Acc(k)) => '{ $insts; a = a + ${Expr(k)}*p } 71 | // } 72 | // a 73 | // } 74 | -------------------------------------------------------------------------------- /src/main/scala/etc/polynomial.scala: -------------------------------------------------------------------------------- 1 | package polynomials 2 | 3 | import scala.quoted.* 4 | 5 | case class ParseError() extends Exception 6 | 7 | case class Polynomial(terms: Map[Int, Double]): 8 | override def toString(): String = 9 | def sup(n: Int): String = if n == 1 || n == 0 then "" else n.toString.map: 10 | case '-' => '⁻' 11 | case d => (d - '0' + '⁰').toChar 12 | 13 | terms.to(List).sortBy(-_(0)).zipWithIndex.map: 14 | case ((n, k), idx) => 15 | val p = if idx == 0 then (if k < 0 then "-" else "") else if k < 0 then " - " else " + " 16 | val k2 = math.abs(k) 17 | val c = if k2 == 1 && n > 0 then "" else if k == k.toInt then k2.toInt.toString else k2.toString 18 | val x = if n == 0 then "" else "x" 19 | val i = if k == 0 || (k == 1 && n != 0) then "" else sup(n) 20 | p+c+x+i 21 | .mkString 22 | 23 | def apply(value: Double): Double = terms.foldLeft(0.0): 24 | case (acc, (power, coefficient)) => acc + coefficient*math.pow(value, power) 25 | 26 | object Polynomial: 27 | def apply(poly: String): Polynomial = 28 | sealed trait Term 29 | 30 | case class 
Integer(minus: Boolean, number: Int) extends Term: 31 | def value: Int = if minus then -number else number 32 | 33 | case class Fraction(numerator: Int, denominator: Int) extends Term: 34 | def value: Double = numerator.toDouble/denominator 35 | 36 | case class Coefficient(coefficient: Double) extends Term 37 | 38 | case class Power(coefficient: Double, minus: Boolean, power: Int) extends Term: 39 | def value: Int = if minus then -power else power 40 | 41 | object Digit: 42 | def unapply(ch: Char): Option[Int] = if ch.isDigit then Some(ch - '0') else None 43 | 44 | @annotation.tailrec 45 | def recur(chars: List[Char], term: Term, terms: Map[Int, Double]): Map[Int, Double] = 46 | 47 | def put(coefficient: Double, power: Int): Map[Int, Double] = 48 | terms.updated(power, terms.getOrElse(power, 0.0) + coefficient) 49 | 50 | term match 51 | case integer@Integer(minus, number) => chars match 52 | case Nil => if number == 0 then throw ParseError() else put(integer.value, 0) 53 | case '/' :: tail => recur(tail, Fraction(integer.value, 0), terms) 54 | case 'x' :: tail => recur(tail, Coefficient(if integer.value == 0 then (if minus then -1.0 else 1.0) else integer.value), terms) 55 | case '-' :: tail => recur(tail, Integer(true, 0), if number == 0 then terms else put(integer.value, 0)) 56 | case '+' :: tail => recur(tail, Integer(false, 0), if number == 0 then terms else put(integer.value, 0)) 57 | case Digit(d) :: tail => recur(tail, Integer(minus, number*10 + d), terms) 58 | case _ => throw ParseError() 59 | 60 | case fraction@Fraction(numerator, denominator) => chars match 61 | case Nil => if denominator == 0 then throw ParseError() else put(fraction.value, 0) 62 | case Digit(d) :: tail => recur(tail, Fraction(numerator, denominator*10 + d), terms) 63 | case 'x' :: tail => recur(tail, Coefficient(fraction.value), terms) 64 | case '+' :: tail => recur(tail, Integer(false, 0), put(fraction.value, 0)) 65 | case '-' :: tail => recur(tail, Integer(true, 0), put(fraction.value, 0)) 66 | case _ => throw ParseError() 67 | 68 | case coefficient@Coefficient(value) => chars match 69 | case Nil => put(value, 1) 70 | case '^' :: tail => recur(tail, Power(value, false, 0), terms) 71 | case '-' :: tail => recur(tail, Integer(true, 0), put(value, 1)) 72 | case '+' :: tail => recur(tail, Integer(false, 0), put(value, 1)) 73 | case _ => throw ParseError() 74 | 75 | case power@Power(coefficient, minus, value) => chars match 76 | case Nil => put(coefficient, power.value) 77 | case Digit(d) :: tail => recur(tail, Power(coefficient, minus, value*10 + d), terms) 78 | case '-' :: tail => if value == 0 then recur(tail, Power(coefficient, true, 0), terms) 79 | else recur(tail, Integer(true, 0), put(coefficient, power.value)) 80 | case '+' :: tail => if value == 0 then throw ParseError() 81 | else recur(tail, Integer(false, 0), put(coefficient, power.value)) 82 | case _ => throw ParseError() 83 | 84 | Polynomial(recur(poly.toList.filter(_ != ' '), Integer(false, 0), Map())) 85 | -------------------------------------------------------------------------------- /src/main/scala/07-plugins.scala: -------------------------------------------------------------------------------- 1 | package compilerPlugins 2 | 3 | import dotty.tools.* 4 | import dotc.* 5 | import util.* 6 | import reporting.* 7 | import ast.Trees.* 8 | import ast.tpd.Tree 9 | import core.* 10 | import Contexts.* 11 | import Decorators.* 12 | import StdNames.* 13 | import plugins.* 14 | /** 15 | * COMPILER PLUGINS 16 | * 17 | * Compiler plugins allow us to insert 
phases into the compiler to perform additional steps during 18 | * a compilation. These phases can "see" the state of the code as it is transformed from source to 19 | * bytecode via ASTs, and can even make modifications to it. 20 | */ 21 | 22 | /** 23 | * EXERCISE 1 24 | * 25 | * Let's find out about the compilation process. Run the `scalac` binary with the `-Vphases` 26 | * option. You should see a list of phases. The latest version of the compiler has 102 phases, 27 | * though some of them run concurrently. 28 | * 29 | * The first phase is the `parser`, which creates an untyped syntax tree from sources. The second 30 | * phase is `typer`, which constructs a typed AST and symbol table from the untyped AST. This is 31 | * by far the biggest (and slowest) phase. The last phase is bytecode generation (`genBCode`), which 32 | * creates .class files. Almost everything between `typer` and `genBCode` performs transformations 33 | * on typed ASTs, migrating them step-by-step from a structure that's close to source-code, to a 34 | * structure that's closer to bytecode instructions. 35 | * 36 | * The best documentation for compiler is the Dotty source code, at `github:lampepfl/dotty`. Clone 37 | * the repository and try searching for one of the phase names, in double-quotes: each one exists as 38 | * a string literal in the place it is defined. 39 | */ 40 | 41 | /** 42 | * EXERCISE 2 43 | * 44 | * A compiler plugin can introduce one phase or many to the compilation pipeline. Each phase can 45 | * define where it should be executed, relative to other phases. 46 | * 47 | * To create a new plugin, we need to define a plugin class and at least one phase. 48 | * 49 | * 1. Add "org.scala-lang" %% "scala3-compiler" % "3.3.0-RC3" to your SBT build. 50 | * 2. Create a plugin phase class, `MyPhase`, which extends 51 | * `dotty.tools.dotc.plugins.PluginPhase` 52 | * 3. Implement the `phaseName` -- pick a name! 53 | * 4. Create a plugin class, `MyPlugin`, which extends `dotty.tools.dotc.plugins.StandardPlugin`. 54 | * 5. Implement the missing methods. `init` should be a single-element `List` containing a new 55 | * instance of the phase. 56 | * 6. Add a `println` with a welcome message to the `init` implementation 57 | * 7. Create a new file called `plugin.properties` in the project root, containing just one line: 58 | * ``` 59 | * pluginClass=plugins.MyPlugin 60 | * ``` 61 | * 8. Package the compiler plugin as a JAR file with the `jar` command in the project root: 62 | * ``` 63 | * mkdir -p lib 64 | * jar cf lib/plugin.jar plugin.properties -C target/scala-3.3.0-RC3/classes plugins 65 | * ``` 66 | * 9. Finally, launch the Scala REPL, setting the plugins directory to `lib`: 67 | * ``` 68 | * scala -Xpluginsdir lib 69 | * ``` 70 | * Make sure you use the same version of Scala as you used to compile the plugin! 71 | * 72 | * Now, every line you compile in the REPL should print the welcome message. We have now created 73 | * a very basic compiler plugin which we can modify, recompile, repackage, and test with the REPL. 74 | * 75 | * Try making a change to the welcome message a couple of times to test this iterative process. 76 | */ 77 | 78 | //class MyPlugin() extends dotty.tools.dotc.plugins.StandardPlugin 79 | //class MyPhase() extends dotty.tools.dotc.plugins.PluginPhase 80 | 81 | /** 82 | * EXERCISE 3 83 | * 84 | * We are now going to explore what is available to a compiler plugin. 
85 | * 86 | * Modify `MyPhase` to override the `runsBefore` and `runsAfter` members: 87 | * ``` 88 | * override val runsBefore = Set("typer") 89 | * override val runsAfter = Set("parser") 90 | * ``` 91 | * This will ensure our plugin is run as early as possible -- before the typer. 92 | * 93 | * And also, 94 | * ``` 95 | * override def transformUnit(tree: Tree)(using Context): Tree = tree 96 | * ``` 97 | * 98 | * Within the body of `transformUnit` we can explore, but we need to use tab-completion to navigate 99 | * the API. Take a look at the members of the following values inside `transformUnit`, such as: 100 | * - `ctx` 101 | * - `ctx.compilationUnit` 102 | * - `ctx.compilationUnit.source` 103 | * - `ctx.compilationUnit.untpdTree` 104 | * - `ctx.compilationUnit.tpdTree` 105 | * - `ctx.implicits` 106 | * and print them out. 107 | * 108 | * Now try modifying `runsBefore` and `runsAfter` to have the phase run at a different time. How 109 | * have the values changed? 110 | */ 111 | type Dummy = Unit -------------------------------------------------------------------------------- /src/main/scala/05-annotations.scala: -------------------------------------------------------------------------------- 1 | package macroAnnotations 2 | 3 | /** 4 | * MACRO ANNOTATIONS 5 | * 6 | * In Scala, it has always been possible to annotate a variety of different parts of the syntax with 7 | * @-annotations. Annotations can be created as classes which extend `StaticAnnotation`, and syntax 8 | * can be annotated without any effect. 9 | * 10 | * Unfortunately, it was never so easy to _use_ those annotations. They could be read at runtime 11 | * with Java reflection, or at compile-time with macros. But if you wanted to use an annotation to 12 | * indicate code that should be *modified*, it meant writing a compiler plugin... until now! 13 | * 14 | * Macro annotations provide a convenient entry point which gives the programmer full access to the 15 | * AST of the annotated definition (class, trait, def, val, etc) in an ordinary method, and allows 16 | * us to return new definitions -- maybe a modification of the original, or maybe more than one 17 | * definition. 18 | * 19 | * Implementing a macro annotation is as simple as implementing a class with one abstract member: 20 | * ``` 21 | * class MacroAnnotation: 22 | * def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] 23 | * ``` 24 | */ 25 | 26 | import scala.quoted.* 27 | import annotation.* 28 | import scala.collection.mutable.HashMap 29 | 30 | // This is an experimental feature, so we need to use the `@experimental` annotation! 31 | // Our macro annotation will be used as `@memo()` in front of a single-parameter `def`. We implement 32 | // it as a case class extending `MacroAnnotation`. 33 | @experimental 34 | case class memo() extends MacroAnnotation: 35 | 36 | // `MacroAnnotation` has a single abstract methad, `transform`. Note that `Quotes` is its first 37 | // parameter (and is contextual) since the type of the `tree` parameter is path dependent on the 38 | // `quotes` value that `Quotes` provides (and so is the return type). 39 | def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = 40 | 41 | // We import the familiar `quotes.reflect` package 42 | import quotes.reflect.* 43 | 44 | tree match 45 | 46 | // We start by matching on the type of definition. `DefDef` represents a `def`. The second 47 | // parameter matches exactly one parameter with one parameter block. 
We also bind the 48 | // return type and RHS of the `def`. 49 | case DefDef(name, TermParamClause(param :: Nil):: Nil, returnType, Some(rhs)) => 50 | (Ref(param.symbol).asExpr, rhs.asExpr) match 51 | 52 | // We get a reference to the parameter and the RHS, then match on these as `Expr`s 53 | case ('{ $paramRef: paramType }, '{ $rhs: rhsType }) => 54 | 55 | // We construct the type of the map which will hold the memoized values 56 | val cacheType = TypeRepr.of[HashMap[paramType, rhsType]] 57 | 58 | // We need to construct a symbol referring to a `val` which will hold the cache map 59 | // Note that we give it the same name with `Cache` appended, and we make it private. 60 | val cacheSymbol = Symbol.newVal(tree.symbol.owner, name+"Cache", cacheType, Flags.Private, Symbol.noSymbol) 61 | 62 | // We construct an expression of the initialization of the empty cache... 63 | val cacheRhs = '{ HashMap[paramType, rhsType]() }.asTerm 64 | 65 | // ...and construct a new `ValDef` representing a `val` which binds the `HashMap` to 66 | // the symbol we just created. 67 | val cacheVal = ValDef(cacheSymbol, Some(cacheRhs)) 68 | 69 | // Now we construct a new reference to the `val` we just constructed 70 | val cacheRef = Ref(cacheSymbol).asExprOf[HashMap[paramType, rhsType]] 71 | 72 | // We will replace the RHS of our original method with a lookup in the memoization map. 73 | val newRhs = '{ $cacheRef.getOrElseUpdate($paramRef, $rhs) }.asTerm 74 | 75 | // We construct a new `DefDef` to replace the original, but based on the original `tree` 76 | // Note that the only change is that we use `newRhs` as the method implementation. 77 | val newTree = DefDef.copy(tree)(name, TermParamClause(param :: Nil) :: Nil, returnType, Some(newRhs)) 78 | 79 | // We return two definitions: the `val` containing the cache map, and our rewritten 80 | // `def`. 81 | List(cacheVal, newTree) 82 | 83 | case _ => 84 | // If the macro is applied to something other than a single-parameter `def`, then we report 85 | // an error. 86 | report.errorAndAbort("@memo() cannot be used here") 87 | 88 | // A significant limitation of annotation macros is that any new definitions they introduce will not 89 | // be visible to the typechecker, so it's not possible for code elsewhere to refer to definitions 90 | // which aren't visible (statically) in the code. -------------------------------------------------------------------------------- /src/main/scala/06-derivations.scala: -------------------------------------------------------------------------------- 1 | package genericDerivation 2 | 3 | /** 4 | * GENERIC DERIVATION 5 | * 6 | * Generic derivation provides the mechanism to make type-classes composable. Each type-class needs 7 | * to have its "method" of composition specified, usually in one way for product types (like case 8 | * classes and tuples) and another way for coproduct or sum types (like enumerations and sealed 9 | * traits). 10 | * 11 | * Each generic derivation will be different, but the mechanism for defining those derivations will 12 | * typically have the same signature and be similar in form. 13 | */ 14 | 15 | 16 | object exercise1: 17 | /** 18 | * EXERCISE 1 19 | * 20 | * Look at the `Show` type-class defined below, with two `given` instances. Together with the 21 | * extension method, this makes it possible to call `.show` on an `Int` or `Double` and have a 22 | * String returned. 23 | * 24 | * Define an additional `Show` instance for `String` (with the obvious, trivial implementation). 25 | * 26 | * Now, look at `Stock`. 
Write a `Show` type-class instance for `Stock` which summons the type-class 27 | * instances for each of its three fields, and combines them in a String which includes the field 28 | * names. The output should look like this, for example: 29 | * ``` 30 | * Stock(symbol=META, price=179.16, quantity=600) 31 | * ``` 32 | * 33 | * The implementation should fit on one or two lines. Now let's rewrite it to be more complex! 34 | * Start the definition with the following definitions: 35 | * ``` 36 | * val tuple = Tuple.fromProduct(stock).toList 37 | * val typeclasses = (summon[Show[String]], summon[Show[Double]], summon[Show[Int]]).toList 38 | * val fieldNames = List("symbol", "price", "quantity") 39 | * val typeName = "Stock" 40 | * ``` 41 | * and write a recursive implementation that uses to all four values. You will need to use an 42 | * unsafe `asInstanceOf` cast (for now) to get it to compile. This will be the basis of a general 43 | * implementation for a `Show` type-class for any product type. 44 | */ 45 | trait Show[-T]: 46 | def show(value: T): String 47 | 48 | object Show: 49 | extension [T](value: T)(using show: Show[T]) def show: String = show.show(value) 50 | given Show[Int] = _.toString 51 | given Show[Double] = java.text.DecimalFormat("#.##").format(_).nn 52 | 53 | import exercise1.* 54 | 55 | object exercise2: 56 | /** 57 | * EXERCISE 2 58 | * 59 | * The derivation API, in `scala.deriving.*`, provides a representation of a type as an instance 60 | * of a `Mirror`, encoding some details of the type as type members of the `Mirror` type. Here is 61 | * the definition for the `Stock` case class, below: 62 | * ``` 63 | * type Mirror { 64 | * type MirroredMonoType = Stock 65 | * type MirroredType = Stock 66 | * type MirroredLabel = "Stock" 67 | * type MirroredElemTypes = (String, Double, Int) 68 | * type MirroredElemLabels = ("symbol", "price", "quantity") 69 | * } 70 | * ``` 71 | * Note that this is a type, but it provides (at compile-time) full details about the type 72 | * structure of the case class, including singleton string literal types for the type name and 73 | * field names. 74 | * 75 | * As a type, we can't just access these as `String`s. But Scala has a lot of very useful and 76 | * expressive syntax which works on *values* that we would like to use, that could make working 77 | * with a type such as `MirroredElem` easier. 78 | * 79 | * What if we could _pretend_ that we had an instance of that type (which we never actually use), 80 | * just so that we can work with those types more easily? This is what `erasedValue[T]` provides. 81 | * 82 | * `compiletime.erasedValue[T]` provides, for the purposes of typechecking, an "instance" of 83 | * `T` which we can match on, which will be erased after typechecking -- so we *can't* ever use 84 | * it! 85 | * 86 | * Take a look at the implementation of `length`. Finish implementing the type-level variant, 87 | *`typeLength`, which takes a type parameter instead of a type. You will need to specify patterns 88 | * that match the erased value's type but ignore its value. 89 | */ 90 | case class Stock(symbol: String, price: Double, quantity: Int) 91 | 92 | def length(tuple: Tuple): Int = 93 | tuple match 94 | case EmptyTuple => 0 95 | case head *: tail => 1 + length(tail) 96 | 97 | // transparent inline def typeLength[T <: Tuple]: Int = 98 | // inline compiletime.erasedValue[T] match 99 | // case ??? => 0 100 | // case ??? 
=> 1 + typeLength[tail] 101 | 102 | object exercise3: 103 | /** 104 | * EXERCISE 3 105 | * 106 | * We can get the mirror for a product type by summoning its `Mirror`. As long as the type is 107 | * a product-like type, the compiler will construct the `Mirror` instance. 108 | * 109 | * Now, write an inline method which counts the number of fields in the product type passed to it. 110 | * You will need to add a `using` parameter, and refer to the `typeLength` method from the 111 | * previous exercise. 112 | * 113 | * Now, add a value parameter to `noOfFields` so that we can call it with a value, and have its 114 | * type parameter _inferred_ from the value's type, for example, 115 | * ``` 116 | * noOfFields(Stock("MSF", 242.4, 210)) // returns 3 117 | * ``` 118 | */ 119 | 120 | import scala.deriving.* 121 | 122 | case class Stock(symbol: String, price: Double, quantity: Int) 123 | 124 | val mirror = summon[Mirror.Of[Stock]] // return type is a more precise subtype of `Mirror`. 125 | 126 | inline def noOfFields[P]: Int = ??? 127 | 128 | object exercise4: 129 | /** 130 | * EXERCISE 4 131 | * 132 | * Below is a full implementation of `fields`. Change the return type of `fields` to 133 | * `List[String]` and write an inline method with the same recursive structure as `typeLength` 134 | * which produces a list of the field names. 135 | * 136 | * Tip: `valueOf[T]` will get an instance of `T` if it's possible to, but an additional pattern 137 | * match may be needed to check that our instance of `T` is the type we need! 138 | */ 139 | 140 | import scala.deriving.* 141 | import scala.compiletime.* 142 | 143 | transparent inline def typeLength[T <: Tuple]: Int = 144 | inline erasedValue[T] match 145 | case _: EmptyTuple => 0 146 | case _: (head *: tail) => 1 + typeLength[tail] 147 | 148 | inline def fields[P](value: P)(using mirror: Mirror.Of[P]): Int = 149 | typeLength[mirror.MirroredElemLabels] 150 | 151 | object exercise5: 152 | /** 153 | * EXERCISE 5 154 | * 155 | * We can convert a product value, such as a case class, into a `Tuple` with, 156 | * `Tuple.fromProductTyped`, and in a transparent inline context, we can use pattern matching to 157 | * deconstruct the tuple recursively. 158 | * 159 | * After matching on `head *: tail`, we have available to us the values `head` and `tail`, as 160 | * well their types `head.type` and `tail.type`. We now have everything we need to summon a 161 | * `Show` instance for the `head` and show it, and then to recurse on the list. Finish off the 162 | * missing part of the implementation. 163 | * */ 164 | 165 | import exercise1.Show 166 | import scala.deriving.* 167 | import scala.compiletime.summonInline 168 | 169 | transparent inline def derive(tuple: Tuple): List[String] = 170 | inline tuple match 171 | case EmptyTuple => Nil 172 | case cons: (_ *: _) => cons match // We would like to perform these two matches 173 | case head *: tail => // in a single match, but it doesn't work! 174 | val shown: String = ??? 175 | shown :: derive(tail) 176 | 177 | inline def derived[P <: Product](using mirror: Mirror.ProductOf[P]): Show[P] = 178 | (value: P) => derive(Tuple.fromProductTyped(value)).mkString("(", ", ", ")") 179 | 180 | object exercise6: 181 | /** 182 | * EXERCISE 6 183 | * 184 | * Finally, using the methods `derive` and `fields` from the last two exercises and `zip`, plus 185 | * the provided value `typeName`, complete the implementation of `derived` below to show a case 186 | * class in the form `TypeName(field1=value1, field2=value2)`. 
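 * One possible shape for the final expression (a sketch, not necessarily the intended solution;
 * it assumes `fields` has already been changed to return a `List[String]` of field names, as
 * described in Exercise 4):
 * ```
 * typeName + fields(value).zip(derive(tuple)).map((k, v) => s"$k=$v").mkString("(", ", ", ")")
 * ```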
187 | * 188 | */ 189 | import scala.deriving.* 190 | import exercise5.derive 191 | import exercise4.fields 192 | 193 | inline def derived[P <: Product](using mirror: Mirror.ProductOf[P]): Show[P] = 194 | (value: P) => 195 | val tuple = Tuple.fromProductTyped(value) 196 | val typeName = valueOf[mirror.MirroredLabel] 197 | 198 | "" -------------------------------------------------------------------------------- /src/main/scala/04-staging.scala: -------------------------------------------------------------------------------- 1 | package multistageProgramming 2 | 3 | /** 4 | * STAGING 5 | * 6 | * Imagine that we wanted to compile a method at runtime, based on values that weren't available 7 | * at compile-time. Fragments of code could be included or excluded based on runtime values, and 8 | * calculations could be replaced with constant values. Staging provides this facility, reusing 9 | * much of the same machinery of quotes and splices. You can think of it as a more elegant way of 10 | * writing a program which a) generates some source files, b) starts a new compiler process to turn 11 | * them into bytecode, and c) dynamically loads them. 12 | * 13 | * There are a variety of different uses for staging, but most are motivated by performance. 14 | */ 15 | 16 | object exercise1 17 | /** 18 | * EXERCISE 1 19 | * 20 | * Take another look at `polynomial.scala`, in particular the `apply` method of the `Polynomial` 21 | * case class. What are the steps it must take to evaluate a polynomial? The coefficients are 22 | * all stored in a data structure (a `Map[Int, Double]`), and we must navigate that data structure 23 | * every time. If we were performing a numerical integration, we may need to do that 100,000 24 | * times! 25 | * 26 | * If the coefficients were all known at compile-time, we could write a macro which optimizes the 27 | * implementation. But what if those coefficients only become known at runtime? Thankfully, this 28 | * is where runtime staging comes in. 29 | * 30 | * Let's try to find a fast way to implement the calculation of the polynomial, `3.2x³ - x + 2.8`. 31 | * How does this look? 32 | * ``` 33 | * def calc(x: Double) = 34 | * var a = 0.0 35 | * var p = 1.0 36 | * a = a + p*2.8 // a = 2.8 37 | * p = p*x // p = x 38 | * a = a + p*(-1) // a = 2.8 - x 39 | * p = p*x // p = x² 40 | * p = p*x // p = x³ 41 | * a = a + p*3.2 // a = 2.8 - x + 3.2x³ 42 | * a 43 | * ``` 44 | * 45 | * Check that the calculation above is correct. It should be clear that any polynomial (with 46 | * terms of positive powers) can be written as a sequence of just two operations: one which 47 | * increases the power of x for the "current" term, and one which multiplies that x^n by a 48 | * coefficient parameter. The initial values are always the same, and we always return the 49 | * accumulator at the end. 50 | * 51 | * 1. Write an enumeration called `Op` representing these two operations 52 | * 2. Write an extension method on `Polynomial` called `ops` which generates a sequence of `Op`s 53 | * for the polynomial, based on the values in its `terms` `Map`. 54 | * 3. Write a runtime interpreter for `ops` to check that it gives the same answer as the `apply` 55 | * method. 
Its signature should be similar to: 56 | * ``` 57 | * @tailrec def calculate(todo: List[Op], a: Double, p: Double, x: Double): Double 58 | * ``` 59 | */ 60 | 61 | object exercise2 62 | /** 63 | * EXERCISE 2 64 | * 65 | * A while ago we saw how we could convert a value to an expression by wrapping it in `Expr(...)` 66 | * if there's a contextual `ToExpr` type-class instance available. The reverse operation is to be 67 | * able to call `.value` or `.valueOrAbort` on an existing `Expr[T]` instance and get an 68 | * `Option[T]` (or a `T`) while the macro is running. This requires a given `FromExpr[T]` 69 | * instance. 70 | * 71 | * Implement a `FromExpr[Op]` instance by implementing the method signature, 72 | * ``` 73 | * def unapply(value: Expr[Op])(using Quotes): Option[Op] 74 | * ``` 75 | * in a given `FromExpr`, by pattern matching on quotes. 76 | */ 77 | 78 | object exercise3 79 | /** 80 | * EXERCISE 3 81 | * 82 | * Write a macro implementation for the method, 83 | * ``` 84 | * inline def calc(inline ops: List[Op]): Double => Double = ${calcMacro('ops)} 85 | * ``` 86 | * This macro will compile a method to evaluate the polynomial from the `Op`s sequence, though 87 | * for now, it will still only be able to work on input which is statically known. 88 | * 89 | * In the body, first call `valueOrAbort` on the `Expr[List[Op]]`. This will need the given 90 | * `FromExpr[Op]` we created in Exercise 2 to be in scope. 91 | * 92 | * Now we need to implement the macro using quotes and splices. This macro will not construct 93 | * an expression like the previous examples: it will construct a function object 94 | * (`Double => Double`) once, which we can call many times. 95 | * 96 | * This will be our most complex quotes example! Start with the code below: 97 | * ``` 98 | * '{ 99 | * (x: Double) => 100 | * var p: Double = 1.0 101 | * var a: Double = 0.0 102 | * ${ 103 | * ops.foldLeft('{()}): 104 | * case (instrs, Op.Raise) => '{ $instrs; p = p*x } 105 | * } 106 | * a 107 | * } 108 | * ``` 109 | * 110 | * The code is incomplete, and needs one more case to be added. Try to understand what is being 111 | * constructed. 112 | * - it is an expression, so it's surrounded by quotes 113 | * - we are constructing a function, which takes a parameter, `(x: Double) =>` 114 | * - we need two mutable `var`s to hold our state while we compute the result with side-effecting 115 | * operations 116 | * - we construct those side-effecting operations _within_ a splice, with a fold over the `ops` 117 | * sequence 118 | * - the fold starts with an empty statement (`()`), and for each operation constructs a new 119 | * expression from the previous instructions followed by the instruction for the current `Op`. 120 | * 121 | * Carefully check the phase consistency of the variables, `x`, `p`, `a` and `instrs`, 122 | * particularly where those terms cross more than one splice/quote boundary. 123 | * 124 | * Add the second case to handle `Op.Acc(k)`, where `k` is the coefficient of the next term to 125 | * add. 126 | * 127 | * We should now be able to call `calc` with an explicit list of `Op`s, and it will compile a 128 | * function we can call at runtime. The method will run as fast as a hardcoded polynomial, thanks 129 | * to the macro, but it ONLY works if the coefficients are known at compile time. 130 | */ 131 | 132 | object exercise4 133 | /** 134 | * EXERCISE 4 135 | * 136 | * We will take this one step further with multistage programming. 
Let's define a method called 137 | * `runtimeCalc`, as an extension method on `Polynomial`, which will return a `Double => Double`. 138 | * This should not be an inline method. 139 | * 140 | * Import `scala.quoted.staging` if it is not already in scope. Now, copy the macro body from Exercise 3 141 | * into this method, but make the following changes: 142 | * 143 | * 1. Wrap the entire quoted body of `runtimeCalc` in `staging.run { ... }`. 144 | * 2. Replace `ops.valueOrAbort` with a direct reference to the `ops` value on the polynomial. 145 | * 146 | * 3. Provide a `given staging.Compiler` instance at the top-level for `staging.run`: 147 | * `given staging.Compiler = staging.Compiler.make(getClass.getClassLoader)` 148 | * 149 | * This is an ordinary method, which will be invoked at runtime, and it has access to normal 150 | * runtime values, like the polynomial. So we no longer need to work with an `Expr[List[Op]]` 151 | * because we have an actual `List[Op]` instance. The rest of the macro should be unchanged. 152 | * 153 | * In `playground.scala`, construct a couple of `Polynomial`s and compare the results of 154 | * calculating the value for a given `x` using the original `apply` method, and the new 155 | * `runtimeCalc` method. 156 | * 157 | * Remember, calling `runtimeCalc` compiles a new function every time it is invoked, which is 158 | * slow. But that function can then be called many times for different values of `x` without 159 | * recompilation! 160 | */ 161 | 162 | object exercise5: 163 | /** 164 | * EXERCISE 5 165 | * 166 | * Add an extension method on `Polynomial` which calculates the numerical integral of a polynomial 167 | * between a range of values for a number of steps, with the signature: 168 | * ``` 169 | * def integrate(start: Double, end: Double, steps: Int): Double 170 | * ``` 171 | * The implementation will need to call `runtimeCalc` to get a `calc` function, then apply it to each 172 | * of the evenly-spaced `x` values between `start` and `end`. These results should be summed, 173 | * and the result multiplied by `end - start` and divided by `steps`. 174 | * 175 | * Write a new polynomial and integrate it between -10 and 10 with 10000000 (ten million) steps. 176 | * How long does it take to complete? Switch back to using the standard `apply` method on 177 | * `Polynomial` in place of the compiled method. How long does it take now? 178 | */ -------------------------------------------------------------------------------- /src/main/scala/01-inlining.scala: -------------------------------------------------------------------------------- 1 | package inlineMethods 2 | /** 3 | * INLINE METHODS 4 | * 5 | * Scala 3 introduces the `inline` keyword which can "copy/paste" a method's implementation to the 6 | * call-site. Technically speaking, code inlining is a form of macros: indeed, the original "macro" 7 | * in C was #define, whose power subsumes inlining (every good C macro is a Scala 3 inline). 8 | * 9 | * Code inlining is not only the foundation of more powerful macros in Scala 3, but it is very 10 | * powerful in its own right, and can be used to improve performance and reduce code duplication in 11 | * cases that previously could not afford the cost of abstraction. 12 | * 13 | * In this section, you will learn about the `inline` keyword and the guarantees it provides. 14 | */ 15 | 16 | object exercise1: 17 | /** 18 | * EXERCISE 1 (10 mins) 19 | * 20 | * Run the main method of this object (in `sbt` use `runMain inlineMethods.exercise1`), and see how `raise` 21 | * appears at the top of the stack trace. 
Make `raise` an `inline` method so the stack trace does 22 | * not contain `raise`. 23 | */ 24 | def raise(): String = throw new Exception() 25 | def defer(): Unit = println(raise()) 26 | 27 | def main(args: Array[String]): Unit = defer() 28 | 29 | object exercise2: 30 | /** 31 | * EXERCISE 2 (20 mins) 32 | * 33 | * Use `javap` on the class `exercise2$` to compare the bytecode of `printMessage` and 34 | * `inlinePrintMessage`. You can do this in `sbt` with `javap inlineMethods.exercise2\$` or by running 35 | * `javap -c` directly in the `target` directory after compiling. 36 | */ 37 | def message(): String = "Hello world!" 38 | inline def inlineMessage(): String = "Hello world!" 39 | 40 | def printMessage(): Unit = println(message()) 41 | def inlinePrintMessage(): Unit = println(inlineMessage()) 42 | 43 | object exercise3a: 44 | /** 45 | * EXERCISE 3a (20 mins) 46 | * 47 | * "Constant folding" is a feature in Scala which has always existed, and performs arithmetic 48 | * operations on numeric types and concatenation on `String`s at compile-time. See how `a`, `b`, 49 | * `c`, and `d` are represented in bytecode. Calculate the actual value of `d`, and understand 50 | * the bytecode representation. 51 | */ 52 | final val a = 6*7 53 | final val b = 10 54 | final val c = a*b 55 | final val d = (4*4 + 2*a/7)*13 - (5 - 10)*11 - c 56 | 57 | object exercise3b: 58 | /** 59 | * EXERCISE 3b 60 | * 61 | * Experiment with removing `final` from some or all of the values, `a`, `b`, `c` and `d`. What 62 | * happens if you make them inline? What are the exact types of each value? 63 | * 64 | * (Tip: you can get the compiler to tell you an expression's type by forcing a type error!) 65 | */ 66 | val a = 6*7 67 | val b = 10 68 | val c = a*b 69 | val d = (4*4 + 2*a/7)*13 - (5 - 10)*11 - c 70 | 71 | object exercise4a: 72 | /** 73 | * EXERCISE 4a (30 mins) 74 | * 75 | * Examine the implementation of `raise`. Assuming n > 0, what does it calculate (very 76 | * inefficiently)? 77 | */ 78 | def raise(x: Int): Int = if x == 0 then 1 else raise(x - 1) + raise(x - 1) 79 | 80 | object exercise4b: 81 | /** 82 | * EXERCISE 4b 83 | * 84 | * The method `time` will give a rough estimate of the average time taken to execute `fn` `n` 85 | * times. Experiment with values of `n` and the `x` parameter of `raise` until running the main 86 | * method of this object gives reasonably stable output. Check how the running time varies as 87 | * `x` is increased or decreased. 88 | */ 89 | inline def raise(x: Int): Int = if x == 0 then 1 else raise(x - 1) + raise(x - 1) 90 | 91 | def time(n: Int)(fn: => Unit): Long = 92 | val t0 = System.nanoTime 93 | for i <- 1 to n do fn 94 | (System.nanoTime - t0)/n 95 | 96 | def main(args: Array[String]): Unit = println(exercise4b.time(100)(exercise4b.raise(7))) 97 | 98 | object exercise4c: 99 | /** 100 | * EXERCISE 4c 101 | * 102 | * What happens to the running time if we change `raise` into an `inline` method? How does it vary 103 | * when `x` is changed? How is `e` represented in bytecode? Note that you may need to set `sbt` to 104 | * use `javap` in verbose mode with: 105 | * 106 | * sbt> set javaOpts := List("-c", "-v") 107 | * 108 | * N.B. 2¹² = 4096 109 | */ 110 | def raise(x: Int): Int = if x == 0 then 1 else raise(x - 1) + raise(x - 1) 111 | final val e: Int = raise(12) 112 | 113 | /** 114 | * TRANSPARENT INLINE METHODS 115 | * 116 | * An `inline` method can additionally be marked as `transparent`, i.e. `transparent inline def`. 
117 | * These methods will be "interpreted" at compile time, where possible, and the return type of the 118 | * transparent method may be refined, if it can be proven to have a more specialized type 119 | * after inlining. 120 | * 121 | * They are called "transparent" because the compiler can "see inside" the implementation at the 122 | * call-site. 123 | */ 124 | 125 | object exercise5: 126 | /** 127 | * EXERCISE 5 (10 mins) 128 | * 129 | * Using the previous `inline` implementation of `raise`, make it `transparent` and give `result` 130 | * a precise, singleton literal type. Check what happens if `transparent` is removed. 131 | */ 132 | inline def raise(x: Int): Int = if x == 0 then 1 else raise(x - 1) + raise(x - 1) 133 | val result: Int = raise(10) 134 | 135 | object exercise6a: 136 | /** 137 | * EXERCISE 6a (30 mins) 138 | * 139 | * The extension method `pairs` will transform an `Iterable[T]` into an `Iterable[(T, T)]`, as 140 | * shown in the `numberPairs` example. Change the type of `numberPairs` to `List[(Int, Int)]` and 141 | * confirm that it is a type error. 142 | */ 143 | extension [T](xs: Iterable[T]) 144 | def pairs: Iterable[(T, T)] = xs.map { x => (x, x) } 145 | 146 | val numbers: List[Int] = List(1, 2, 3) 147 | val numberPairs: Iterable[(Int, Int)] = numbers.pairs 148 | 149 | object exercise6b: 150 | /** 151 | * EXERCISE 6b 152 | * 153 | * Using a `BuildFrom` contextual value, write a generic `genPairs` method which returns a value 154 | * of the same collection type: first remove the `???` and uncomment the body of `genPairs`, then 155 | * choose appropriate type parameters for `BuildFrom` so that it compiles. 156 | * 157 | * Check that the return type of `numberPairs` can be refined from `Iterable` to `List`. 158 | */ 159 | import scala.collection.BuildFrom 160 | 161 | extension [Coll[T1] <: Iterable[T1], T](xs: Coll[T]) 162 | def genPairs(using bf: BuildFrom[?, ?, Coll[(T, T)]]): Coll[(T, T)] = ??? /* 163 | val builder = bf.newBuilder(xs) 164 | xs.foreach { x => builder += (x -> x) } 165 | builder.result() 166 | */ 167 | 168 | val numbers: List[Int] = List(1, 2, 3) 169 | val numberPairs: Iterable[(Int, Int)] = numbers.genPairs 170 | 171 | object exercise6c: 172 | /** 173 | * EXERCISE 6c 174 | * 175 | * That was hard work! Go back to the definition of `pairs` we started with. Change the 176 | * type of `numberPairs` to `List[(Int, Int)]`. This should be a compile error. 177 | * 178 | * Now, add two "magic" keywords in front of `def pairs` to fix the compile error! 179 | * 180 | * If you have time, take a look at the bytecode for `numberPairs` in each implementation. 181 | */ 182 | extension [T](xs: Iterable[T]) 183 | def pairs: Iterable[(T, T)] = xs.map { x => (x, x) } 184 | 185 | val numbers: List[Int] = List(1, 2, 3) 186 | val numberPairs: Iterable[(Int, Int)] = numbers.pairs 187 | 188 | object example7a: 189 | /** 190 | * EXERCISE 7a (30 mins) 191 | * 192 | * The `rollDice` method will return a `String` if your two dice score 12. We will start with 193 | * fixed dice which always show a `6` and a `5`. 194 | * 195 | * Try refining the types of `rollDice` and `outcome` to a union of two singleton literal types. 196 | * If you write the strings correctly, the code should still compile. 197 | * 198 | * Now try removing one of the strings from the union type. (Remove the one which doesn't make 199 | * sense based on the numbers!) Does the code still compile? 
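 * For reference, the first refinement being asked for could look something like this (a sketch —
 * try it yourself before reading on):
 * ```
 * def rollDice(): "Winner!" | "Loser!" = if fixedDice == 12 then "Winner!" else "Loser!"
 * val outcome: "Winner!" | "Loser!" = rollDice()
 * ```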
200 | */ 201 | val fixedDice: Int = 6 + 5 202 | def rollDice(): String = if fixedDice == 12 then "Winner!" else "Loser!" 203 | 204 | val outcome: String = rollDice() 205 | 206 | object exercise7b: 207 | /** 208 | * EXERCISE 7b 209 | * 210 | * Now, uncomment `val outcome2` and experiment making the `rollDice2` method `inline` or 211 | * `transparent inline`. It looks like the compiler has all the information it needs to deduce the 212 | * precise return type is `"Loser!"`, but it needs a bit more cooperation from us! 213 | * 214 | * The `inline` keyword can also be used in the body of `rollDice2` to guarantee that inlining is 215 | * happening when we expect it. It can help to produce more helpful error messages. 216 | * 217 | * Add the `inline` keyword in front of `if`, and check the error message. Tweak the definition of 218 | * `fixedDice2` so that `outcome2` returns the singleton type, `"Loser!"`. */ 219 | 220 | val fixedDice2 = 6 + 5 221 | def rollDice2(): "Loser!" | "Winner!" = if fixedDice2 == 12 then "Winner!" else "Loser!" 222 | //val outcome2: "Loser!" = rollDice2() 223 | 224 | object exercise7c: 225 | /** 226 | * EXERCISE 7c 227 | * 228 | * Using the previous definition of `rollDice`, uncomment and change the definition of `rollDice3` 229 | * to replace the `inline if` with an `inline match`, without changing the behavior. (Note that 230 | * the scrutinee expression is sandwiched between `inline` and `match`.) 231 | */ 232 | inline val fixedDice3 = 6 + 5 233 | 234 | // transparent inline def rollDice3(): "Loser!" | "Winner!" = 235 | // inline if fixedDice3 == 12 then "Winner!" else "Loser!" 236 | // val outcome3: "Loser!" = rollDice3() 237 | 238 | object exercise7d: 239 | /** 240 | * EXERCISE 7d 241 | * 242 | * Now modify `rollDice4` to take two parameters representing the numbers scored on the two dice. 243 | * 244 | * Bonus enhancement: Make the parameters precise union types! 245 | * 246 | * Uncomment `outcome4` to check it compiles. What happens if you replace one of the `6`s with 247 | * `((math.random*6).toInt + 1)`? 248 | */ 249 | transparent inline def rollDice4(): "Loser!" | "Winner!" = 250 | inline if ??? == 12 then "Winner!" else "Loser!" 251 | 252 | // val outcome4: "Winner!" = rollDice4(6, 6) 253 | 254 | /** 255 | * BRANCH ELIMINATION 256 | * 257 | * When Scala reduces an inline `if` or `match` in a `transparent inline def`, the branches which 258 | * are not followed will be eliminated, i.e. erased from the compiled code. This means that code 259 | * appearing in these branches which would have compile-time side-effects will also be deleted. 260 | * 261 | * What are compile-time side-effects, though? We will see more later, but a useful example is 262 | * emitting a compile error. 263 | */ 264 | 265 | object exercise8a: 266 | /** 267 | * EXERCISE 8a (20 mins) 268 | * 269 | * Uncomment the code below and check the compiler error and position. Change the text and check 270 | * the compile error again. 271 | * 272 | * The `error` message is *always* failing to compile because it is never eliminated. Make the 273 | * method `inline`, and see where the error moves to. Change the parameter passed to 274 | * `unitInterval` to something else to make the compile error disappear. 275 | * 276 | * `error` must be used in an `inline` context to be useful! 
277 | */ 278 | import scala.compiletime.error 279 | // def unitInterval(value: Double): Double = 280 | // if value > 1.0 || value < 0.0 then error("number is not guaranteed to be in the unit interval!") 281 | // value 282 | 283 | // val n = unitInterval(1.5) 284 | 285 | object exercise8b: 286 | /** 287 | * EXERCISE 8b 288 | * 289 | * We can provide more useful error messages with `codeOf`, which "captures" the code (at 290 | * compile-time) as a `String`. 291 | * 292 | * Uncomment `val n` and see what the error is. Note that we see `1.5d` instead of `1.5`. The 293 | * captured code is a re-serialization of the AST; not the code itself. */ 294 | import scala.compiletime.{error, codeOf} 295 | 296 | inline def unitInterval(value: Double): Double = 297 | if value > 1.0 || value < 0.0 298 | then error("The value "+codeOf(value)+" is not guaranteed to be in the unit interval!") 299 | 300 | value 301 | 302 | //val n = unitInterval(1.5) 303 | 304 | object exercise8c: 305 | /** 306 | * EXERCISE 8c 307 | * 308 | * Try calling `unitInterval` with a method such as `math.random`. What error message do you see? 309 | * 310 | * Why is this? Note the call-site of `codeOf(value)` in the body of `unitInterval`. At this 311 | * position, the compiler cannot "see" the code of `value` from whichever call-site `unitInterval` 312 | * was called from. 313 | * 314 | * What happens if we make the parameter itself `inline`? 315 | * 316 | * Does `unitInterval(math.random)` compile now? If not, why not, since it always returns a value 317 | * between 0.0 and 1.0? 318 | */ 319 | import scala.compiletime.{error, codeOf} 320 | 321 | inline def unitInterval(value: Double): Double = 322 | if value > 1.0 || value < 0.0 323 | then error("The value "+codeOf(value)+" is not guaranteed to be in the unit interval!") 324 | 325 | value 326 | 327 | val m = unitInterval(0.5) 328 | 329 | object exercise8d: 330 | /** 331 | * EXERCISE 8d 332 | * 333 | * We can only compare `value` to `0.0` and `1.0` if its value is known statically. 334 | * Call the `compiletime.requireConst` method with `value` at the start of `unitInterval` and 335 | * see how it changes its behavior. 336 | */ 337 | 338 | import scala.compiletime.* 339 | 340 | inline def unitInterval(inline value: Double): Double = 341 | if value > 1.0 || value < 0.0 342 | then error("The value "+codeOf(value)+" is not guaranteed to be in the unit interval!") 343 | 344 | value 345 | 346 | 347 | object exercise9a: 348 | /** 349 | * EXERCISE 9a (15 mins) 350 | * 351 | * A less obvious form of conditional branching in Scala arises from contextual values 352 | * (implicits) which may be chosen statically, based on a type (which may be inferred). 353 | * 354 | * Read and understand the code below. Note the "shorthand" style for the two provided givens. 355 | * 356 | * Uncomment the last line, passing a `Double` to the `accept` method, and note the error message. 357 | * Implement a special `given` instance of type `Permitted[Double]` whose implementation calls the 358 | * `scala.compiletime.error` method with the custom error message, "No doubles, please!". Don't 359 | * forget that `error` can only be called in an `inline` context. 
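   *
   * One possible shape for that instance (a sketch, assuming `scala.compiletime.error` is
   * imported and that the compiler accepts an inline given alias here -- adjust if it complains):
   * ```
   * inline given Permitted[Double] = error("No doubles, please!")
   * ```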
360 | */ 361 | trait Permitted[T]() 362 | 363 | def accept[T](value: T)(using Permitted[T]): Unit = println(s"$value is allowed") 364 | 365 | given Permitted[Int]() 366 | given Permitted[String]() 367 | 368 | accept(1) 369 | accept("two") 370 | //accept(3.0) 371 | 372 | object exercise9b: 373 | /** 374 | * EXERCISE 9b 375 | * 376 | * Although implicit *conversions* are soft-deprecated in Scala 3, we can still use them to 377 | * produce some custom type-error messages. 378 | * 379 | * Provide an `inline given` instance of `Conversion[String, Int]` which causes the `val x` line 380 | * to report a custom compile error. 381 | */ 382 | import language.implicitConversions 383 | import scala.compiletime.error 384 | 385 | given Conversion[String, Int] = ??? 386 | 387 | val x: Int = "1" 388 | 389 | object exercise10a: 390 | /** 391 | * EXERCISE 10a (30 mins) 392 | * 393 | * Read and understand the implementation of `Show`, its instances and extension method. 394 | * 395 | * Uncomment and fix the implementation of `log` so that the `main` method runs without error. 396 | */ 397 | 398 | import scala.compiletime.* 399 | 400 | trait Show[T]: 401 | def show(value: T): String 402 | 403 | object Show: 404 | extension [T](value: T)(using show: Show[T]) def show: String = show.show(value) 405 | given Show[Int] = _.toString 406 | given Show[Double] = java.text.DecimalFormat("#.###").format(_).nn 407 | 408 | def log[T](value: T): Unit = ??? // println(value.show) 409 | 410 | def main(args: Array[String]): Unit = log(math.Pi) 411 | 412 | object exercise10b: 413 | /** 414 | * EXERCISE 10b 415 | * 416 | * Look at the type classes `Debug` and the instances which exist for `Int` and `Char`. Compare it 417 | * to `Show`, above. 418 | * 419 | * Uncomment the implementation of `log` and fix the compile error by adding a `using` parameter 420 | * (an implicit parameter) or a context bound on `T`. Comment out the calls to `log` which can't 421 | * possibly work. 422 | */ 423 | trait Debug[T]: 424 | def debug(value: T): String 425 | 426 | object Debug: 427 | extension [T](value: T)(using debug: Debug[T]) def debug: String = debug.debug(value) 428 | given Debug[Int] = int => s"Int($int)" 429 | given Debug[Char] = ch => s"'$ch'" 430 | 431 | def log[T](value: T): Unit = ??? //println(value.debug) 432 | 433 | def main(args: Array[String]): Unit = 434 | log(math.Pi) 435 | log(42) 436 | log('x') 437 | log("Hello world") 438 | 439 | object exercise10c: 440 | /** 441 | * EXERCISE 10c 442 | * 443 | * Using the same definition of `Debug` (imported from the previous exercise), try an alternative 444 | * method of making the body of `log` compile. Instead of adding a `using` parameter or context 445 | * bound, make `log` `inline`, and uncomment its body. Have we fixed it yet? 446 | * 447 | * Check the contents of the `scala.compiletime` package (check tab completion in Metals or the 448 | * REPL, read the Scaladocs, or look at the compiler source code in `github.com:lampepfl/dotty`) 449 | * to see if there's an alternative to `summon` which might work. Try it as a drop-in replacement. 450 | * 451 | * Finally, check what happens when you remove the `inline` modifier again. 452 | */ 453 | import exercise10b.Debug 454 | 455 | def log[T](value: T): Unit = ??? 
//println(summon[Debug[T]].debug(value)) 456 | 457 | def main(args: Array[String]): Unit = 458 | log(42) 459 | log('x') 460 | 461 | object exercise10d: 462 | /** 463 | * EXERCISE 10d 464 | * 465 | * There is a generalized version of `summonInline` called `summonFrom` which allows us to choose 466 | * a preference of different type class instances, based on availability -- with a fallback option 467 | * if no type class is available. 468 | * 469 | * Look at the implementation of `log`, below. The code which looks like a partial function inside 470 | * `summonFrom` is interpreted by a macro (and like `summonInline`, it must be called in an inline 471 | * context). This example should be read: "if we find a contextual instance of `Debug[T]`, use the 472 | * code `debug.debug(value)`, otherwise use `value.toString`. 473 | * 474 | * Try running the `main` method, and see how each line of output is produced. 475 | * 476 | * Now, add an additional `case` to the `log` to use a `Show[T]` type class, if one is available. 477 | * Which line of `main` output changes, and why? 478 | */ 479 | import exercise10a.Show 480 | import exercise10b.Debug 481 | import scala.compiletime.summonFrom 482 | 483 | inline def log[T](value: T): Unit = println: 484 | summonFrom: 485 | case debug: Debug[T] => debug.debug(value) 486 | case _ => value.toString 487 | 488 | def main(args: Array[String]): Unit = 489 | log(math.Pi) 490 | log(42) 491 | log('x') 492 | log("Hello world") 493 | 494 | object exercise10e: 495 | /** 496 | * EXERCISE 10e 497 | * 498 | * Here is an implementation of `log` which uses the extension methods for `Show` and `Debug`. But 499 | * it doesn't compile -- uncomment the RHS of each case to see why. 500 | * 501 | * `summonFrom` can find `Debug[T]` and `Show[T]` instances, but it doesn't make the contextual on 502 | * the RHS of each case, by default. However, Scala has new syntax to achieve this. Try replacing 503 | * the bound name of the type class instance, `show: Show[T]` with `given Show[T]` (with no name 504 | * binding). Do the same for `Debug[T]`. Does it compile? 505 | */ 506 | import exercise10a.Show 507 | import exercise10b.Debug 508 | import scala.compiletime.* 509 | 510 | inline def log[T](value: T): Unit = println: 511 | summonFrom: 512 | case debug: Debug[T] => ??? // value.debug 513 | case show: Show[T] => ??? // value.show 514 | case _ => value.toString 515 | 516 | object exercise11: 517 | /** 518 | * EXERCISE 11 (10 mins) 519 | * 520 | * Read and understand the simple inline method below, and definitions of `a` and `b`. We can see 521 | * that the compiler theoretically has all the information it would need to know `a`'s and `b`'s 522 | * types precisely. Change their definitions to more precise types, and check the compile errors. 523 | * 524 | * The `parse` method definition needs two additional modifier keywords to be added. With these, 525 | * it should compile. 526 | * 527 | * Bonus: Can you change the definition of `parse` slightly to persuade the compiler to make `a`'s 528 | * body conform to the precise singleton type, `Some["something"]` (not just `Some[String]`)? 
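   *
   * For reference, the two keywords go on the definition like this (a sketch -- the precise types
   * of `a` and `b`, and the bonus tweak, are left for you):
   * ```
   * transparent inline def parse(value: String): Option[String] =
   *   if value == "none" then None else Some(value)
   * ```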
529 | */ 530 | def parse(value: String): Option[String] = if value == "none" then None else Some(value) 531 | 532 | val a: Option[String] = parse("something") 533 | val b: Option[String] = parse("none") -------------------------------------------------------------------------------- /src/main/scala/03-reflection.scala: -------------------------------------------------------------------------------- 1 | package reflectionAndTrees 2 | 3 | import scala.quoted.* 4 | 5 | /** 6 | * REFLECTION AND TREES 7 | * 8 | * All the examples so far have worked on `Type` and `Expr` instances, which have been opaque black 9 | * boxes representing types and expressions respectively. They have few useful methods, and our main 10 | * tool for inspecting them has been pattern matching, which has granted us a limited ability to 11 | * deconstruct them. 12 | * 13 | * Scala 3's reflection capabilities offer a much deeper introspection into `Type`s and `Expr`s as 14 | * `TypeRepr`s and `Term`s, being the low-level AST elements that are used throughout the compiler. 15 | */ 16 | 17 | object exercise1: 18 | /** 19 | * EXERCISE 1 (20 mins) 20 | * 21 | * `inspect` is an uninteresting macro. Add a new parameter (with type `Any`) and a corresponding 22 | * parameter in `inspectMacro`. 23 | * 24 | * Call `.asTerm` on the `Expr[Any]` and print the result (at compile-time). Invoke the macro 25 | * with a parameter (anything!) in `playground.scala`, and see what gets printed. 26 | * 27 | * Try as many different parameter expressions as you can! 28 | */ 29 | inline def inspect(): Unit = ${inspectMacro()} 30 | 31 | def inspectMacro()(using Quotes): Expr[Unit] = 32 | import quotes.*, reflect.* 33 | '{()} 34 | 35 | object exercise2: 36 | /** 37 | * EXERCISE 2 (20 mins) 38 | * 39 | * Below is a simplistic macro. Call the macro in `playground.scala`, passing in a string. Change 40 | * the macro implementation to print the `term` value. This should show a representation of the 41 | * AST of the string parameter passed into `inspect`. 42 | * 43 | * Now try to pattern match on `term`. Extractors are available for different AST types in the 44 | * `quotes.reflect` object. The extractor for a string `Constant` is called `StringConstant`, but the 45 | * structure of `Term#toString` otherwise mirrors the structure of the extractors. 46 | * 47 | * `case Inlined(_, _, Literal(StringConstant(str))) =>` 48 | * 49 | * Check that the pattern matches successfully by printing the `str` value at compile-time. 50 | */ 51 | inline def inspect(string: String): String = ${inspectMacro('string)} 52 | 53 | def inspectMacro(string: Expr[String])(using Quotes): Expr[String] = 54 | import quotes.reflect.* 55 | val term = string.asTerm 56 | 57 | string 58 | 59 | object exercise3: 60 | /** 61 | * EXERCISE 3 (1 hour) 62 | * 63 | * Take a look at `github.com:lampepfl/dotty/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala` 64 | * for implementations of term extractors. This is currently our best documentation on reflection. 65 | * 66 | * Open up `inspections.scala` and run the main class, `treeInspections.test`. Examine the 67 | * implementation of `inspectMacro` below and understand how the output is being produced. 68 | * 69 | * First, without changing the output, restructure the pattern match to avoid repetition in the 70 | * three patterns. Split the match into different methods if that's helpful.
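   *
   * One way to factor out the repetition, inside `inspectMacro` (a sketch; `describeTerm` is a
   * made-up helper name):
   * ```
   * def describeTerm(term: Term): String = term match
   *   case Literal(StringConstant(str)) => s"string constant: $str"
   *   case Literal(IntConstant(int))    => s"int constant: $int"
   *   case Ident(name)                  => s"reference to $name"
   *   case other                        => s"???: $other"
   *
   * val matched: String = any.asTerm match
   *   case Inlined(_, _, term) => describeTerm(term)
   *   case term                => describeTerm(term)
   * ```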
71 | * 72 | * Now, for each `inspect` call in `inspections.scala`, match its tree using extractors (from the 73 | * Dotty source code link, above) and give a brief textual description of what was matched. Make 74 | * sure that we distinguish between each input! 75 | * 76 | */ 77 | inline def inspect(inline any: Any): String = ${inspectMacro('any)} 78 | 79 | def inspectMacro(any: Expr[Any])(using Quotes): Expr[String] = 80 | import quotes.reflect.* 81 | 82 | val matched: String = any.asTerm match 83 | case Inlined(_, _, Literal(StringConstant(str))) => s"string constant: $str" 84 | case Inlined(_, _, Literal(IntConstant(int))) => s"int constant: $int" 85 | case Inlined(_, _, Ident(name)) => s"reference to $name" 86 | case other => s"???: $other" 87 | 88 | Expr(matched) 89 | 90 | /** 91 | * Pattern matching is one of the most important tools for working with terms reflectively. It is 92 | * important not only to be familiar with the most common Scala ASTs node types, but also to know 93 | * how to recursively dig deeper when you encounter less familiar node types. That skill can be 94 | * broken down into three steps: 95 | * 96 | * 1. Knowing what you have (`println` is your friend!) 97 | * 2. Understanding what that structure is composed of (search in `QuotesImpl.scala`) 98 | * 3. Accessing those components (trial and error, usually) 99 | * 100 | * These steps can be applied recursively. 101 | */ 102 | 103 | object exercise4: 104 | /** 105 | * EXERCISE 4 (45 mins) 106 | * 107 | * Let's try constructing a new tree. Imagine we have a partial function implemented with a 108 | * pattern match over several cases, all of which are literal strings. Naively, the scrutinee 109 | * will be compared to each case in turn, until a match is found. 110 | * 111 | * But what if we could map every case by a single small, precomputed integer? The compiler could 112 | * optimize the pattern match as a "jump" in bytecode without trying to match on every case. 113 | * (We would have to accept that unexpected input might result in a match.) 114 | * 115 | * The high-level steps needed to implement this macro are as follows: 116 | * 117 | * 1. Convert the `fn` expression to a term and use pattern matching to get the `CaseDef`s 118 | * 2. Find all the `CaseDef`s that are string literals, and convert it to a `Map[String, Term]` 119 | * 3. Calculate a hash function that disambiguates every key in the map (see below). 120 | * 4. Construct new `CaseDef`s matching the integer of the hashed keys, and put these back into 121 | * the original `PartialFunction` - this is fine because its type allows `Int` keys, but 122 | * requires copying parts of the original tree. 123 | * 5. Convert the new `PartialFunction` to an `Expr` with `asExprOf[...]`, and use a quoted block 124 | * to `compose` it with a function which computes the the hash key from the `String` input. 125 | * 126 | * There are several ways we could compute a hash function for the input, with a lot of room for 127 | * optimization. We will use a simple approach: 128 | * 129 | * 1. Take the `hashCode` of all the `String`s, and take the absolute value (so it's positive) 130 | * 2. Starting with the `n`, the number of keys we have, calculate each key's hashcode modulo `n` 131 | * 3. Keep adding `1` to `n` until every key maps to a different value. 132 | * 133 | * Note that this does not scale well for large numbers of keys! But we are optimizing 134 | * time at runtime over time at compile-time and space at runtime. 
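   *
   * The hash-search step on its own, as ordinary (non-macro) code, might look like this sketch
   * (`findModulus` is a made-up name):
   * ```
   * def findModulus(keys: Set[String]): Int =
   *   LazyList.from(keys.size max 1)
   *     .find { n => keys.map(k => math.abs(k.hashCode) % n).size == keys.size }
   *     .get
   * ```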
135 | */ 136 | inline def optimize[T](inline fn: PartialFunction[Any, T]): Function[String, T] = 137 | ${Optimizer.optimizeMacro('fn)} 138 | 139 | 140 | /** 141 | * Type Representations 142 | * 143 | * A `TypeRepr` is a representation of a type we can inspect as a datatype, much like `Term` is a 144 | * datatype we can inspect, representing terms. Here's the relationship between the four 145 | * types representing types and terms and the quotes and reflection APIs 146 | * 147 | * | quotes API | reflect API 148 | * ------------+------------+------------- 149 | * term space | Expr[T] | Term 150 | * type space | Type[T] | TypeRepr 151 | * 152 | * The meaning of type parameters (or their absence) is important, particularly for the distinction 153 | * between `Type[T]` and `TypeRepr`. 154 | * 155 | * - a `Type[T]` always corresponds to a type `T` that is in scope (but in the type namespace) 156 | * - a `Type[T]` object is not useful (and doesn't make sense) without `T` 157 | * - if we have a `Type[T]`, then we must have some `T` type we can use in type positions, even if 158 | * that type is abstract 159 | * - a pattern match which binds a new type, say `t`, creates a new scope with both the type `t` 160 | * AND a contextual `Type[t]` instance; the type is abstract, but can still be composed in 161 | * types, etc 162 | * - a `TypeRepr` has no type parameter, so there's no immediate way to use it in type position 163 | * - but it is "just" a data structure 164 | * - there is overview of the hierarchy of `TypeRepr`s in the Dotty source code: 165 | * `github.com:lampepfl/dotty/library/src/scala/quoted/Quotes.scala` 166 | * 167 | * To convert between them, use: 168 | * - `expr.asTerm` 169 | * - `term.asExpr` or `term.asExprOf[T]` 170 | * - `typeRepr.asType` 171 | * - `TypeRepr.of[T]` given a contextual `Type[T]` in scope 172 | */ 173 | 174 | object exercise5: 175 | /** 176 | * EXERCISE 5 177 | * 178 | * Let's define a macro called `typeReflect`. It will just take one type parameter. In the body 179 | * of the macro below, get the `TypeRepr` instance for `T` and print it. Then, in 180 | * `playground.scala`, call `typeReflect` a couple of times with a selection of interesting types 181 | * of your choice. Compare the output from `toString` and `show` on the `TypeRepr` instance. 182 | * 183 | * Take a look at the AST type node types in 184 | * `github.com:lampepfl/dotty/library/src/scala/quoted/Quotes.scala` 185 | * 186 | * and try to get the macro to display some of these different type nodes by calling `typeReflect` 187 | * with different types. 188 | * 189 | * Now use tab-completion in Metals to see the members of `TypeRepr`. Ignore the methods which 190 | * use `Symbol`s for now (we will experiment with these in the next exercise), and experiment with 191 | * some of the other methods. In particular, try the following: 192 | * 193 | * - check whether a type is a type of function with `isFunctionType` 194 | * - check for equality with the `TypeRepr` of a concrete type using `=:=` 195 | * - check a subtype relationship with another `TypeRepr` with `<:<` 196 | * - get a list of type parameters with `typeArgs` 197 | * - widening a singleton type with `widen` 198 | * - resolving a type alias with `de-alias` 199 | * 200 | * Now, make a change to the definitions of `typeReflect` and `typeReflectMacro`: an a type bound 201 | * of `<: AnyKind` to the type parameter, `T`. This will allow us to call `typeReflect` with 202 | * higher-kinded types, like `Either` or `[T] =>> Map[T, T]`. 
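   *
   * The changed signatures would look roughly like this (a sketch, keeping the printing from the
   * earlier steps):
   * ```
   * inline def typeReflect[T <: AnyKind]: Unit = ${typeReflectMacro[T]}
   *
   * def typeReflectMacro[T <: AnyKind: Type](using Quotes): Expr[Unit] =
   *   import quotes.reflect.*
   *   println(TypeRepr.of[T].show)
   *   '{()}
   * ```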
203 | * 204 | * Now, we can try one more experiment: 205 | * - pass in a type constructor to `typeReflect` and in the macro apply a concrete type to it 206 | * 207 | */ 208 | inline def typeReflect[T] = ${typeReflectMacro[T]} 209 | 210 | def typeReflectMacro[T: Type](using Quotes): Expr[Unit] = 211 | import quotes.reflect.* 212 | '{()} 213 | 214 | /** 215 | * The types and terms we encounter in an AST are generally references to types and terms defined 216 | * elsewhere in the source or in dependent libraries. The entities themselves are described by 217 | * `Symbol`s, which can provide us with most of the information the compiler has about those 218 | * entities. 219 | */ 220 | 221 | object exercise6: 222 | /** 223 | * EXERCISE 6 (45 minutes) 224 | * 225 | * Let's explore symbols. Symbols contain a huge amount of information on a variety of different 226 | * entities that exist at compile-time, and the best way to understand them is to explore them. 227 | * Symbols are constructed by the compiler during typechecking. We can't create our own Symbols. 228 | * 229 | * First, let's create a macro called `explore` in which we can explore symbols. The implementation 230 | * below gets symbols for the parameter value and its type. Use tab-completion to find some 231 | * interesting members of `Symbol`, and `println` them in the macro body. 232 | * 233 | * Then, in `playground.scala`, invoke the `explore` macro a couple of times, passing in different 234 | * parameter values. Add more `println` statements in the macro for different properties, and add 235 | * more invocations of `explore` with different parameters, and compare and contrast! 236 | * 237 | * Note that many methods of `Symbol` return other `Symbol`s (or `List`s of `Symbol`s). This 238 | * allows us to navigate the relations between different entities. 239 | * 240 | * Here are a few interesting things to get started: 241 | * - `sym.declaredMethods` 242 | * - `sym.declaredMethod("apply")` 243 | * - `sym.fullName` 244 | * - `sym.flags` (check also the `Flags` object) 245 | * - `sym.companionClass.annotations` 246 | * - `sym.caseFields` 247 | */ 248 | 249 | inline def explore[T](value: T): Unit = ${exploreMacro[T]('value)} 250 | 251 | def exploreMacro[T: Type](value: Expr[T])(using Quotes): Expr[Unit] = 252 | import quotes.reflect.* 253 | val typeSymbol: Symbol = TypeRepr.of[T].typeSymbol 254 | val termSymbol: Symbol = value.asTerm.symbol 255 | println(typeSymbol.companionClass) 256 | '{()} 257 | 258 | object exercise7: 259 | /** 260 | * EXERCISE 7 (20 minutes) 261 | * 262 | * Let's write a simple macro which takes a single type parameter, and returns the companion 263 | * object for that type. Below is the stub for macro, `companion`. 264 | * 265 | * 1. Write the signature for `companionMacro` based on the signature of `companion`. 266 | * 2. Update `companion` to invoke `companionMacro`. 267 | * 3. Make sure we import everything from `quotes.reflect` 268 | * 4. Get the `TypeRepr` for the type parameter, `T`. 269 | * 5. Get the `typeSymbol` for the `TypeRepr[T]` 270 | * 6. Call `companionModule` on the `Symbol` 271 | * 7. Construct a `Ref` to the symbol 272 | * 8. Convert the `Ref` into an expression with `asExpr` 273 | */ 274 | 275 | transparent inline def companion[T]: Any = ??? 276 | 277 | 278 | object exercise8: 279 | /** 280 | * EXERCISE 8 (1 hour) 281 | * 282 | * Scala 3 introduces a new feature which provides us with record types. Scala will allow us to 283 | * define structural types with typed fields. 
Unlike `Dynamic`, Scala will only allow us to access 284 | * the fields defined in the type -- we just have to define a way to resolve a field name (as a 285 | * String) to a value for our particular type of record. 286 | * 287 | * Take a look at the definition of `Rec`. It inherits from `Selectable` which is a magic 288 | * trait! `selectDynamic` is the method that will be used to access fields for this record. So 289 | * we need to be sure that the `map` value contains all the fields we expect. 290 | * 291 | * 1. Create a new structural subtype of `Rec`, called `Person`, with fields `name` (a String) 292 | * and `age` (an Int) 293 | * 2. Create a new instance of the `Rec` case class containing a `Map` with keys `"name"` and 294 | * `"age"`, mapped to appropriate values. 295 | * 3. Use `asInstanceOf` to cast the `Rec` instance to `Person`. 296 | * 4. Try to access `name` and `age` on the instance of `Person`. 297 | * 298 | * Now, see if you can break it. What happens if you try to access a field called `birthday`? 299 | * What happens if the map does not contain one of the fields? What if the map contains the field 300 | * but it has the wrong runtime type? 301 | * 302 | * Understand where record types bring new safety (compared to a `Map`, say) and where they don't. 303 | */ 304 | 305 | case class Rec(map: Map[String, Any]) extends Selectable: 306 | def selectDynamic(name: String): Any = map(name) 307 | 308 | /** 309 | * EXERCISE 8b 310 | * 311 | * Remember that a `transparent` method can return a value of a more precise type than its 312 | * declared return type? We can use this to construct new record types at compile-time which 313 | * get their structure from runtime values -- such as a schema. 314 | * 315 | * We are going to use a `Map[String, String]` to represent a schema in the form 316 | * ``` 317 | * name: String 318 | * age: Int 319 | * ``` 320 | * 321 | * Below is an implementation of a method to read such a schema. Write a macro which takes a 322 | * filename String as a parameter and returns a `Map[String, String]` containing the schema, 323 | * using the `readSchema` method already provided. 324 | * 325 | * This macro will allow us to write a literal filename in source code, and have the compiler 326 | * turn it into a `Map[String, String]`, all at compile-time. (As long as it can read the file!) 327 | */ 328 | 329 | def readSchema(schema: List[String], map: Map[String, String] = Map()): Map[String, String] = 330 | schema match 331 | case s"$key: $typ" :: tail => readSchema(tail, map.updated(key, typ)) 332 | case _ :: tail => readSchema(tail, map) 333 | case Nil => map 334 | 335 | val schema = readSchema(scala.io.Source.fromFile("schema.txt").getLines.to(List)) 336 | 337 | inline def readSchemaFile(filename: String): Map[String, String] = ??? 338 | 339 | /** 340 | * EXERCISE 8c 341 | * 342 | * We can define a `Schema` class as a factory for creating new record types. Update the macro 343 | * from Exercise 6b to construct a new Schema instance. But what should its type parameter be? 344 | * It should be the subtype of `Rec` representing the schema, but for now, just leave it as `Rec`, 345 | * though, of course, this means that we are not yet using any of the information that we have 346 | * just read from our schema -- that will be the next step! 
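   *
   * A rough shape for the macro pair (a sketch; `makeSchemaMacro` is a made-up name and error
   * handling is left out):
   * ```
   * inline def makeSchema(filename: String): Schema[Rec] = ${makeSchemaMacro('filename)}
   *
   * def makeSchemaMacro(filename: Expr[String])(using Quotes): Expr[Schema[Rec]] =
   *   val schema = readSchema(scala.io.Source.fromFile(filename.valueOrAbort).getLines.to(List))
   *   '{ new Schema[Rec](${Expr(schema)}) {} }
   * ```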
347 | */ 348 | 349 | abstract class Schema[RecType <: Rec](schema: Map[String, String]): 350 | def make(values: Map[String, Any]): RecType = Rec(values).asInstanceOf[RecType] 351 | 352 | inline def makeSchema(filename: String): Schema[Rec] = ??? 353 | 354 | /** 355 | * EXERCISE 8d 356 | * 357 | * In order to specify the `RecType` when we construct our `Schema`, we need to programmatically 358 | * construct the AST of the refined type. 359 | * 360 | * To do this, update the `makeSchema` macro to include a tail recursive function which iterates 361 | * over each key in the `schema` map, progressively adding refinements to the previous type. The 362 | * base case will be the `TypeRepr` of `Rec`. Here's the signature of the recursive method to get 363 | * you started: 364 | * 365 | * def mkType(keyTypes: List[(String, String)], tpe: TypeRepr): TypeRepr 366 | * 367 | * Don't forget that our schema represents return types as strings, like `"Int"`. We can support 368 | * a fixed list of a few concrete types, and we can always summon their `TypeRepr` if they're 369 | * non-abstract types, even without a `Type[?]` instance. 370 | * 371 | * The key building block of a refined type is `Refinement` which takes three parameters: 372 | * - the TypeRepr we are refining 373 | * - a string of the new member name 374 | * - the TypeRepr of that member's return type 375 | * 376 | * calling `mkType(schema, TypeRepr.of[Rec])` should give us a `TypeRepr` for the new refined 377 | * type. 378 | * 379 | * Finally, don't forget to make `makeSchema` transparent! 380 | * 381 | */ 382 | 383 | /** 384 | * EXERCISE 8e 385 | * 386 | * If everything worked, you should be able to call `makeSchema` on a filename and get a new 387 | * `Schema[RecType]` instance that can construct new instances from a key/value map. 388 | * 389 | * Try modifying the schema file and recompiling. 390 | * 391 | * But it's still unsafe. Here are a couple of improvements to make: 392 | * 393 | * 1. If anything goes wrong with reading or parsing the schema file, produce a compile error. 394 | * 2. Change `def make` to return an `Option[RecType]`, handling the case where the `values` Map 395 | * does not contain all the keys. 396 | */ 397 | 398 | /** 399 | * EXERCISE 8f 400 | * 401 | * Try adding a new field to the Schema file, with a new type, such as `Char`. In order to support 402 | * `Char` instances, we would need to modify the pattern match in our macro to also match on the 403 | * string `"Char"`. 404 | * 405 | * But a user of our library would not always be able to make modifications to the library itself. 406 | * So, it's not as extensible as we would like. Let's use contextual values (implicits) as a 407 | * replacement for the hardcoded pattern match in the macro. We will start by creating a 408 | * contextual type, `SchemaType`, which maps strings to the types they represent. 409 | * ``` 410 | * trait SchemaType[S <: String & Singleton, T] 411 | * given SchemaType["String", String] 412 | * given SchemaType["Int", Int] 413 | * ``` 414 | * 415 | * We will change the macro to search for a `SchemaType` instance whose first parameter is the 416 | * type string from the schema file, and will interpret that to mean the type in the second 417 | * parameter. 418 | * 419 | * We can't use `summon[SchemaType[str, ?]]` in the macro because we don't have the string 420 | * available as a singleton type when the macro is expanding. We only have it as a runtime value. 421 | * So we must use `Expr.summon` instead, on a type which we will construct. 
422 | * 423 | * We want to write `Expr.summon[SchemaType[str, ?]]`, but we still need `str` as a type in scope! 424 | * One typical way to introduce a `str` type into scope is to pattern match on a `Type[?]`, and 425 | * we can get a `Type[?]` from a `TypeRepr` using `.asType`, and we can construct a new `TypeRepr` 426 | * representing a singleton literal `String` type from just a runtime string. 427 | * 428 | * So, in the macro, 429 | * 1. create a new `ConstantType` of the type name (which is a `TypeRepr` 430 | * 2. convert it to a `Type[?]` 431 | * 3. pattern match on it using `'[...]` 432 | * 4. use `Expr.summon` to do an implicit search for a `SchemaType` with the appropriate 433 | * parameters. Note that if the compiler says that it can't prove that type A conforms to 434 | * type B, but we know that it does, then we can write `A & B` in place of `A`. If it really 435 | * is true that `A <: B`, then `A & B` and `A` are _identical_ types anyway. 436 | * 5. pattern match on the result of `Expr.summon`, and handle the failure case with an error 437 | * 6. pattern match the successful result of Expr.summon with, 438 | * ``` 439 | * case Some('{ ${_}: schemaType }) => Type.of[schemaType] match 440 | * case '[SchemaType[?, fieldType]] => 441 | * ``` 442 | * 7. get the `TypeRepr` of the type `fieldType` and use it in the `Refinement`, as before. 443 | * 444 | * You can now experiment with adding new types in the schema, the data map, and new `SchemaType` 445 | * instances to make everything compile. 446 | */ 447 | 448 | object exercise9: 449 | /** 450 | * EXERCISE 9 (1 hour) 451 | * 452 | * We often want to work with quantities which have units - metres, joules, or metres per second 453 | * per second, for example, and perform arithmetic operations on them. 454 | * 455 | * When we multiply or divide two numbers, the units should be computed from the operands, but 456 | * when we add or subtract two numbers, we want to be certain that their units are the same. 457 | * 458 | * This can be achieved with a macro! But first we need to decide how to encode combinations of 459 | * units in a type. 460 | * 461 | * We need: 462 | * - arbitrary combinations of units, 463 | * - associated with a nonzero number (its power), 464 | * - whose order is unimportant 465 | * 466 | * Let's start by defining some types to represent units: 467 | * trait UnitType 468 | * trait Metre[N <: Int] extends UnitType 469 | * trait Second[N <: Int] extends UnitType 470 | * trait Kilogram[N <: Int] extends UnitType 471 | * 472 | * We can represent `m^2` as the type `Metre[2]` using a singleton type parameter, or `s^-1` as 473 | * `Second[-1]`. 474 | * 475 | * These can then be combined as intersection types, for example, `kg · m^-2 · s^2` would be, 476 | * `Kilogram[1] & Metre[-2] & Second[2]`. Note that the order of the intersection isn't important. 477 | * 478 | * We can then represent quantities with instances of the class, 479 | * `case class Quantity[U <: UnitType](value: Double)` 480 | * 481 | * Note that `Quantity`'s type parameter is invariant. 482 | * 483 | * We can define arithmetic operations on `Quantity`. 
Addition and subtraction are easy, because 484 | * the type system can already enforce the constraint that the two operands have the same type, 485 | * and hence the same units: 486 | * ``` 487 | * def +(right: Quantity[U]): Quantity[U] = Quantity[U](value + right.value) 488 | * def -(right: Quantity[U]): Quantity[U] = Quantity[U](value - right.value) 489 | * ``` 490 | * 491 | * The implementations of `*` and `/` are where it gets interesting! 492 | * 493 | * We will need to use a macro, and since the resultant type may be neither of the input types, 494 | * we know that it will need to be a `transparent` macro which computes its own type. 495 | * ``` 496 | * def *[V <: UnitType](right: Quantity[V]): Quantity[UnitType] = ??? 497 | * def /[V <: UnitType](right: Quantity[V]): Quantity[UnitType] = ??? 498 | * ``` 499 | * 500 | * Let's focus on `*` to begin with. Here's a stub: 501 | * ``` 502 | * transparent inline def *[V <: UnitType](right: Quantity[V]): Any = ??? 503 | * ``` 504 | * 505 | * Leave the return type as `Any`. Why not `Quantity[UnitType]`? 506 | * 507 | * To implement this, let's take the following steps: 508 | * 509 | * 1. Pass both `U` and `V` to the macro, with `Type` contextual values 510 | * 2. Get the `TypeRepr`s for these types and deconstruct each recursively into a `Map[TypeRef, Int]` 511 | * We will need to use the pattern extractors, `AppliedType`, `TypeRef`, `ConstantType`, 512 | * `IntConstant` and `AndType`. 513 | * 3. Combine the two maps by adding together the values for matching `TypeRef` keys, and 514 | * removing any keys with value `0` after addition. 515 | * 4. Construct a new `TypeRepr` recursively from the keys and values in the `Map`, and convert 516 | * this to a `Type`, `q`. 517 | * 5. Use quotes to instantiate a new `Expr[Quantity[q]]`, with the product of the double values 518 | * from the two operands. N.B. The compiler doesn't know that `q` conforms to `UnitType`, but 519 | * we know that `q & UnitType` does, and it's safe to write this type. 520 | * 6. Handle the case where all the units have the power `0` -- and just return a raw `Double`! 521 | * 7. The implementation of `/` is very similar -- try to implement it without copy/pasting the 522 | * entire macro! 523 | * 8. Now, create extension methods, `*` and `/` on `Double` which take a `Quantity` instance and 524 | * return a new `Quantity`. (These do not need to be macros.) 525 | * 526 | * Now try out the macro in `playground.scala`. Try the following: 527 | * 528 | * - define unit values, `metres`, `kilograms` and `seconds` as explicit `Quantity`s with value `1` 529 | * - create some values by multiplying combinations of `Double`s and `metres`, `kilograms` and 530 | * `seconds` 531 | * - let type inference work out the types; you can check them in Metals or by forcing a type mismatch 532 | * - define a method `circleArea` which takes a `radius` parameter 533 | * - define a new `UnitType` trait, `Yard` and a `yards` value 534 | * - create a new `Quantity` called `yardsPerMetre` with the value `1.09361` and appropriate units 535 | * - calculate the area of a circle with radius 300 metres, and then convert the result into 536 | * square yards 537 | * - implement some of Newton's equations of motion for uniform acceleration as methods; are they 538 | * sound? (`https://en.wikipedia.org/wiki/Equations_of_motion`) 539 | * 540 | * Currently, we create a new `Quantity` object for every value, including intermediate 541 | * expressions. Can we do better with opaque types? Try it!
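   *
   * A sketch of what the opaque-type variant could look like (assuming the `UnitType` encoding
   * described above; the object and method names are made up):
   * ```
   * object quantities:
   *   opaque type Quantity[U <: UnitType] = Double
   *
   *   object Quantity:
   *     def apply[U <: UnitType](value: Double): Quantity[U] = value
   *
   *   extension [U <: UnitType](left: Quantity[U])
   *     def value: Double = left
   *     def +(right: Quantity[U]): Quantity[U] = left + right
   * ```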
542 | * 543 | * If opaque types work, check the bytecode that gets generated for some of these methods. 544 | */ 545 | val _ = () -------------------------------------------------------------------------------- /src/main/scala/02-quotes.scala: -------------------------------------------------------------------------------- 1 | package quotesAndSplices 2 | /** 3 | * QUOTES AND SPLICES 4 | * 5 | * Scala 3 iterates on 2.x macros, introducing a new and improved macro system, which is based on 6 | * quotation and splicing. Quotes and splicing are a type-safe, feature-rich way to do code 7 | * generation at compile-time. 8 | */ 9 | 10 | import scala.quoted.* 11 | 12 | object exercise1: 13 | /** 14 | * EXERCISE 1 15 | * 16 | * This is a simple example of a (boring) Scala 3 macro. The syntax may look unfamiliar! 17 | * 18 | * Macro methods come in pairs: a macro invocation method and a macro implementation method. In 19 | * this example, `sayHello()` is the macro invocation, and `sayHelloMacro` is the macro 20 | * implementation. The macro invocation method will always reference the macro implementation 21 | * method. They don't have to be in the same file. 22 | * 23 | * We will overlook macro method parameters for now. There are several features of both methods to 24 | * be aware of: 25 | * 26 | * Macro Invocation methods: 27 | * 1. The invocation is always `inline` 28 | * 2. The entire RHS of the invocation is always a `${}` splice (but may be more complex) 29 | * 3. The invocation is a normal inline method -- as "normal" as other inline methods! 30 | * 31 | * Macro Implementation methods: 32 | * 4. The implementation will take a contextual `Quotes` parameter 33 | * 5. The return type of the implementation will be an `Expr` of the invocation's return type 34 | * 6. The implementation method is a normal (not even inline) method -- you can call it if you 35 | * have a `Quotes` instance. 36 | * 37 | * The only new syntax is the usage of `'{...}` and `${...}`, called "quotes" and "splices" 38 | * respectively, which is going to look like magic until we become familiar with it. 39 | * 40 | * Some let's get more familiar and make it seem a bit less magic. *Pretend* that we are writing 41 | * a text-based source generator: 42 | * 43 | * First Wrap remove the `inline` modifier from `sayHello` and wrap the entire `sayHello` method 44 | * definition in `s""` quotes. It should look just like we are substituting `sayHelloMacro` into 45 | * the string. 46 | * 47 | * Now change the return type of `sayHelloMacro` to `String`, delete the `Quotes` parameter and 48 | * replace the `'{` opening quote and `}` closing quote with string triple-quotes. It should look 49 | * like a method which returns a string containing some code. 50 | * 51 | * Conceptually, this is how Scala 3's quotes and splices work. But we will see that Scala 3 52 | * provides much more safety than Strings ever could! 53 | */ 54 | 55 | def sayHelloMacro(using Quotes): Expr[Unit] = '{println("Hello world")} 56 | inline def sayHello(): Unit = ${sayHelloMacro} 57 | 58 | object exercise2: 59 | /** 60 | * EXERCISE 2 61 | * 62 | * Now let's do the same thing in reverse. We will start with a naive string-based implementation 63 | * of a macro, and turn it into a real macro which writes code to pick a random Double. 64 | * 65 | * The transformed code should include all of the following: 66 | * 67 | * 1. `Quotes` 68 | * 2. `Expr[Double]` 69 | * 3. `${...}` 70 | * 4. `'{...}` 71 | * 5. 
`inline` 72 | */ 73 | 74 | def randomMacro: String = "math.random" 75 | s"def random: Double = ${randomMacro}" 76 | 77 | 78 | object exercise3: 79 | /** 80 | * EXERCISE 3 81 | * 82 | * Implement the main method which calls `exercise2.random` and `exercise1.sayHello()`. Does it 83 | * work? 84 | * 85 | * Apart from the restriction described in the compile error, Scala is happy to have macros 86 | * defined and expanded in the same run of the compiler -- we no longer need to split them into 87 | * separate compilation steps! 88 | * 89 | * Take a reasonable step to get the main method to compile in a single compilation run. 90 | */ 91 | def main(args: Array[String]): Unit = () 92 | 93 | object exercise4: 94 | /** 95 | * EXERCISE 4 96 | * 97 | * Take the untransformed example from Exercise 2, and change the macro implementation slightly: 98 | * We will now calculate `math.random` and then put the result into a string. Consider how the 99 | * contents of the string differs from the version in Exercise 2, and *when* `math.random` will 100 | * be invoked. 101 | * 102 | * Now uncomment `randomMacro` and `random`, and check the compile error. 103 | * 104 | * In the first version of `randomMacro`, the macro produced the code `math.random`. In the second 105 | * version, the macro *executes* `math.random` and tries to produce code containing the `Double` 106 | * value generated at compile-time. 107 | * 108 | * But that Double value exists only as 64 bits of data in memory in the JVM at *compiletime*, 109 | * whereas we need to substitute *code representing that value* into the quotes. Specifically, the 110 | * compiler needs an `Expr[Double]`. Thankfully that's easy to create by wrapping the `Double` 111 | * with `Expr(...)`. Apply this, and try again. 112 | * 113 | * The compiler should compile the code and give some useful advice on how it can be simplified. 114 | * Follow this advice, and simplify the macro implementation. 115 | */ 116 | def fakeRandomMacro: String = 117 | val rnd = math.random 118 | "$rnd" 119 | 120 | // def randomMacro(using Quotes): Expr[Double] = 121 | // val rnd: Double = math.random 122 | // '{$rnd} 123 | 124 | // inline def random: Double = ${randomMacro}" 125 | 126 | object exercise5: 127 | /** 128 | * EXERCISE 5 129 | * 130 | * Macros can also take parameters. These require changes to the macro implementation and the 131 | * macro invocation methods. We will change `random` to take an integer parameter, and it should 132 | * return a random value between 1 and the `bound` value (inclusive). 133 | * 134 | * Take a look at the updated `random` and `randomMacro` methods. See how `'bound` is passed into 135 | * `${randomMacro(...)}` quoted. (N.B. `'bound` is the same as `'{bound}`. Note the type of the 136 | * `bound` parameter in each method: `Int` and `Expr[Int]`. `'bound` is a quoted reference -- 137 | * think of it like a string containing the code which points to the parameter we need to refer 138 | * to: `randomMacro` can use that code, as an `Expr[Int]` (and not an `Int` itself) when it 139 | * produces its result. For now, the implementation always returns `0`. 140 | * 141 | * Experiment with passing `bound` instead of `'bound` to `randomMacro`, and changing the type 142 | * of `bound` in `randomMacro` from `Expr[Int]` to `Int`. Understand why we can't just change 143 | * both to make the types easier, and read the compiler's message about the restrictions. 144 | * 145 | * Understand how the Phase Consistency Principle applies to `bound`. 
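   *
   * The same pattern extends to extra parameters (a sketch; the names here are made up):
   * ```
   * inline def randomLabelled(bound: Int, label: String): Int =
   *   ${randomLabelledMacro('bound, 'label)}
   *
   * def randomLabelledMacro(bound: Expr[Int], label: Expr[String])(using Quotes): Expr[Int] = '{0}
   * ```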
146 | * 147 | * Experiment with adding more parameters, such as a literal value or a String. 148 | */ 149 | 150 | inline def random(bound: Int): Int = ${randomMacro('bound)} 151 | 152 | def randomMacro(bound: Expr[Int])(using Quotes): Expr[Int] = '{0} 153 | 154 | object exercise6: 155 | /** 156 | * EXERCISE 6 157 | * 158 | * Uncomment the new implementation of `randomMacro`, and check the compile error, which is a 159 | * normal type error. If `'{...}` (quoting) can take us from an `Int` to an `Expr[Int]`, work 160 | * out (by intuition!) how we can go from an `Expr[Int]` to an `Int` so that we can use it in 161 | * the quoted macro implementation. 162 | * 163 | * Understand how each other part of the expression can be used in the implementation: `math`, 164 | * `.random`, `*`, `.toInt`, `+` and `1`. Try substituting other expressions into quoted block. 165 | * 166 | * Finally, try adding a `println("Hello world!")` to the body of `randomMacro`. What difference 167 | * does it make if you put it inside the quoted block, or before the quoted block? 168 | */ 169 | 170 | inline def random(bound: Int): Int = ${randomMacro('bound)} 171 | 172 | def randomMacro(bound: Expr[Int])(using Quotes): Expr[Int] = 173 | ??? // '{(math.random*bound).toInt + 1} 174 | 175 | /** In general, code that is not inside any `'{...}` or `${...}` blocks will be compiled at the time 176 | * we call "compile-time" and will be run at the time we call "runtime". No big surprise! If we 177 | * call compile-time T, then the runtime will be T+1. 178 | * 179 | * Code which appears inside `${...}` was compiled at T-1 and is run at T. 180 | * 181 | * Code which appears inside `'{...}` has not yet been compiled, but can be compiled at T+1 and run 182 | * at T+2. 183 | * 184 | * Here, "compilation" means the final production of runnable bytecode. One of the most elegant 185 | * features of Scala 3 Macros is code inside quotes will still be fully type-checked inside quotes 186 | * in whichever phase it is defined in. 187 | * 188 | * 189 | * Quotes and splices were described as "magic" when they were introduced. But if we think about 190 | * them in the right way, the "magic" is quite limited: code can only be quoted in the presence of 191 | * an instance of `Quotes`, and a splice block automatically provides a contextual (implicit) 192 | * `Quotes` instance. This means that quoting is possible inside a splice, or inside a macro 193 | * implementation method which has a `using Quotes` parameter. Such a method can be invoked inside 194 | * a splice. An inline method may be implemented with just such a splice, called a top-level 195 | * splice. Top-level splices remain the most magic aspect of quotes and splices. 196 | */ 197 | 198 | object exercise7: 199 | /** 200 | * EXERCISE 7 201 | * Look at the implementation of `describe` below. The macro does not produce any interesting 202 | * code, but it does print the expression passed into it. (Note that this is an `Expr[Int]`, not 203 | * an actual `Int`.) 204 | * 205 | * In `playground.scala`, write a method which invokes `exercise7.describe`, passing in an integer. 206 | * What is printed when it is compiled? 207 | * 208 | * The `toString` method of `Expr`s treats them as somewhat opaque, but the `show` method will 209 | * construct a string representing the expression. Change `describeMacro` to print `expr.show` 210 | * instead. 211 | * 212 | * But it's not very helpful: it prints the name of a synthetic reference. 
This is because, at the 213 | * call-site, that is all `expr` is -- an integer value to be accessed by through a reference. 214 | * 215 | * Try the following steps, and at each stage, try to understand how it changes the output at 216 | * compile-time. 217 | * 1. Make `describe`'s parameter, `expr` `inline`. 218 | * 2. Change the parameter passed to `exercise7.describe` from an integer literal to 219 | * `Int.MaxValue`. 220 | * 3. Change the parameter passed to `exercise7.describe` to `3*4 + 30`. 221 | * 4. Change the parameter to `(math.random*10).toInt`. 222 | */ 223 | inline def describe(expr: Int): Unit = ${describeMacro('expr)} 224 | def describeMacro(expr: Expr[Int])(using Quotes): Expr[Unit] = 225 | println(expr) 226 | '{()} 227 | 228 | object exercise8: 229 | /** 230 | * EXERCISE 8 231 | * 232 | * This exercise is a recap of pattern matching in Scala. Remember that the case of the first 233 | * letter may be significant and backticks may be required. 234 | * 235 | * Rewrite the patterns in the partial function, without changing their RHSs, so that it prints 236 | * the names of the values from the list in the order they appear. Use all the named values in 237 | * the patterns. 238 | */ 239 | object Three 240 | object four 241 | 242 | object Five: 243 | def unapply(n: Int): Boolean = n == 5 244 | 245 | object six: 246 | def unapply(n: Int): Boolean = n == 6 247 | 248 | object Seven: 249 | def unapply(n: Int): Option[Seven.type] = if n == 7 then Some(Seven) else None 250 | 251 | val Eight: Int = 8 252 | 253 | List[Any](1, 2, Three, four, 5, 6, 7, Eight).foreach: 254 | case 1 => println("1") 255 | case 2 => println("2") 256 | case 3 => println("Three") 257 | case 4 => println("four") 258 | case 5 => println("Five") 259 | case 6 => println("six") 260 | case 7 => println("7") 261 | case 8 => println("Eight") 262 | 263 | 264 | 265 | object exercise9: 266 | /** 267 | * EXERCISE 9 268 | * 269 | * Study the implementation of `describeMacro`. Call it in `playground.scala`, and check the 270 | * `Didn't match` message appears. 271 | * 272 | * Uncomment the commented-out `case`, and fix the compile error by adding type ascriptions and 273 | * parentheses. 274 | * 275 | * Spend a while trying to understand the match pattern. It's best to think of it the code you 276 | * would write (with some extra type ascriptions) if you already had `a` and `b` and wanted to 277 | * produce `expr`. 278 | * 279 | * Modify the call to `describe` (in `playground.scala`) to try to get it to print `Sum of integers`. 280 | * Don't forget that constant folding happens before the macro captures the expression, so you 281 | * will need to write an expression that retains its `+`! 282 | * 283 | * Try including `a.show` and `b.show` in the `println` output, and see how they vary with 284 | * different input values. 285 | * 286 | * Try adding additional cases which match different expressions. 287 | */ 288 | inline def describe(inline expr: Int): Unit = ${describeMacro('expr)} 289 | def describeMacro(expr: Expr[Int])(using Quotes): Expr[Unit] = 290 | expr match 291 | //case '{ $a + $b } => println("Sum of integers") 292 | case _ => println("Didn't match: "+expr.show) 293 | 294 | '{()} 295 | 296 | object exercise10: 297 | /** 298 | * EXERCISE 10 299 | * 300 | * `optimizeMacro` now matches on a more complex expression: the difference of two products. If 301 | * `a == b` and `c == d` then this would be the difference of two squares and could be optimized 302 | * into `(a + c)*(a - c)`. 
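   *
   * Once the operands have been checked, the rewritten expression can be built with a quote along
   * these lines (a sketch):
   * ```
   * '{ ($a + $c) * ($a - $c) }
   * ```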
303 | * 304 | * Equality for expressions is more complex than a simple `a == b` check, since two expressions 305 | * lifted from different parts of the code will always be different, even if they _look_ the same. 306 | * We can use the `matches` method in place of `==` to do a more reliable comparison. 307 | * 308 | * Implement the optimized RHS for the matched case using `a` and `c`, and provide a default 309 | * return value if the case does not match. 310 | */ 311 | inline def optimize(inline expr: Int): Int = ${optimizeMacro('expr)} 312 | 313 | def optimizeMacro(expr: Expr[Int])(using Quotes): Expr[Int] = 314 | expr match 315 | case '{ ($a: Int)*($b: Int) - ($c: Int)*($d: Int) } => ??? 316 | case _ => ??? 317 | 318 | object exercise11: 319 | /** 320 | * EXERCISE 11 321 | * 322 | * We commonly need to match on lambdas. These can take several forms, and can introduce their 323 | * own definitions, so this requires some syntactic gymnastics! 324 | * 325 | * The `lengthFrom` macro pattern matches against a lambda of `String => String` and constructs 326 | * a lambda of `String => Int` which returns the length (`Int`) of the `String` after applying 327 | * the original lambda, but without calculating the intermediate string's value. 328 | * 329 | * Check the implementation of the pattern which matches calls to `replace` (which does not change 330 | * the string's length). Write a correct RHS for the second case. Add a third case which matches 331 | * the two-parameter variant of `substring`. 332 | * 333 | * Try calling the macro in `playground.scala`. Does it make any difference whether you call, 334 | * `_.substring(7)` or `str => str.substring(7)` or `str substring 7`? 335 | * 336 | * Bonus: Can you match against the lambda, `_.take(n)` or `_.drop(m)`? 337 | * 338 | * Take a look at github:lampepfl/dotty/tests/pos-macros/quotedPatterns.scala for some more 339 | * examples of more complex pattern matches against lambdas. 340 | */ 341 | inline def calcLength(inline fn: String => String): String => Int = ${calcLengthMacro('fn)} 342 | 343 | def calcLengthMacro(expr: Expr[String => String])(using Quotes): Expr[String => Int] = 344 | expr match 345 | case '{ (str: String) => str.replace($_ : Char, $_ : Char): String } => 346 | '{ (str: String) => str.length } 347 | case '{ (str: String) => str.substring($n): String } => 348 | '{ (str: String) => ??? } 349 | 350 | /** 351 | * An important point to understand about pattern matching (in general) is that the pattern (between 352 | * `case` and `=>`) introduces new identifiers which become available on the RHS, which is a new 353 | * scope. Most commonly this applies to terms (i.e. values), but we will see later that it applies 354 | * equally to types. 355 | */ 356 | 357 | object exercise12: 358 | /** 359 | * EXERCISE 12 360 | * 361 | * All the macros we have seen so far have concrete types, but it's common to need to have 362 | * macros with type parameters. Here is a very simple example, which needs some changes: 363 | * 364 | * Uncomment the `println` and fix the compiler error by adding a new `using` parameter, or 365 | * context bound. What output do you get (at compile-time) when calling the macro in 366 | * `playground.scala`? 367 | * 368 | * Remember the invocation of `typeNameMacro` is just calling a normal method, and its type 369 | * parameter will be inferred to _something_ if there is nothing else to constrain it! 
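   *
   * The fix amounts to threading the type through explicitly (a sketch of where the changes go):
   * ```
   * inline def typeName[T]: String = ${typeNameMacro[T]}
   *
   * def typeNameMacro[T: Type](using Quotes): Expr[String] =
   *   println(Type.show[T])
   *   '{""}
   * ```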
370 | * 371 | * Finally, remove the `println` and change the macro implementation to quote the type name as its 372 | * return value (instead of `""`). 373 | */ 374 | inline def typeName[T]: String = ${typeNameMacro} 375 | 376 | def typeNameMacro[T](using Quotes): Expr[String] = 377 | import quotes.* 378 | //println(Type.show[T]) 379 | '{""} 380 | 381 | /** 382 | * Macro implementations need `Type[T]` instances for every type parameter if its behavior is to 383 | * depend on the type in any way. Instances of `Type[T]` are primarily used as scrutinees for 384 | * pattern matching. `Type`s are often contextual values, and `Type.of[T]` is shorthand for 385 | * `summon[Type[T]]`. 386 | */ 387 | 388 | object exercise13: 389 | /** 390 | * EXERCISE 13 391 | * 392 | * In the same way we can match on `Expr`s with `case '{...}` patterns, we can match on `Type`s 393 | * with `case '[...]` patterns. 394 | * 395 | * Here's a less general version of the `typeName` macro. It matches strings and integers, but 396 | * fails for any other type. Check how that failure presents itself to a developer using this 397 | * macro. 398 | * 399 | * Uncomment the RHS of the final case and see what happens. It might not be what you would 400 | * expect! Remember: `typeNameMacro` is a macro, but the body of `typeNameMacro` is not an 401 | * `inline` context, so `compiletime.error` is *guaranteed* to produce a compile error every time. 402 | * 403 | * Use `quotes.reflect.report.errorAndAbort` to produce an error instead. 404 | */ 405 | 406 | inline def typeName[T]: String = ${typeNameMacro} 407 | 408 | def typeNameMacro[T: Type](using Quotes): Expr[String] = 409 | import quotes.* 410 | Type.of[T] match 411 | case '[ Int ] => '{"integer"} 412 | case '[ String ] => '{"string"} 413 | case _ => ??? // compiletime.error("Not supported") 414 | 415 | 416 | object exercise14: 417 | /** 418 | * EXERCISE 14 419 | * 420 | * We can also match on more complex types. Remember the rules for initial capitalization of 421 | * identifiers in pattern matches, and that they apply to types too. Read the first case pattern 422 | * below: 423 | * 424 | * `case '[ Option[t] ] =>` 425 | * 426 | * This pattern performs a few duties: 427 | * 1. it matches if the type is a `scala.Option` of _something_ 428 | * 2. it binds the Option type's type parameter to `t`, so that `t` is a *type* on the RHS 429 | * 3. it makes a contextual instance of `Type[t]` available on the RHS 430 | * 431 | * These are all subtle points, and should be understood very clearly! (1) makes it possible to 432 | * deconstruct types in a pattern. (2) makes it possible to refer to the deconstructed parts of 433 | * the type on the RHS. And (3) provides us with an on-heap object which represents that type. 434 | * 435 | * Try deconstructing some more types in the pattern match. 436 | */ 437 | inline def typeName[T]: String = ${typeNameMacro} 438 | 439 | def typeNameMacro[T: Type](using Quotes): Expr[String] = 440 | import quotes.* 441 | Type.of[T] match 442 | case '[ Option[t] ] => Expr("an option"+Type.show[t]) 443 | case '[ List[t] ] => Expr("a list of "+Type.show[t]) 444 | case '[ t ] => Expr(Type.show[t]) 445 | 446 | /** 447 | * Unlike terms, `Type`s are not subject to the phase consistency rules. We do not have to use 448 | * quotes and splices when referring to types, since they are "universal". 449 | * 450 | * But a macro can generate code which will not be compiled until a later phase, and may need to 451 | * refer to types which do not exist yet. 
446 | /**
447 |  * Unlike terms, `Type`s are not subject to the phase consistency rules. We do not have to use
448 |  * quotes and splices when referring to types, since they are "universal".
449 |  *
450 |  * But a macro can generate code which will not be compiled until a later phase, and may need to
451 |  * refer to types which do not exist yet. How can this possibly work?
452 |  *
453 |  * Pattern matching on types is a principal means of doing so. Different branches in the macro's
454 |  * runtime code correspond to different ASTs being generated, and each must be consistent with the
455 |  * assumptions which are true for its branch.
456 |  *
457 |  * Often we need to work with a type, such as `t`, which we know nothing about, except that it is
458 |  * called `t` and it is the same type as other types called `t` in the same scope -- this is
459 |  * the same as in the body of a method parameterized on a type, say, `T`.
460 |  */
461 | 
462 | object exercise15:
463 |   /**
464 |    * EXERCISE 15
465 |    *
466 |    * The macro `sortSortables` is designed to take a collection, either a `List` or a `Set`, and
467 |    * if it's a `List` (whose elements are ordered), it will sort the elements. If it's a `Set`, then
468 |    * there's no need. We need behavior that's dependent on the collection type, including a
469 |    * contextual `Ordering` instance for the element type, if (and only if) we have a `List`.
470 |    *
471 |    * Look at the implementation of `sortSortablesMacro`. Note how it pattern matches on the `xs`
472 |    * value, binding `t` to the (unknown) type parameter of `List` or `Set`, but at the same time,
473 |    * binding `xs` to a value which is an instance of `List[t]` or `Set[t]`, and *shadowing* the
474 |    * previous `xs` (which is typed as `T`). The relationship between `xs` and the type `t` is
475 |    * critical for typechecking the RHS.
476 |    *
477 |    * Test the macro by calling it in `playground.scala` with different parameters:
478 |    * - a `List[Int]`
479 |    * - a `List[Exception]`
480 |    * - a `Set[Int]`
481 |    * - a `Set[Exception]`
482 |    *
483 |    * Next, compare the cases for `List` and `Set`. `Set` simply returns the same value, unaltered.
484 |    * But the `List` case uses the `Expr.summon` method to produce code which provides an
485 |    * `Ordering[t]` when the macro expands: an `Expr[Ordering[t]]`. Look at the quoted block at the
486 |    * end, and check that everything type-checks and is phase-consistent.
487 |    *
488 |    * To make "typechecking in your head" easier, you can do the following:
489 |    * 1. Pretend every `Expr[T]` is just a `T`
490 |    * 2. Pretend every `${}` splice is just the spliced value (without the dollar/braces)
491 |    * 3. Pretend every `'{}` quote is just the quoted value (without the quote/braces)
492 |    *
493 |    * For the code to compile, we had to add an extra `.asExprOf[T]` to the quoted block. Can you
494 |    * work out why?
495 |    *
496 |    */
497 |   inline def sortSortables[T](xs: T): T = ${sortSortablesMacro[T]('xs)}
498 | 
499 |   def sortSortablesMacro[T: Type](xs: Expr[T])(using Quotes): Expr[T] =
500 |     import quotes.*, reflect.*
501 |     xs match
502 |       case '{ $xs: List[t] } =>
503 |         val ord: Expr[Ordering[t]] = Expr.summon[Ordering[t]].getOrElse:
504 |           report.errorAndAbort("Can't sort "+Type.show[t])
505 | 
506 |         '{ $xs.sorted(using $ord) }.asExprOf[T]
507 | 
508 |       case '{ $xs: Set[t] } =>
509 |         xs
510 | 
511 |       case _ =>
512 |         report.errorAndAbort("This type is not supported")
513 | 
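The `Expr.summon` technique from exercise 15 generalises to any type class. Here is a sketch (hypothetical names, not one of the exercises) which summons a `Numeric[T]` at expansion time and splices it into the generated code:

object summonSketch:
  import scala.quoted.*

  inline def total[T](inline xs: List[T]): T = ${totalMacro[T]('xs)}

  def totalMacro[T: Type](xs: Expr[List[T]])(using Quotes): Expr[T] =
    import quotes.*, reflect.*
    Expr.summon[Numeric[T]] match
      // `num` is an Expr[Numeric[T]]: code that resolves the instance where the macro expands
      case Some(num) => '{ $xs.sum(using $num) }
      case None      => report.errorAndAbort("No Numeric instance for " + Type.show[T])

Calling `total(List(1, 2, 3))` expands to a plain `.sum` call, while `total(List("a", "b"))` fails at compile-time with the error above.
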
514 | import polynomials.{ParseError, Polynomial}
515 | 
516 | object exercise16a:
517 |   /**
518 |    * EXERCISE 16a
519 |    *
520 |    * Start a REPL (`sbt console`) and import `polynomials.*`. This provides a simple parser for
521 |    * polynomials in `x` over rational coefficients, with some reasonable limitations for simplicity.
522 |    *
523 |    * Try parsing the following strings with `Polynomial(str)`:
524 |    * - `x + 1`
525 |    * - `x^3 + 2x^2 - 1`
526 |    * - `1/4x^4 + 1/3x^3 + 1/2x^2 + x`
527 |    *
528 |    * These should produce `Polynomial` data structures. Now try applying some numeric values to them,
529 |    * for example:
530 |    * - `Polynomial("2x + 1")(3.0)`
531 |    *
532 |    * This all works adequately, but what would happen if we were to try to parse `Polynomial("t + 1")`
533 |    * or `Polynomial("x/2 + 1")` (which are both meaningful, but are not supported by the parser)? We
534 |    * would unfortunately find out only at *runtime*.
535 |    *
536 |    * Let's try to write a method which checks polynomials at compile-time! The stub implementation
537 |    * below constructs the `Polynomial` without checking if it's valid.
538 |    *
539 |    * Use the `.value` method on the `Expr[String]` parameter to convert it from an `Expr` representing
540 |    * a `String` to an actual `String` object on the heap. The `value` method returns a
541 |    * `Some[String]` if this is possible. Consider under what circumstances this is possible or not.
542 |    *
543 |    * Provide an appropriate error message if it's not possible to get the `String` value at
544 |    * compile-time.
545 |    *
546 |    * Now that we have the string, check whether a polynomial can be constructed successfully in a
547 |    * try/catch block. If not, then produce a compile error. (Note that we will end up parsing the
548 |    * polynomial twice: once at compile-time, and again at runtime.)
549 |    */
550 |   import polynomials.*
551 | 
552 |   inline def polynomial(value: String): Polynomial = ${polynomialMacro('value)}
553 | 
554 |   def polynomialMacro(expr: Expr[String])(using Quotes): Expr[Polynomial] =
555 |     import quotes.*, reflect.*
556 |     '{Polynomial($expr)}
557 | 
558 | object exercise16b:
559 |   /**
560 |    * EXERCISE 16b
561 |    *
562 |    * Using the previous implementation of `polynomialMacro` (remember, it's an ordinary method),
563 |    * let's arrange for it to be called in a different way, as an interpolated string (with no
564 |    * interpolations), like so:
565 |    *
566 |    * `poly"4x^3 + 2x^2 - x - 5"`
567 |    *
568 |    * Before typechecking, we need to know that this call is transformed into:
569 |    *
570 |    * `StringContext("4x^3 + 2x^2 - x - 5").poly()`
571 |    *
572 |    * So we need to implement `poly()` as an extension method macro on `StringContext`, and then we
573 |    * can just call the original `polynomialMacro` method.
574 |    *
575 |    * Experiment with tweaking the implementation below. Try calling it in `playground.scala`. Try
576 |    * removing the `inline` from before `ctx` or `def poly`. Try to understand how the pattern
577 |    * (which includes the `Varargs` extractor and the `*` for repeated arguments) works.
578 |    *
579 |    * Look up the definition of the `Exprs` extractor in
580 |    * `github:lampepfl/dotty/library/src/scala/quoted/Exprs.scala`, and see if it could be useful
581 |    * inside the `Varargs` extractor.
582 |    */
583 | 
584 |   import exercise16a.polynomialMacro
585 |   import polynomials.*
586 | 
587 |   extension (inline ctx: StringContext) inline def poly(): Polynomial = ${polyMacro('ctx)}
588 | 
589 |   def polyMacro(ctx: Expr[StringContext])(using Quotes): Expr[Polynomial] = ctx match
590 |     case '{ StringContext(${Varargs(strs)}*) } => strs match
591 |       case Seq(str) => polynomialMacro(str)
592 | 
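The compile-time validation asked for in exercise 16a works for any literal you can parse. To avoid giving the answer away, here is the same pattern sketched for regular expressions instead of polynomials (the names are invented):

object validationSketch:
  import scala.quoted.*
  import scala.util.matching.Regex

  inline def regex(inline pattern: String): Regex = ${regexMacro('pattern)}

  def regexMacro(expr: Expr[String])(using Quotes): Expr[Regex] =
    import quotes.*, reflect.*
    expr.value match
      case None =>
        report.errorAndAbort("The pattern must be a literal string")
      case Some(str) =>
        try
          val _ = new Regex(str)  // parse once now, purely to validate
          '{ new Regex($expr) }   // the generated code parses it again at runtime
        catch case err: java.util.regex.PatternSyntaxException =>
          report.errorAndAbort("Invalid regular expression: " + err.getMessage)

As with the polynomial, the string is parsed twice: once during expansion to validate it, and once at runtime in the expanded code.
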
593 | object exercise17:
594 |   /**
595 |    * EXERCISE 17
596 |    *
597 |    * It's easy to construct `Expr`s of primitive types and even some collection types, but it
598 |    * doesn't work in general. Below is a full, more concise implementation of the polynomial
599 |    * StringContext macro from the last exercise, and on the line beginning `try`, we wrap a
600 |    * newly-constructed `Polynomial` value in `Expr`.
601 |    *
602 |    * This only works because there is a contextual `ToExpr[Polynomial]` in scope. Comment it out,
603 |    * and see what error we get. But the given instance of `ToExpr[Polynomial]` does not have a
604 |    * valid implementation. Write the implementation, and then call the macro in `playground.scala`
605 |    * to test it.
606 |    */
607 |   import polynomials.*
608 | 
609 |   extension (inline ctx: StringContext) inline def poly(): Polynomial = ${polyMacro('ctx)}
610 | 
611 |   def polyMacro(ctx: Expr[StringContext])(using Quotes): Expr[Polynomial] = ctx match
612 |     case '{ StringContext(${Varargs(Seq(str))}*) } =>
613 |       try Expr(Polynomial(str.valueOrAbort))
614 |       catch case err: ParseError => quotes.reflect.report.errorAndAbort("Bad polynomial")
615 | 
616 |   given ToExpr[Polynomial] with
617 |     def apply(poly: Polynomial)(using Quotes): Expr[Polynomial] = ???
618 | 
--------------------------------------------------------------------------------
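As a closing illustration of what exercise 17 asks for (without spoiling the `Polynomial` answer), here is a `ToExpr` sketch for an invented `Point` type: lift the parts with existing `ToExpr` instances, then quote a constructor call.

object toExprSketch:
  import scala.quoted.*

  case class Point(x: Int, y: Int)

  given ToExpr[Point] with
    def apply(point: Point)(using Quotes): Expr[Point] =
      // Expr(point.x) uses the built-in ToExpr[Int]; the quote rebuilds the value at runtime
      '{ Point(${Expr(point.x)}, ${Expr(point.y)}) }

With this given in scope, `Expr(Point(1, 2))` produces code that evaluates to `Point(1, 2)` when the macro's output runs.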