├── project ├── build.properties └── plugins.sbt ├── src ├── main │ └── scala │ │ └── com │ │ └── phenan │ │ └── scalalr │ │ ├── internal │ │ ├── EoI.scala │ │ ├── StartNode.scala │ │ ├── Literal.scala │ │ ├── TokenList.scala │ │ ├── Transitions.scala │ │ └── SeqTail.scala │ │ ├── macroimpl │ │ ├── MacroModule.scala │ │ ├── CommonNamesModule.scala │ │ ├── AnnotationFinderModule.scala │ │ ├── MacroUtilitiesModule.scala │ │ ├── SyntaxInfoModule.scala │ │ ├── TreeGeneratorModule.scala │ │ ├── TyperModule.scala │ │ ├── SyntaxGeneratorModule.scala │ │ └── SyntaxInfoCollectorModule.scala │ │ ├── sep.scala │ │ ├── syntax.scala │ │ ├── Main.scala │ │ ├── cli │ │ ├── CLISyntaxRuleModule.scala │ │ ├── CLIOptionParserModule.scala │ │ ├── ASTDataTypeWriterModule.scala │ │ ├── CLIApplication.scala │ │ ├── SyntaxFileParserModule.scala │ │ └── ScalaCodeGeneratorModule.scala │ │ ├── dsl.scala │ │ ├── package.scala │ │ └── shared │ │ ├── LALRAutomatonModule.scala │ │ ├── SyntaxRuleModule.scala │ │ └── CodeGeneratorModule.scala └── test │ └── scala │ └── com │ └── phenan │ └── scalalr │ ├── MathDSL.scala │ ├── MathDSLTest.scala │ ├── SyntaxTest.scala │ └── LALRAutomatonTest.scala ├── font ├── Scalig-Bold.ttf ├── Scalig-Bold.glyphs ├── Scalig-Regular.ttf ├── Scalig-Regular.glyphs ├── README.md └── LICENSE ├── .gitattributes ├── .gitignore ├── test ├── Math.syntax ├── out │ └── com │ │ └── phenan │ │ └── arith │ │ ├── ASTs.scala │ │ └── Main.scala └── GeneratedCodeSample.scala ├── LICENSE └── README.md /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version = 0.13.13 -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | logLevel := Level.Warn 2 | 3 | addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.2") 4 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/internal/EoI.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.internal 2 | 3 | case object EoI 4 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/internal/StartNode.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.internal 2 | 3 | case object StartNode 4 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/internal/Literal.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.internal 2 | 3 | case class Literal [T] (value: T) 4 | -------------------------------------------------------------------------------- /font/Scalig-Bold.ttf: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:9faa3d172f832b83774abf6499e1e2d926782471dccdef7cf2551c951f678fef 3 | size 107528 4 | -------------------------------------------------------------------------------- /font/Scalig-Bold.glyphs: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:c7482135204a7eb064d502f5ccfab5c9d0f974e2bd498aad63b9c62e34ea408b 3 | size 503366 4 | 
-------------------------------------------------------------------------------- /font/Scalig-Regular.ttf: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:3262d2694defc5fc31e8b1df178cc3de7f4c780d8553f7579b5b4bc0d59b0195 3 | size 93344 4 | -------------------------------------------------------------------------------- /font/Scalig-Regular.glyphs: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:2c69c843fa55717e1374199921234972e4fa82041e3ffda3194eff29e5884623 3 | size 456210 4 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/macroimpl/MacroModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.macroimpl 2 | 3 | trait MacroModule { 4 | val c: scala.reflect.macros.whitebox.Context 5 | } 6 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/sep.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | 3 | import scala.annotation.StaticAnnotation 4 | 5 | class sep (val grammar: String*) extends StaticAnnotation 6 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/syntax.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | 3 | import scala.annotation.StaticAnnotation 4 | 5 | class syntax (val grammar: String*) extends StaticAnnotation 6 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/Main.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | 3 | import cli._ 4 | 5 | object Main { 6 | def main (args: Array[String]): Unit = CLIApplication.applicationMain(args) 7 | } 8 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | font/Scalig-Regular.ttf filter=lfs diff=lfs merge=lfs -text 2 | font/Scalig-Bold.ttf filter=lfs diff=lfs merge=lfs -text 3 | font/Scalig-Regular.glyphs filter=lfs diff=lfs merge=lfs -text 4 | font/Scalig-Bold.glyphs filter=lfs diff=lfs merge=lfs -text 5 | -------------------------------------------------------------------------------- /font/README.md: -------------------------------------------------------------------------------- 1 | # Scalig: Font for Scalalr 2 | 3 | Scalig is a modified version of the Inconsolata font. 4 | Scalig supports several ligatures. 5 | These ligatures enable programmers to use several symbols and keywords as identifiers in Scala. 6 | Scalig is released under the SIL Open Font License. 
7 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/internal/TokenList.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.internal 2 | 3 | sealed trait TokenList 4 | case class TokenListCons [+A, +B <: TokenList] (head: A, tail: B) extends TokenList 5 | sealed trait TokenListSentinel extends TokenList 6 | case object TokenListSentinel extends TokenListSentinel 7 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/internal/Transitions.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.internal 2 | 3 | case class Shift [T, N1, N2] (shift: (N1, T) => N2) 4 | case class Reduce [T, N1, N2] (reduce: N1 => N2) 5 | case class Accept [NX, R] (accept: NX => R) 6 | case class Transition [T, N1, N2] (transit: (N1, T) => N2) 7 | case class Transitions [L <: TokenList, N1, N2] (transit: (N1, L) => N2) 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Scala template 3 | *.class 4 | *.log 5 | 6 | # sbt specific 7 | .cache 8 | .history 9 | .lib/ 10 | dist/* 11 | target/ 12 | lib_managed/ 13 | src_managed/ 14 | project/boot/ 15 | project/plugins/project/ 16 | 17 | # Scala-IDE specific 18 | .scala_dependencies 19 | .worksheet 20 | 21 | # ENSIME specific 22 | .ensime_cache/ 23 | .ensime 24 | 25 | .idea/ 26 | -------------------------------------------------------------------------------- /test/Math.syntax: -------------------------------------------------------------------------------- 1 | 2 | syntax com.phenan.arith.MathDSL (program) { 3 | program = stmts | stmt ; 4 | stmts = stmt program ; 5 | stmt = expr ; 6 | expr = add | sub | term ; 7 | add = expr "+" term ; 8 | sub = expr "-" term ; 9 | term = mul | div | factor ; 10 | mul = term "*" factor ; 11 | div = term "/" factor ; 12 | factor = paren | num ; 13 | paren = "(" expr ")" ; 14 | num = (Int) ; 15 | } 16 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/internal/SeqTail.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.internal 2 | 3 | sealed trait SeqTail [+T] { 4 | def toSeq: Seq[T] 5 | } 6 | 7 | object SeqTail { 8 | def empty[T]: SeqTail [T] = NilSeqTail 9 | } 10 | 11 | case class ConsSeqTail [+T] (head: T, tail: SeqTail[T]) extends SeqTail[T] { 12 | override def toSeq: Seq[T] = head +: tail.toSeq 13 | } 14 | 15 | case object NilSeqTail extends SeqTail[Nothing] { 16 | override def toSeq: Seq[Nothing] = Nil 17 | } 18 | -------------------------------------------------------------------------------- /test/out/com/phenan/arith/ASTs.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.arith 2 | 3 | sealed trait Expr extends Stmt 4 | case class Mul (arg0: Term, arg1: Factor) extends Term 5 | sealed trait Term extends Expr 6 | case class Stmts (arg0: Stmt, arg1: Program) extends Program 7 | case class Div (arg0: Term, arg1: Factor) extends Term 8 | case class Num (arg0: Int) extends Factor 9 | sealed trait Factor extends Term 10 | sealed trait Stmt extends Program 11 | case class Paren (arg0: Expr) extends Factor 
12 | sealed trait Program 13 | case class Add (arg0: Expr, arg1: Term) extends Expr 14 | case class Sub (arg0: Expr, arg1: Term) extends Expr 15 | -------------------------------------------------------------------------------- /src/test/scala/com/phenan/scalalr/MathDSL.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | 3 | @dsl[MathDSL.Expr] 4 | object MathDSL { 5 | 6 | sealed trait Expr 7 | 8 | @syntax(s"$n + $m") 9 | case class Add (n: Expr, m: Term) extends Expr 10 | 11 | @syntax(s"$n - $m") 12 | case class Sub (n: Expr, m: Term) extends Expr 13 | 14 | sealed trait Term extends Expr 15 | 16 | @syntax(s"$n * $m") 17 | case class Mul (n: Term, m: Factor) extends Term 18 | 19 | @syntax(s"$n / $m") 20 | case class Div (n: Term, m: Factor) extends Term 21 | 22 | sealed trait Factor extends Term 23 | 24 | @syntax(s"$n") 25 | case class Num (n: Int) extends Factor 26 | 27 | @syntax(s"( $e )") 28 | case class Paren (e: Expr) extends Factor 29 | 30 | @syntax(s"{ $ns }") 31 | case class Ints (ns: Int@sep(",")*) extends Expr 32 | } 33 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/cli/CLISyntaxRuleModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | package cli 3 | 4 | import shared._ 5 | 6 | trait CLISyntaxRuleModule { 7 | this: SyntaxRuleModule => 8 | 9 | case class NonTerminalImpl (name: String) 10 | case class LiteralTokenImpl (identifier: Option[String], litType: String) 11 | 12 | sealed trait SemanticActionImpl 13 | case object Branch extends SemanticActionImpl 14 | case object Derivation extends SemanticActionImpl 15 | 16 | override type NonTerminal = NonTerminalImpl 17 | override type LiteralToken = LiteralTokenImpl 18 | override type SemanticAction = SemanticActionImpl 19 | 20 | def nonTerminalSymbol (name: String): Symbol = Symbol(NonTerminalImpl(name)) 21 | def keywordSymbol (name: String): Symbol = Symbol(Terminal(Keyword(name))) 22 | def literalTokenSymbol (identifier: String, litType: String): Symbol = Symbol(Terminal(LiteralTokenImpl(Some(identifier), litType))) 23 | } 24 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/cli/CLIOptionParserModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.cli 2 | 3 | import java.io.File 4 | 5 | import scopt._ 6 | 7 | trait CLIOptionParserModule { 8 | case class Config 9 | (directory : Option[File] = None, 10 | printFlag : Boolean = false, 11 | syntaxFile : File = null) 12 | 13 | val optionParser: OptionParser[Config] = new OptionParser[Config] ("scalalr") { 14 | head("ScaLALR", com.phenan.scalalr.scaLALRVersion) 15 | 16 | opt[File] ('d', "directory").action((f, c) => c.copy(directory = Some(f))) 17 | .text("target file name to write the generated code out") 18 | 19 | opt[Unit] ('p', "print").action((_, c) => c.copy(printFlag = true)) 20 | .text("print the generated code") 21 | 22 | help("help") 23 | .text("print this usage text") 24 | 25 | arg[File]("").action((f, c) => c.copy(syntaxFile = f)) 26 | .text("input syntax file") 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Kazuhiro Ichikawa 4 | 5 | Permission 
is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/test/scala/com/phenan/scalalr/MathDSLTest.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | 3 | import org.scalatest._ 4 | 5 | import scala.language.postfixOps 6 | 7 | class MathDSLTest extends FunSuite with DiagrammedAssertions { 8 | import MathDSL._ 9 | 10 | test ("test simple expression") { 11 | val x: Expr = (1) $$plus (2) 12 | 13 | assert (x == Add(Num(1), Num(2))) 14 | } 15 | 16 | test ("test complex expression") { 17 | val x: Expr = $$parenleft $$parenleft (1) $$plus (2) $$slash (3) $$parenright $$asterisk (4) $$parenright 18 | 19 | assert (x == Paren(Mul(Paren(Add(Num(1), Div(Num(2), Num(3)))), Num(4)))) 20 | } 21 | 22 | test ("test AST interpolation") { 23 | val x: Expr = $$parenleft $$parenleft (1) $$hyphen (Mul(Num(2), Num(2))) $$slash (3) $$parenright $$asterisk (4) $$parenright 24 | 25 | assert (x == Paren(Mul(Paren(Sub(Num(1), Div(Mul(Num(2), Num(2)), Num(3)))), Num(4)))) 26 | } 27 | 28 | test ("test ints") { 29 | val x : Expr = $$braceleft $$braceright 30 | 31 | assert (x == Ints()) 32 | 33 | val y : Expr = $$braceleft (1) $$braceright 34 | 35 | assert (y == Ints(1)) 36 | 37 | val z : Expr = $$braceleft (1)$$comma (2) $$braceright 38 | 39 | assert (z == Ints(1, 2)) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/macroimpl/CommonNamesModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.macroimpl 2 | 3 | trait CommonNamesModule { 4 | this: MacroModule => 5 | 6 | import c.universe._ 7 | 8 | def getNilListOf (componentTypeTree: Tree): Tree = q"scala.collection.immutable.List.empty[$componentTypeTree]" 9 | def makeSingleElementList (componentTypeTree: Tree, arg: Tree): Tree = q"scala.collection.immutable.List[$componentTypeTree]($arg)" 10 | def makeSingleElementSeqTail (componentTypeTree: Tree, arg: Tree): Tree = q"com.phenan.scalalr.internal.ConsSeqTail[$componentTypeTree]($arg, com.phenan.scalalr.internal.NilSeqTail)" 11 | 12 | def consSeq (left: Tree, right: Tree): Tree = q"$left +: $right" 13 | def consSeqTail (left: Tree, right: Tree): Tree = q"com.phenan.scalalr.internal.ConsSeqTail($left, $right)" 14 | 15 | def seqTailToSeq (s: Tree): Tree = q"$s.toSeq" 16 | 17 | def seqTypeTreeOf 
(componentTypeTree: Tree): Tree = tq"scala.collection.Seq[$componentTypeTree]" 18 | def seqTailTypeTreeOf (componentTypeTree: Tree): Tree = tq"com.phenan.scalalr.internal.SeqTail[$componentTypeTree]" 19 | 20 | lazy val syntaxAnnotationType: Type = c.typecheck(tq"com.phenan.scalalr.syntax", c.TYPEmode).tpe 21 | lazy val sepAnnotationType: Type = c.typecheck(tq"com.phenan.scalalr.sep", c.TYPEmode).tpe 22 | } 23 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/dsl.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | 3 | import shared._ 4 | import macroimpl._ 5 | 6 | import scala.annotation.StaticAnnotation 7 | import scala.language.experimental.macros 8 | 9 | class dsl [T] extends StaticAnnotation { 10 | def macroTransform (annottees: Any*): Any = macro dslBundle.impl 11 | } 12 | 13 | class dslBundle (val c: scala.reflect.macros.whitebox.Context) 14 | extends TreeGeneratorModule with CodeGeneratorModule with LALRAutomatonModule 15 | with SyntaxGeneratorModule with TyperModule with SyntaxInfoCollectorModule with SyntaxInfoModule 16 | with SyntaxRuleModule with AnnotationFinderModule with CommonNamesModule with MacroUtilitiesModule with MacroModule 17 | { 18 | import c.universe._ 19 | 20 | /** 21 | * Entry point of the macro 22 | * @param annottees syntax trees to be transformed by the macro 23 | * @return the transformed syntax trees 24 | */ 25 | def impl (annottees: Tree*): Tree = { 26 | q""" 27 | ${translate(annottees.head)} 28 | ..${annottees.tail} 29 | """ 30 | } 31 | 32 | /** 33 | * Transforms the body of the annotated object 34 | * @param tree syntax tree to be transformed by the macro 35 | * @return the transformed syntax tree 36 | */ 37 | private def translate (tree: Tree): Tree = { 38 | val (t, s) = processSyntaxAnnotations(tree) 39 | doTypeCheck(t) 40 | val syntax = generateSyntax(s) 41 | val generated = CodeGenerator(LALRAutomaton(syntax)).generatedDefinitions 42 | MacroUtilities.addMembers(t, generated) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/cli/ASTDataTypeWriterModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | package cli 3 | 4 | import shared._ 5 | 6 | import java.io._ 7 | 8 | import shapeless._ 9 | 10 | trait ASTDataTypeWriterModule { 11 | this: CodeGeneratorModule with CLISyntaxRuleModule with SyntaxRuleModule => 12 | 13 | import output._ 14 | 15 | def writeASTDataType (qualifiedName: List[String], syntax: Syntax, writer: PrintWriter): Unit = { 16 | if (qualifiedName.init.nonEmpty) { 17 | writer.println(s"package ${qualifiedName.init.mkString(".")}") 18 | } 19 | writer.println(generateProgram(astDataTypeDefinitions(syntax))) 20 | writer.flush() 21 | } 22 | 23 | private def astDataTypeDefinitions (syntax: Syntax): List[MemberDef] = syntax.nonTerminals.toList.flatMap { nt => 24 | syntax.rules.find(_.left == nt).map { 25 | case Rule(_, _, Branch) => sealedTraitDef(nt.name, findSuperType(nt, syntax).map(nonTerminalType)) 26 | case Rule(_, right, Derivation) => caseClassDef(nt.name, Nil, collectDerivationDataParameters(right), findSuperType(nt, syntax).map(nonTerminalType)) 27 | } 28 | } 29 | 30 | private def findSuperType (nt: NonTerminal, syntax: Syntax): Option[NonTerminal] = syntax.rules.collectFirst { 31 | case Rule(left, right, Branch) if right == List(Symbol(nt)) => left 32 | } 33 | 34 | private def collectDerivationDataParameters (right: List[Symbol]): List[Parameter] = right.collect { 35 | case 
Inl(nt) => nonTerminalType(nt) 36 | case Inr(Inl(Inl(lit))) => literalType(lit) 37 | }.zipWithIndex.map { 38 | case (t, n) => parameter(s"arg$n", t) 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/cli/CLIApplication.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | package cli 3 | 4 | import shared._ 5 | 6 | import java.io._ 7 | 8 | import scala.{Console => Stdio} 9 | 10 | object CLIApplication extends CLIOptionParserModule with SyntaxFileParserModule 11 | with CLISyntaxRuleModule with SyntaxRuleModule with LALRAutomatonModule 12 | with ScalaCodeGeneratorModule with CodeGeneratorModule with ASTDataTypeWriterModule 13 | { 14 | def applicationMain (args: Array[String]): Unit = optionParser.parse(args, Config()) match { 15 | case Some(config) if config.syntaxFile != null => run(config) 16 | case _ => optionParser.showUsage() 17 | } 18 | 19 | private def run (config: Config): Unit = { 20 | SyntaxParsers.runParser(config.syntaxFile) match { 21 | case Right(SyntaxDefinition(qualifiedName, syntax)) => 22 | if (config.printFlag) printCode(qualifiedName, syntax) 23 | else writeCode(qualifiedName, syntax, config.directory) 24 | case Left(msg) => 25 | Stdio.err.println(s"invalid syntax file : ${config.syntaxFile}\n $msg") 26 | } 27 | } 28 | 29 | private def printCode (qualifiedName: List[String], syntax: Syntax): Unit = { 30 | val writer = new PrintWriter(Stdio.out) 31 | writeASTDataType(qualifiedName, syntax, writer) 32 | writeGeneratedDefinitions(qualifiedName, syntax, writer) 33 | } 34 | 35 | private def writeCode (qualifiedName: List[String], syntax: Syntax, directory: Option[File]): Unit = { 36 | val dir = directory.getOrElse(new File(".")) 37 | val dslFile = new File(dir, qualifiedName.mkString("/") + ".scala") 38 | val parent = dslFile.getParentFile 39 | val astFile = new File(parent, "ASTs.scala") 40 | parent.mkdirs() 41 | 42 | val writer1 = new PrintWriter(astFile) 43 | writeASTDataType(qualifiedName, syntax, writer1) 44 | writer1.close() 45 | 46 | val writer2 = new PrintWriter(dslFile) 47 | writeGeneratedDefinitions(qualifiedName, syntax, writer2) 48 | writer2.close() 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/macroimpl/AnnotationFinderModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.macroimpl 2 | 3 | trait AnnotationFinderModule { 4 | this: CommonNamesModule with MacroModule => 5 | 6 | import c.universe._ 7 | 8 | def findSyntaxAnnotation (modifiers: Modifiers): List[List[Tree]] = modifiers.annotations.collect { 9 | case AnnotationTree(ann, args) if c.typecheck(ann, c.TYPEmode).tpe =:= syntaxAnnotationType => args 10 | } 11 | 12 | def removeSyntaxAnnotation (modifiers: Modifiers): Modifiers = { 13 | Modifiers(modifiers.flags, modifiers.privateWithin, modifiers.annotations.filterNot { 14 | case AnnotationTree(ann, _) if c.typecheck(ann, c.TYPEmode).tpe =:= syntaxAnnotationType => true 15 | case _ => false 16 | }) 17 | } 18 | 19 | object AnnotationTree { 20 | /** 21 | * Extractor that decomposes a syntax tree representing an annotation into the tree for the annotation type and the trees for its arguments 22 | * @param tree syntax tree representing an annotation 23 | * @return Option[(syntax tree of the annotation type, syntax trees of the arguments)] 24 | */ 25 | def unapply (tree: Tree): Option[(Tree, List[Tree])] = tree match { 26 | case Apply(Select(New(annType), termNames.CONSTRUCTOR), args) => 
Some((annType, args)) 27 | case _ => None 28 | } 29 | } 30 | 31 | object TypeWithSyntaxAnnotation { 32 | def unapply (tree: Tree): Option[(List[List[Tree]], Tree)] = unapplyHelper(tree, Nil) 33 | 34 | private def unapplyHelper (tree: Tree, anns: List[List[Tree]]): Option[(List[List[Tree]], Tree)] = tree match { 35 | case Annotated(AnnotationTree(ann, args), t) if c.typecheck(ann, c.TYPEmode).tpe =:= syntaxAnnotationType => unapplyHelper(t, anns :+ args) 36 | case t => Some(anns -> t) 37 | } 38 | } 39 | 40 | object TypeWithSepAnnotation { 41 | def unapply (tree: Tree): Option[(Option[String], Tree)] = tree match { 42 | case Annotated(AnnotationTree(ann, List(Literal(Constant(sep: String)))), t) if c.typecheck(ann, c.TYPEmode).tpe =:= sepAnnotationType => Some((Some(sep), t)) 43 | case t => Some((None, t)) 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/macroimpl/MacroUtilitiesModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.macroimpl 2 | 3 | trait MacroUtilitiesModule { 4 | this: MacroModule => 5 | 6 | import c.universe._ 7 | 8 | object MacroUtilities { 9 | 10 | /** 11 | * Function that obtains an unused name 12 | * @return an unused name with the prefix ScaLALR$ 13 | */ 14 | def freshName: String = c.freshName("ScaLALR$") 15 | 16 | /** 17 | * Adds members to the given module or class definition 18 | * @param tree module or class definition 19 | * @param members member definitions 20 | * @return the module or class definition with the members added 21 | */ 22 | def addMembers (tree: Tree, members: List[Tree]): Tree = tree match { 23 | case ModuleDef (mod, name, Template(parents, self, body)) => 24 | ModuleDef (mod, name, Template(parents, self, body ++ members)) 25 | case ClassDef (mod, name, typeParams, Template(parents, self, body)) => 26 | ClassDef (mod, name, typeParams, Template(parents, self, body ++ members)) 27 | case other => 28 | c.abort(other.pos, s"cannot add members to $other") 29 | } 30 | 31 | /** 32 | * Function that returns all constructor definitions 33 | * @param body the body of a class definition 34 | * @return all constructor definitions 35 | */ 36 | def findConstructors (body: List[Tree]): List[DefDef] = body.collect { 37 | case c @ DefDef(_, termNames.CONSTRUCTOR, _, _, _, _) => c 38 | } 39 | 40 | /** 41 | * Function that finds the primary constructor 42 | * @param body the body of a class definition 43 | * @return Option[the primary constructor] 44 | */ 45 | def findPrimaryConstructor (body: List[Tree]): Option[DefDef] = { 46 | val paramAccessors = body.collect { 47 | case ValDef(mods, name, _, _) if mods.hasFlag(Flag.PARAMACCESSOR) => name 48 | } 49 | findConstructors(body).find { 50 | case DefDef(_, termNames.CONSTRUCTOR, _, paramLists, _, _) => paramAccessors.forall { paramName => 51 | paramLists.exists { 52 | _.exists { p => p.mods.hasFlag(Flag.PARAMACCESSOR) && p.name == paramName } 53 | } 54 | } 55 | } 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/test/scala/com/phenan/scalalr/SyntaxTest.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | 3 | import cli._ 4 | 5 | import org.scalatest._ 6 | 7 | /** 8 | * Created by @phenan on 2016/12/12. 
9 | */ 10 | class SyntaxTest extends FunSuite with Matchers { 11 | 12 | import CLIApplication._ 13 | 14 | test("expression") { 15 | val syntax = simpleSyntax 16 | 17 | syntax.expressions(NonTerminalImpl("S")) shouldBe List(derivation_S_T) 18 | syntax.expressions(NonTerminalImpl("T")) shouldBe List(branch_T_M, branch_T_N) 19 | syntax.expressions(NonTerminalImpl("M")) shouldBe List(derivation_M_mul) 20 | syntax.expressions(NonTerminalImpl("N")) shouldBe List(derivation_N_int) 21 | } 22 | 23 | test("first") { 24 | val syntax = simpleSyntax 25 | 26 | syntax.lookupFirst(List(nonTerminalSymbol("S"))) shouldBe Set(Terminal(LiteralTokenImpl(Some("int"), "Int"))) 27 | syntax.lookupFirst(List(nonTerminalSymbol("T"))) shouldBe Set(Terminal(LiteralTokenImpl(Some("int"), "Int"))) 28 | syntax.lookupFirst(List(nonTerminalSymbol("T"), keywordSymbol("mul"), nonTerminalSymbol("N"))) shouldBe Set(Terminal(LiteralTokenImpl(Some("int"), "Int"))) 29 | syntax.lookupFirst(List(nonTerminalSymbol("M"))) shouldBe Set(Terminal(LiteralTokenImpl(Some("int"), "Int"))) 30 | syntax.lookupFirst(List(keywordSymbol("mul"), nonTerminalSymbol("N"))) shouldBe Set(Terminal(Keyword("mul"))) 31 | } 32 | 33 | 34 | def simpleSyntax = Syntax(NonTerminalImpl("S"), List(derivation_S_T, branch_T_M, branch_T_N, derivation_M_mul, derivation_N_int)) 35 | 36 | private lazy val derivation_S_T: Rule = Rule(NonTerminalImpl("S"), List(nonTerminalSymbol("T")), Derivation) 37 | private lazy val branch_T_M: Rule = Rule(NonTerminalImpl("T"), List(nonTerminalSymbol("M")), Branch) 38 | private lazy val branch_T_N: Rule = Rule(NonTerminalImpl("T"), List(nonTerminalSymbol("N")), Branch) 39 | private lazy val derivation_M_mul: Rule = Rule(NonTerminalImpl("M"), List(nonTerminalSymbol("T"), keywordSymbol("mul"), nonTerminalSymbol("N")), Derivation) 40 | private lazy val derivation_N_int: Rule = Rule(NonTerminalImpl("N"), List(literalTokenSymbol("int", "Int")), Derivation) 41 | } 42 | 43 | -------------------------------------------------------------------------------- /src/test/scala/com/phenan/scalalr/LALRAutomatonTest.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | 3 | import cli._ 4 | 5 | import org.scalatest._ 6 | import shapeless._ 7 | 8 | /** 9 | * Created by @phenan on 2016/12/12. 
10 | */ 11 | class LALRAutomatonTest extends FunSuite with Matchers { 12 | import CLIApplication._ 13 | 14 | test ("LALR automaton") { 15 | val automaton = LALRAutomaton(simpleSyntax) 16 | 17 | val node0 = LRClosure(Map( 18 | LRItem(derivation_S_T, List(nonTerminalSymbol("T"))) -> Set(Terminal.eoi), 19 | LRItem(branch_T_M, List(nonTerminalSymbol("M"))) -> Set(Terminal.eoi, Terminal(Keyword("mul"))), 20 | LRItem(branch_T_N, List(nonTerminalSymbol("N"))) -> Set(Terminal.eoi, Terminal(Keyword("mul"))), 21 | LRItem(derivation_M_mul, List(nonTerminalSymbol("T"), keywordSymbol("mul"), nonTerminalSymbol("N"))) -> Set(Terminal.eoi, Terminal(Keyword("mul"))), 22 | LRItem(derivation_N_int, List(literalTokenSymbol("int", "Int"))) -> Set(Terminal.eoi, Terminal(Keyword("mul"))) 23 | )) 24 | 25 | val node1 = LRClosure(Map( 26 | LRItem(derivation_S_T, Nil) -> Set(Terminal.eoi), 27 | LRItem(derivation_M_mul, List(keywordSymbol("mul"), nonTerminalSymbol("N"))) -> Set(Terminal.eoi, Terminal(Keyword("mul"))))) 28 | 29 | val node2 = LRClosure(Map( 30 | LRItem(derivation_M_mul, List(nonTerminalSymbol("N"))) -> Set(Terminal.eoi, Terminal(Keyword("mul"))), 31 | LRItem(derivation_N_int, List(literalTokenSymbol("int", "Int"))) -> Set(Terminal.eoi, Terminal(Keyword("mul"))))) 32 | 33 | 34 | automaton.start shouldBe node0 35 | automaton.edges(node0)(Inl(NonTerminalImpl("T"))) shouldBe node1 36 | automaton.edges(node1)(Inr(Inl(Terminal(Keyword("mul"))))) shouldBe node2 37 | } 38 | 39 | def simpleSyntax = Syntax(NonTerminalImpl("S"), List(derivation_S_T, branch_T_M, branch_T_N, derivation_M_mul, derivation_N_int)) 40 | 41 | private lazy val derivation_S_T: Rule = Rule(NonTerminalImpl("S"), List(nonTerminalSymbol("T")), Derivation) 42 | private lazy val branch_T_M: Rule = Rule(NonTerminalImpl("T"), List(nonTerminalSymbol("M")), Branch) 43 | private lazy val branch_T_N: Rule = Rule(NonTerminalImpl("T"), List(nonTerminalSymbol("N")), Branch) 44 | private lazy val derivation_M_mul: Rule = Rule(NonTerminalImpl("M"), List(nonTerminalSymbol("T"), keywordSymbol("mul"), nonTerminalSymbol("N")), Derivation) 45 | private lazy val derivation_N_int: Rule = Rule(NonTerminalImpl("N"), List(literalTokenSymbol("int", "Int")), Derivation) 46 | } 47 | 48 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/cli/SyntaxFileParserModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | package cli 3 | 4 | import shared._ 5 | 6 | import java.io._ 7 | 8 | import shapeless._ 9 | import shapeless.ops.coproduct.Inject 10 | 11 | import scala.util.parsing.combinator.JavaTokenParsers 12 | 13 | trait SyntaxFileParserModule { 14 | this: CLISyntaxRuleModule with SyntaxRuleModule => 15 | 16 | case class SyntaxDefinition (qualifiedName: List[String], syntax: Syntax) 17 | 18 | object SyntaxParsers extends JavaTokenParsers { 19 | 20 | def runParser (file: File): Either[String, SyntaxDefinition] = { 21 | val reader = new BufferedReader(new FileReader(file)) 22 | val parseResult = parseAll(syntax, reader) 23 | reader.close() 24 | parseResult match { 25 | case Success(r, _) => Right(r) 26 | case NoSuccess(m, _) => Left(m) 27 | } 28 | } 29 | 30 | def syntax: Parser[SyntaxDefinition] = "syntax" ~> rep1sep(ident, ".") ~ ("(" ~> nonTerminal <~ ")" ) ~ ("{" ~> rule.* <~ "}" ) ^^ { 31 | case name ~ start ~ rules => SyntaxDefinition(name, Syntax(start, rules.flatten)) 32 | } 33 | 34 | def rule: Parser[List[Rule]] = branch | 
derivation 35 | 36 | def branch: Parser[List[Rule]] = ( nonTerminal <~ "=" ) ~ rep1sep(nonTerminal, "|") <~ ";" ^^ { 37 | case left ~ right => right.map(nt => Rule(left, List(Symbol(nt)), Branch)) 38 | } 39 | 40 | def derivation: Parser[List[Rule]] = ( nonTerminal <~ "=" ) ~ choice[Symbol](terminal, nonTerminal).+ <~ ";" ^^ { 41 | case left ~ right => List(Rule(left, right, Derivation)) 42 | } 43 | 44 | def nonTerminal: Parser[NonTerminal] = ident ^^ { id => NonTerminalImpl(id.capitalize) } 45 | 46 | def terminal: Parser[Terminal] = choice[Terminal](literal, keyword) 47 | 48 | def literal: Parser[LiteralToken] = "(" ~> ((ident <~ ":").? ~ ident) <~ ")" ^^ { case id ~ t => LiteralTokenImpl(id, t) } 49 | 50 | def keyword: Parser[Keyword] = stringLiteral ^^ { lit => Keyword(lit.substring(1, lit.length - 1)) } 51 | 52 | private def choice[R <: Coproduct]: Choice[R] = new Choice[R] 53 | 54 | private class Choice [R <: Coproduct] { 55 | def apply [T1, T2] (p1: => Parser[T1], p2: => Parser[T2]) (implicit inj1: Inject[R, T1], inj2: Inject[R, T2]): Parser[R] = { 56 | p1.map(inj1(_)) | p2.map(inj2(_)) 57 | } 58 | def apply [T1, T2, T3] (p1: => Parser[T1], p2: => Parser[T2], p3: => Parser[T3]) (implicit inj1: Inject[R, T1], inj2: Inject[R, T2], inj3: Inject[R, T3]): Parser[R] = { 59 | p1.map(inj1(_)) | p2.map(inj2(_)) | p3.map(inj3(_)) 60 | } 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/package.scala: -------------------------------------------------------------------------------- 1 | package com.phenan 2 | 3 | import com.phenan.scalalr.internal._ 4 | 5 | import scala.language.implicitConversions 6 | 7 | package object scalalr { 8 | 9 | implicit class LiteralOps [T] (t: T) { 10 | def apply [U] (value: TokenListCons[Literal[U], TokenListSentinel]): TokenListCons[Literal[U], TokenListCons[Literal[T], TokenListSentinel]] = TokenListCons(value.head, singleToken(Literal(t))) 11 | } 12 | 13 | implicit class TokenListOps [T <: TokenList] (t: T) { 14 | def apply [U] (value: TokenListCons[Literal[U], TokenListSentinel]): TokenListCons[Literal[U], T] = TokenListCons(value.head, t) 15 | } 16 | 17 | def singleToken [T] (t: T): TokenListCons[T, TokenListSentinel] = TokenListCons(t, TokenListSentinel) 18 | 19 | implicit def shift_transition [T, N1, N2] (implicit shift: Shift[T, N1, N2]): Transition[T, N1, N2] = Transition ({ (n: N1, t: T) => shift.shift(n, t) }) 20 | implicit def reduce_transition [T, N1, N2, N3] (implicit reduce: Reduce[T, N1, N2], transition: Transition[T, N2, N3]): Transition[T, N1, N3] = Transition { (state, terminal) => transition.transit(reduce.reduce(state), terminal) } 21 | implicit def accept_transition [NX, R] (implicit accept: Accept[NX, R]): Transition[EoI.type, NX, R] = Transition { (n, _) => accept.accept(n) } 22 | implicit def accept_ast [NX, R] (node: NX)(implicit transition: Transition[EoI.type, NX, R]): R = transition.transit(node, EoI) 23 | implicit def simple_transition [T, N1, N2] (implicit transition: Transition[T, N1, N2]): Transitions[TokenListCons[T, TokenListSentinel], N1, N2] = Transitions((n, h) => transition.transit(n, h.head)) 24 | implicit def composed_transitions [T, L <: TokenList, N1, N2, N3] (implicit transitions: Transitions[L, N1, N2], transition: Transition[T, N2, N3]): Transitions[TokenListCons[T, L], N1, N3] = Transitions((n, h) => transition.transit(transitions.transit(n, h.tail), h.head)) 25 | 26 | implicit def literal [T] (value: T): TokenListCons[Literal[T], 
TokenListSentinel] = singleToken(Literal(value)) 27 | 28 | implicit def acceptLiteral [T, N, R] (token: T) (implicit transition1: Transition[Literal[T], StartNode.type, N], transition2: Transition[EoI.type, N, R]): R = transition2.transit(transition1.transit(StartNode, Literal(token)), EoI) 29 | implicit def acceptTokenList [L <: TokenList, N, R] (tokens: L) (implicit transitions: Transitions[L, StartNode.type, N], transition: Transition[EoI.type, N, R]): R = transition.transit(transitions.transit(StartNode, tokens), EoI) 30 | 31 | implicit class LiteralTransition [N1, N2] (node: N1) { 32 | def literal [T](value: T)(implicit transition: Transition[Literal[T], N1, N2]): N2 = transition.transit(node, Literal[T](value)) 33 | } 34 | 35 | def $$semicolon : TokenListCons[EoI.type, TokenListSentinel] = singleToken(EoI) 36 | 37 | val scaLALRVersion: String = "2.3.3" 38 | } 39 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/macroimpl/SyntaxInfoModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.macroimpl 2 | 3 | trait SyntaxInfoModule { 4 | this: MacroModule => 5 | 6 | import c.universe._ 7 | 8 | case class SyntaxInfo (returnType: Tree, operators: List[List[String]], operandTypes: List[Tree], semantics: SemanticActionImpl) 9 | 10 | object SyntaxInfo { 11 | def epsilonOperator (returnType: Tree, returnValue: Tree): SyntaxInfo = { 12 | SyntaxInfo(returnType, Nil, Nil, SemanticActionImpl.returnConstant(returnValue)) 13 | } 14 | 15 | def unaryOperator (returnType: Tree, prefix: List[String], operandType: Tree, postfix: List[String], semantics: Tree => Tree): SyntaxInfo = { 16 | SyntaxInfo(returnType, List(prefix, postfix), List(operandType), SemanticActionImpl.unaryOperation(semantics)) 17 | } 18 | 19 | def binaryOperator (returnType: Tree, prefix: List[String], operandType1: Tree, infix: List[String], operandType2: Tree, postfix: List[String], semantics: (Tree, Tree) => Tree): SyntaxInfo = { 20 | SyntaxInfo(returnType, List(prefix, infix, postfix), List(operandType1, operandType2), SemanticActionImpl.binaryOperation(semantics)) 21 | } 22 | } 23 | 24 | class SemanticActionImpl (val run: List[Tree] => Tree) 25 | 26 | object SemanticActionImpl { 27 | 28 | lazy val returnArgument: SemanticActionImpl = SemanticActionImpl { args => 29 | if (args.lengthCompare(1) == 0) args.head 30 | else c.abort(c.enclosingPosition, s"wrong macro implementation: expected one argument, but takes ${args.mkString("(", ", ", ")")}") 31 | } 32 | 33 | def returnConstant (constant: Tree): SemanticActionImpl = SemanticActionImpl { args => 34 | if (args.isEmpty) constant 35 | else c.abort(c.enclosingPosition, s"wrong macro implementation: expected no argument, but takes ${args.mkString("(", ", ", ")")}") 36 | } 37 | 38 | def unaryOperation (operator: Tree => Tree): SemanticActionImpl = SemanticActionImpl { args => 39 | if (args.lengthCompare(1) == 0) operator(args.head) 40 | else c.abort(c.enclosingPosition, s"wrong macro implementation: expected one argument, but takes ${args.mkString("(", ", ", ")")}") 41 | } 42 | 43 | def binaryOperation (operator: (Tree, Tree) => Tree): SemanticActionImpl = SemanticActionImpl { args => 44 | if (args.lengthCompare(2) == 0) operator(args.head, args.tail.head) 45 | else c.abort(c.enclosingPosition, s"wrong macro implementation: expected two argument, but takes ${args.mkString("(", ", ", ")")}") 46 | } 47 | 48 | def constructorCall (typeName: Tree, 
parameterCorrespondence: List[Tree] => List[List[Tree]]): SemanticActionImpl = SemanticActionImpl { args => 49 | q"new $typeName(...${parameterCorrespondence(args)})" 50 | } 51 | 52 | def functionCall (functionRef: Tree, parameterCorrespondence: List[Tree] => List[List[Tree]]): SemanticActionImpl = SemanticActionImpl { args => 53 | q"$functionRef(...${parameterCorrespondence(args)})" 54 | } 55 | 56 | def apply (run: List[Tree] => Tree): SemanticActionImpl = new SemanticActionImpl(run) 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /test/out/com/phenan/arith/Main.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.arith 2 | 3 | import com.phenan.scalalr._ 4 | 5 | import scala.language.postfixOps 6 | 7 | object Main { 8 | import MathDSL._ 9 | 10 | def main (args: Array[String]): Unit = { 11 | 12 | val program: Program = literal(10) $$plus literal(2) $$asterisk $$parenleft literal(10) $$slash literal(5) $$parenright 13 | 14 | println(program) 15 | 16 | val program2: Program = literal(10).$$plus.literal(2).$$asterisk.$$parenleft.literal(10).$$slash.literal(5).$$parenright 17 | 18 | println(program2) 19 | 20 | val program3: Program = literal(10) $$plus (2) $$asterisk $$parenleft literal(10) $$slash (5) $$parenright 21 | 22 | println(program3) 23 | 24 | val program4: Program = (10) $$plus (2) $$asterisk $$parenleft (10) $$slash (5) $$hyphen (8) $$slash (2) $$parenright 25 | 26 | println(program4) 27 | 28 | val program5: Program = (10) 29 | 30 | println(program5) 31 | 32 | val program6: Program = literal(10) 33 | 34 | println(program6) 35 | 36 | val longProgram: Program = 37 | 1 $$plus 2 $$plus 3 $$plus 4 $$plus 5 $$plus 6 $$plus 7 $$plus 8 $$plus 9 $$plus 10 $$plus 38 | 11 $$plus 12 $$plus 13 $$plus 14 $$plus 15 $$plus 16 $$plus 17 $$plus 18 $$plus 19 $$plus 20 $$plus 39 | 21 $$plus 22 $$plus 23 $$plus 24 $$plus 25 $$plus 26 $$plus 27 $$plus 28 $$plus 29 $$plus 30 $$plus 40 | 31 $$plus 32 $$plus 33 $$plus 34 $$plus 35 $$plus 36 $$plus 37 $$plus 38 $$plus 39 $$plus 40 $$plus 41 | 41 $$plus 42 $$plus 43 $$plus 44 $$plus 45 $$plus 46 $$plus 47 $$plus 48 $$plus 49 $$plus 50 $$plus 42 | 51 $$plus 52 $$plus 53 $$plus 54 $$plus 55 $$plus 56 $$plus 57 $$plus 58 $$plus 59 $$plus 60 $$plus 43 | 61 $$plus 62 $$plus 63 $$plus 64 $$plus 65 $$plus 66 $$plus 67 $$plus 68 $$plus 69 $$plus 70 $$plus 44 | 71 $$plus 72 $$plus 73 $$plus 74 $$plus 75 $$plus 76 $$plus 77 $$plus 78 $$plus 79 $$plus 80 $$plus 45 | 81 $$plus 82 $$plus 83 $$plus 84 $$plus 85 $$plus 86 $$plus 87 $$plus 88 $$plus 89 $$plus 90 $$plus 46 | 91 $$plus 92 $$plus 93 $$plus 94 $$plus 95 $$plus 96 $$plus 97 $$plus 98 $$plus 99 $$plus 100 $$plus 47 | 101 $$plus 102 $$plus 103 $$plus 104 $$plus 105 $$plus 106 $$plus 107 $$plus 108 $$plus 109 $$plus 110 $$plus 48 | 111 $$plus 112 $$plus 113 $$plus 114 $$plus 115 $$plus 116 $$plus 117 $$plus 118 $$plus 119 $$plus 120 $$plus 49 | 121 $$plus 122 $$plus 123 $$plus 124 $$plus 125 $$plus 126 $$plus 127 $$plus 128 $$plus 129 $$plus 130 $$plus 50 | 131 $$plus 132 $$plus 133 $$plus 134 $$plus 135 $$plus 136 $$plus 137 $$plus 138 $$plus 139 $$plus 140 $$plus 51 | 141 $$plus 142 $$plus 143 $$plus 144 $$plus 145 $$plus 146 $$plus 147 $$plus 148 $$plus 149 $$plus 150 $$plus 52 | 151 $$plus 152 $$plus 153 $$plus 154 $$plus 155 $$plus 156 $$plus 157 $$plus 158 $$plus 159 $$plus 160 $$plus 53 | 161 $$plus 162 $$plus 163 $$plus 164 $$plus 165 $$plus 166 $$plus 167 $$plus 168 $$plus 169 $$plus 170 $$plus 54 
| 171 $$plus 172 $$plus 173 $$plus 174 $$plus 175 $$plus 176 $$plus 177 $$plus 178 $$plus 179 $$plus 180 $$plus 55 | 181 $$plus 182 $$plus 183 $$plus 184 $$plus 185 $$plus 186 $$plus 187 $$plus 188 $$plus 189 $$plus 190 $$plus 56 | 191 $$plus 192 $$plus 193 $$plus 194 $$plus 195 $$plus 196 $$plus 197 $$plus 198 $$plus 199 $$plus 200 $$plus 57 | 201 $$plus 202 $$plus 203 $$plus 204 $$plus 205 $$plus 206 $$plus 207 $$plus 208 $$plus 209 $$plus 210 $$plus 58 | 211 $$plus 212 $$plus 213 $$plus 214 $$plus 215 $$plus 216 $$plus 217 $$plus 218 $$plus 219 $$plus 220 $$plus 59 | 221 $$plus 222 $$plus 223 $$plus 224 $$plus 225 $$plus 226 $$plus 227 $$plus 228 $$plus 229 $$plus 230 $$plus 60 | 231 $$plus 232 $$plus 233 $$plus 234 $$plus 235 $$plus 236 $$plus 237 $$plus 238 $$plus 239 $$plus 240 $$plus 61 | 241 $$plus 242 $$plus 243 $$plus 244 $$plus 245 $$plus 246 $$plus 247 $$plus 248 $$plus 249 $$plus 250 $$plus 62 | 251 $$plus 252 $$plus 253 $$plus 254 $$plus 255 $$plus 256 $$plus 257 $$plus 258 $$plus 259 $$plus 260 $$plus 63 | 261 $$plus 262 $$plus 263 $$plus 264 $$plus 265 $$plus 266 $$plus 267 $$plus 268 $$plus 269 $$plus 270 $$plus 64 | 271 $$plus 272 $$plus 273 $$plus 274 $$plus 275 $$plus 276 $$plus 277 $$plus 278 $$plus 279 $$plus 280 $$plus 65 | 281 $$plus 282 $$plus 283 $$plus 284 $$plus 285 $$plus 286 $$plus 287 $$plus 288 $$plus 289 $$plus 290 $$plus 66 | 291 $$plus 292 $$plus 293 $$plus 294 $$plus 295 $$plus 296 $$plus 297 $$plus 298 $$plus 299 $$plus 300 67 | 68 | println(longProgram) 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /font/LICENSE: -------------------------------------------------------------------------------- 1 | Original work Copyright 2006 The Inconsolata Project Authors 2 | Modified work Copyright 2018 Kazuhiro Ichikawa 3 | 4 | This Font Software is licensed under the SIL Open Font License, Version 1.1. 5 | This license is copied below, and is also available with a FAQ at: 6 | http://scripts.sil.org/OFL 7 | 8 | 9 | ----------------------------------------------------------- 10 | SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 11 | ----------------------------------------------------------- 12 | 13 | PREAMBLE 14 | The goals of the Open Font License (OFL) are to stimulate worldwide 15 | development of collaborative font projects, to support the font creation 16 | efforts of academic and linguistic communities, and to provide a free and 17 | open framework in which fonts may be shared and improved in partnership 18 | with others. 19 | 20 | The OFL allows the licensed fonts to be used, studied, modified and 21 | redistributed freely as long as they are not sold by themselves. The 22 | fonts, including any derivative works, can be bundled, embedded, 23 | redistributed and/or sold with any software provided that any reserved 24 | names are not used by derivative works. The fonts and derivatives, 25 | however, cannot be released under any other type of license. The 26 | requirement for fonts to remain under this license does not apply 27 | to any document created using the fonts or their derivatives. 28 | 29 | DEFINITIONS 30 | "Font Software" refers to the set of files released by the Copyright 31 | Holder(s) under this license and clearly marked as such. This may 32 | include source files, build scripts and documentation. 33 | 34 | "Reserved Font Name" refers to any names specified as such after the 35 | copyright statement(s). 
36 | 37 | "Original Version" refers to the collection of Font Software components as 38 | distributed by the Copyright Holder(s). 39 | 40 | "Modified Version" refers to any derivative made by adding to, deleting, 41 | or substituting -- in part or in whole -- any of the components of the 42 | Original Version, by changing formats or by porting the Font Software to a 43 | new environment. 44 | 45 | "Author" refers to any designer, engineer, programmer, technical 46 | writer or other person who contributed to the Font Software. 47 | 48 | PERMISSION & CONDITIONS 49 | Permission is hereby granted, free of charge, to any person obtaining 50 | a copy of the Font Software, to use, study, copy, merge, embed, modify, 51 | redistribute, and sell modified and unmodified copies of the Font 52 | Software, subject to the following conditions: 53 | 54 | 1) Neither the Font Software nor any of its individual components, 55 | in Original or Modified Versions, may be sold by itself. 56 | 57 | 2) Original or Modified Versions of the Font Software may be bundled, 58 | redistributed and/or sold with any software, provided that each copy 59 | contains the above copyright notice and this license. These can be 60 | included either as stand-alone text files, human-readable headers or 61 | in the appropriate machine-readable metadata fields within text or 62 | binary files as long as those fields can be easily viewed by the user. 63 | 64 | 3) No Modified Version of the Font Software may use the Reserved Font 65 | Name(s) unless explicit written permission is granted by the corresponding 66 | Copyright Holder. This restriction only applies to the primary font name as 67 | presented to the users. 68 | 69 | 4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font 70 | Software shall not be used to promote, endorse or advertise any 71 | Modified Version, except to acknowledge the contribution(s) of the 72 | Copyright Holder(s) and the Author(s) or with their explicit written 73 | permission. 74 | 75 | 5) The Font Software, modified or unmodified, in part or in whole, 76 | must be distributed entirely under this license, and must not be 77 | distributed under any other license. The requirement for fonts to 78 | remain under this license does not apply to any document created 79 | using the Font Software. 80 | 81 | TERMINATION 82 | This license becomes null and void if any of the above conditions are 83 | not met. 84 | 85 | DISCLAIMER 86 | THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 87 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF 88 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT 89 | OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE 90 | COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 91 | INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL 92 | DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 93 | FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM 94 | OTHER DEALINGS IN THE FONT SOFTWARE. 
95 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/macroimpl/TreeGeneratorModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | package macroimpl 3 | 4 | import shared._ 5 | 6 | trait TreeGeneratorModule { 7 | this: CodeGeneratorModule with TyperModule with SyntaxGeneratorModule with SyntaxInfoModule with SyntaxRuleModule with MacroUtilitiesModule with MacroModule => 8 | 9 | import c.universe._ 10 | 11 | override type GeneratedCode = List[Tree] 12 | 13 | override lazy val output: TreeOutput = new TreeOutput(typer) 14 | 15 | class TreeOutput (typer: Typer) extends Output { 16 | override type Type = Tree 17 | override type Parameter = ValDef 18 | override type TypeParameter = TypeDef 19 | override type MemberDef = Tree 20 | override type Expr = Tree 21 | 22 | def generateProgram (modules: List[Tree]): GeneratedCode = modules 23 | 24 | def generateUniqueName: String = MacroUtilities.freshName 25 | 26 | def literalIdentifier (lit: LiteralToken): Option[String] = None 27 | 28 | def simpleType (typeName: String): Type = typer.stringToTypeTree(typeName) 29 | def objectType (objectName: String): Type = tq"${typer.stringToQualifiedTerm(objectName)}.type" 30 | def nonTerminalType (nt: NonTerminal): Type = typer.unchecked(nt.ntType) 31 | def literalType (lit: LiteralToken): Type = typer.unchecked(lit.literalType) 32 | 33 | def tuple2Type (v1: Type, v2: Type): Type = tq"($v1, $v2)" 34 | def functionType (left: Type, right: Type): Type = tq"$left => $right" 35 | def parameterizedType (genName: String, args: List[Type]): Type = tq"${typer.stringToTypeTree(genName)}[..$args]" 36 | 37 | def parameter (name: String, paramType: Type): Parameter = ValDef(Modifiers(Flag.PARAM), TermName(name), paramType, EmptyTree) 38 | def unusedParameter (paramType: Type): Parameter = ValDef(Modifiers(Flag.PARAM), TermName(generateUniqueName), paramType, EmptyTree) 39 | 40 | def typeParameter (name: String): TypeParameter = TypeDef(Modifiers(Flag.PARAM), TypeName(name), Nil, TypeBoundsTree(EmptyTree, EmptyTree)) 41 | def typeParameter (name: String, bound: Type): TypeParameter = TypeDef(Modifiers(Flag.PARAM), TypeName(name), Nil, TypeBoundsTree(EmptyTree, bound)) 42 | 43 | def sealedTraitDef (name: String, superType: Option[Type]): MemberDef = superType match { 44 | case Some(sup) => q"sealed trait ${TypeName(name)} extends $sup" 45 | case None => q"sealed trait ${TypeName(name)}" 46 | } 47 | 48 | def caseClassDef (name: String, typeParams: List[TypeParameter], params: List[Parameter], superType: Option[Type]): MemberDef = superType match { 49 | case Some(sup) => q"case class ${TypeName(name)} [..$typeParams] (..$params) extends $sup" 50 | case None => q"case class ${TypeName(name)} [..$typeParams] (..$params)" 51 | } 52 | 53 | def caseObjectDef (name: String): MemberDef = q"case object ${TermName(name)}" 54 | 55 | def lazyValDef (name: String, valType: Type, value: Expr): MemberDef = q"lazy val ${TermName(name)}: $valType = $value" 56 | 57 | def functionDef (name: String, typeParams: List[TypeParameter], parameters: List[Parameter], implicitParams: List[Parameter], returnType: Type, body: Expr): MemberDef = { 58 | if (parameters.isEmpty) q"def ${TermName(name)} [..$typeParams] (implicit ..$implicitParams): $returnType = $body" 59 | else q"def ${TermName(name)} [..$typeParams] (..$parameters)(implicit ..$implicitParams): $returnType = $body" 60 | } 61 | 62 | def 
implicitFunctionDef (typeParams: List[TypeParameter], parameters: List[Parameter], implicitParams: List[Parameter], returnType: Type, body: Expr): MemberDef = { 63 | if (parameters.isEmpty) q"implicit def ${TermName(generateUniqueName)} [..$typeParams] (implicit ..$implicitParams): $returnType = $body" 64 | else q"implicit def ${TermName(generateUniqueName)} [..$typeParams] (..$parameters)(implicit ..$implicitParams): $returnType = $body" 65 | } 66 | 67 | def implicitClassDef (typeParams: List[TypeParameter], parameter: Parameter, implicitParams: List[Parameter], members: List[MemberDef]): MemberDef = { 68 | q"implicit class ${TypeName(generateUniqueName)} [..$typeParams] ($parameter)(implicit ..$implicitParams) { ..$members }" 69 | } 70 | 71 | def objectRef (objectName: String): Expr = typer.stringToQualifiedTerm(objectName) 72 | 73 | def methodCall (receiver: Expr, methodName: String, typeArgs: List[Type], args: List[Expr]): Expr = { 74 | q"$receiver.${TermName(methodName)}[..$typeArgs](..$args)" 75 | } 76 | 77 | def fieldRef (receiver: Expr, fieldName: String): Expr = { 78 | q"$receiver.${TermName(fieldName)}" 79 | } 80 | 81 | def callApply (receiver: Expr, typeArgs: List[Type], args: List[Expr]): Expr = { 82 | q"$receiver[..$typeArgs](..$args)" 83 | } 84 | 85 | def lambda (parameters: List[Parameter], body: Expr): Expr = { 86 | q"(..$parameters) => $body" 87 | } 88 | 89 | def constructAST (rule: Rule, args: List[Expr]): Expr = rule.action.run(args) 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/macroimpl/TyperModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.macroimpl 2 | 3 | import java.util.regex.Pattern 4 | 5 | trait TyperModule { 6 | this: AnnotationFinderModule with MacroUtilitiesModule with MacroModule => 7 | 8 | import c.universe._ 9 | 10 | def typer: Typer = typerVar 11 | 12 | def doTypeCheck (tree: Tree): Unit = { 13 | typerVar = Typer(tree) 14 | } 15 | 16 | private var typerVar: Typer = _ 17 | 18 | case class Typer private (classTypes: Map[TypeName, Type], moduleTypes: Map[TermName, Type], outerName: TermName) { 19 | 20 | lazy val packageName: String = c.typecheck(q"object ${TermName(MacroUtilities.freshName)}").symbol.owner.fullName 21 | 22 | def check (tree: Tree): Type = treeToTypeMemo.getOrElseUpdate(tree, resolveType(tree)) 23 | 24 | def unchecked (t: Type): Tree = t match { 25 | case AnnotatedType(_, underlying) => // remove annotations 26 | unchecked(underlying) 27 | case _ if t.typeArgs.nonEmpty => 28 | tq"${stringToTypeTree(t.typeConstructor.typeSymbol.fullName)}[..${t.typeArgs.map(unchecked)}]" 29 | case _ => 30 | classTypes_rev.get(t).map(clazz => tq"$clazz") 31 | .orElse(moduleTypes_rev.get(t).map(module => tq"$module.type")) 32 | .getOrElse(tq"$t") 33 | } 34 | 35 | def stringToTypeTree (string: String): Tree = { 36 | val dot = string.lastIndexOf('.') 37 | val prefix = string.take(dot) 38 | val postfix = string.drop(dot + 1) 39 | 40 | if (prefix.isEmpty) tq"${TypeName(postfix)}" 41 | else tq"${stringToQualifiedTerm(prefix)}.${TypeName(postfix)}" 42 | } 43 | 44 | def stringToQualifiedTerm (string: String): Tree = { 45 | val terms = string.split(Pattern.quote(".")).map(TermName(_)) 46 | terms.tail.foldLeft [Tree] (q"${terms.head}") { (left, term) => q"$left.$term" } 47 | } 48 | 49 | private lazy val classTypes_rev = classTypes.map { case (k, v) => (v, k) } 50 | private lazy val moduleTypes_rev = 
moduleTypes.map { case (k, v) => (v, k) } 51 | 52 | private def resolveType (tree: Tree): Type = tree match { 53 | case Ident(name: TypeName) if classTypes.contains(name) => 54 | classTypes(name) 55 | case Select(qualifier, name: TypeName) if qualifierIsOuter(qualifier) && classTypes.contains(name) => 56 | classTypes(name) 57 | case SingletonTypeTree(Ident(name: TermName)) if moduleTypes.contains(name) => 58 | moduleTypes(name) 59 | case SingletonTypeTree(Select(qualifier, name: TermName)) if qualifierIsOuter(qualifier) && moduleTypes.contains(name) => 60 | moduleTypes(name) 61 | case Ident(_) => 62 | c.typecheck(tree, c.TYPEmode).tpe.dealias 63 | case Select(_, _) => 64 | c.typecheck(tree, c.TYPEmode).tpe.dealias 65 | case SingletonTypeTree(_) => 66 | c.typecheck(tree, c.TYPEmode).tpe.dealias 67 | case AppliedTypeTree(typeConstructor, args) => 68 | c.typecheck(tq"$typeConstructor[..${args.map(check)}]", c.TYPEmode).tpe.dealias 69 | case Annotated(AnnotationTree(ann, args), t) => 70 | c.typecheck(tq"${check(t)}@$ann(..$args)", c.TYPEmode).tpe.dealias 71 | case _ => 72 | c.typecheck(tree, c.TYPEmode).tpe.dealias 73 | } 74 | 75 | private def qualifierIsOuter (qualifier: Tree): Boolean = qualifier match { 76 | case Ident(name) => name == outerName 77 | case Select(pack, name) => show(pack) == packageName && name == outerName 78 | } 79 | 80 | private val treeToTypeMemo: scala.collection.mutable.Map[Tree, Type] = scala.collection.mutable.Map.empty 81 | } 82 | 83 | object Typer { 84 | def apply (tree: Tree): Typer = tree match { 85 | case m : ModuleDef => 86 | moduleDefTypeChecker(m) 87 | case other => 88 | c.abort(tree.pos, s"@dsl can be annotated to object: $other") 89 | } 90 | 91 | private def moduleDefTypeChecker (m: ModuleDef): Typer = { 92 | val ModuleDef(mod, name, Template(parents, self, body)) = m 93 | 94 | val classDefs = collectClassDefs(body) 95 | val moduleDefs = collectModuleDefs(body) 96 | 97 | val nameAndType_Class = classDefs.map(TermName(MacroUtilities.freshName) -> _.name).toMap 98 | val nameAndType_Module = moduleDefs.map(TermName(MacroUtilities.freshName) -> _.name).toMap 99 | 100 | val classExprs = nameAndType_Class.map { case (n, t) => q"def $n : $t = null" }.toList 101 | val moduleExprs = nameAndType_Module.map { case (n, t) => q"def $n : $t.type = null" }.toList 102 | 103 | val moduleDef = ModuleDef(mod, name, Template(parents, self, classDefs ++ moduleDefs ++ classExprs ++ moduleExprs)) 104 | 105 | val ModuleDef(_, _, Template(_, _, trees)) = c.typecheck(moduleDef) 106 | 107 | val classTypes = trees.collect { 108 | case DefDef(_, n, _, _, t, _) if nameAndType_Class.contains(n) => nameAndType_Class(n) -> t.tpe.dealias 109 | }.toMap 110 | 111 | val moduleTypes = trees.collect { 112 | case DefDef(_, n, _, _, t, _) if nameAndType_Module.contains(n) => nameAndType_Module(n) -> t.tpe.dealias 113 | }.toMap 114 | 115 | Typer(classTypes, moduleTypes, name) 116 | } 117 | 118 | private def collectClassDefs (body: List[Tree]) = body.collect { case c: ClassDef => c } 119 | private def collectModuleDefs (body: List[Tree]) = body.collect { case m: ModuleDef => m } 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/macroimpl/SyntaxGeneratorModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | package macroimpl 3 | 4 | import shared._ 5 | 6 | import shapeless._ 7 | 8 | import scala.collection._ 9 | 10 | trait 
SyntaxGeneratorModule { 11 | this: TyperModule with SyntaxInfoModule with SyntaxRuleModule with MacroModule => 12 | 13 | import c.universe._ 14 | 15 | case class NonTerminalImpl (ntType: Type) 16 | case class LiteralTokenImpl (literalType: Type) 17 | 18 | override type NonTerminal = NonTerminalImpl 19 | override type LiteralToken = LiteralTokenImpl 20 | override type SemanticAction = SemanticActionImpl 21 | 22 | def generateSyntax (syntaxInfoList: List[SyntaxInfo]): Syntax = SyntaxGenerator.generate(syntaxInfoList) 23 | 24 | private object SyntaxGenerator { 25 | /** 26 | * syntax アノテーションの情報から文法を生成する関数 27 | * @param syntaxInfoList syntax アノテーションの情報 28 | * @return 生成された文法 29 | */ 30 | def generate (syntaxInfoList: List[SyntaxInfo]): Syntax = { 31 | val derivations = syntaxInfoList.map(generateRule) 32 | val inheritances = inheritanceRules(start, derivations) 33 | val literals = literalRules(collectNonTerminals(derivations ++ inheritances)) 34 | Syntax(start, derivations ++ inheritances ++ literals) 35 | } 36 | 37 | /** 38 | * dslアノテーションの型引数から開始記号を求める関数 39 | * @return 開始記号 40 | */ 41 | private lazy val start: NonTerminal = c.prefix.tree match { 42 | case Apply(Select(New(AppliedTypeTree(Ident(TypeName("dsl")), List(t))), termNames.CONSTRUCTOR), Nil) => NonTerminalImpl(typer.check(t)) 43 | case _ => c.abort(c.prefix.tree.pos, "@dsl should take a type argument and should not take an argument") 44 | } 45 | 46 | /** 47 | * syntax アノテーションで指定された文法を Rule に変換する 48 | * @param syntaxInfo syntax アノテーションで指定された文法の情報 49 | * @return 対応する文法規則 50 | */ 51 | private def generateRule (syntaxInfo: SyntaxInfo): Rule = { 52 | val left = NonTerminalImpl(typer.check(syntaxInfo.returnType)) 53 | if (syntaxInfo.operators == Nil && syntaxInfo.operandTypes == Nil) Rule(left, List(Symbol(EmptyString)), syntaxInfo.semantics) 54 | else { 55 | val operators = syntaxInfo.operators.map(_.map(Keyword)) 56 | val operands = syntaxInfo.operandTypes.map(t => NonTerminalImpl(typer.check(t))) 57 | val expr = buildSyntaxExpression(operators, operands) 58 | Rule(left, expr, syntaxInfo.semantics) 59 | } 60 | } 61 | 62 | /** 63 | * オペレータ(キーワード)のリストとオペランド(引数部)のリストから文法式を組み立てる 64 | * @param operators オペレータ(キーワード)のリスト 65 | * @param operands オペランド(引数部)のリスト 66 | * @return 文法式 67 | */ 68 | private def buildSyntaxExpression (operators: List[List[Keyword]], operands: List[NonTerminal]): List[Symbol] = { 69 | def operatorSymbols = operators.map(_.map(k => Symbol(Terminal(k)))) 70 | def operandSymbols = operands.map(nt => List(Symbol(nt))) 71 | ( Nil :: operandSymbols zip operatorSymbols ).flatMap ( pair => pair._1 ++ pair._2 ) 72 | } 73 | 74 | /** 75 | * syntax アノテーションで指定された文法に追加する、継承関係を表現した文法規則を生成する関数 76 | * @param start 開始記号 77 | * @param derivations syntax アノテーションで指定された文法 78 | * @return 継承関係を表現した文法規則 79 | */ 80 | private def inheritanceRules (start: NonTerminal, derivations: List[Rule]): List[Rule] = { 81 | val leftNonTerminals: Set[NonTerminal] = derivations.map(_.left) (breakOut) 82 | val rightNonTerminals: Set[NonTerminal] = collectNonTerminals(derivations) 83 | 84 | val inheritances: Map[NonTerminal, Set[NonTerminal]] = (leftNonTerminals ++ rightNonTerminals).map { left => 85 | left -> (rightNonTerminals + start).filter(left.ntType <:< _.ntType).filterNot(left.ntType =:= _.ntType) 86 | } (breakOut) 87 | 88 | eliminateShortcut(inheritances).flatMap { 89 | case (left, rights) => rights.map(right => Rule(right, List(Symbol(left)), SemanticActionImpl.returnArgument)) 90 | } (breakOut) 91 | } 92 | 93 | /** 94 | * 継承関係を表現する Map 
から、ショートカットとなるようなパスを削除する関数 95 | * 例えば、A -> B, B -> C があったとき A -> C のパスを削除する 96 | * @param inheritances 継承関係を表現する Map 97 | * @return ショートカットパスを削除したもの 98 | */ 99 | private def eliminateShortcut (inheritances: Map[NonTerminal, Set[NonTerminal]]): Map[NonTerminal, Set[NonTerminal]] = { 100 | inheritances.map { case (left, rights) => 101 | left -> rights.filterNot { right => 102 | existsLongPath(left, right, inheritances) 103 | } 104 | } 105 | } 106 | 107 | private def existsLongPath (from: NonTerminal, to: NonTerminal, paths: Map[NonTerminal, Set[NonTerminal]]): Boolean = { 108 | paths(from).filter(_ != to).exists(via => paths.get(via).exists(_.contains(to))) 109 | } 110 | 111 | /** 112 | * 文法規則の右辺で利用されている全ての非終端記号の集合を求める関数 113 | * @param rules 文法規則 114 | * @return 非終端記号の集合 115 | */ 116 | private def collectNonTerminals (rules: List[Rule]): Set[NonTerminal] = { 117 | rules.flatMap { _.right.collect { case Inl(nt) => nt } } (breakOut) 118 | } 119 | 120 | /** 121 | * 与えられた非終端記号に対応するリテラルを記述できる文法規則を生成する関数 122 | * @param nonTerminals 非終端記号の集合 123 | * @return 対応するリテラルの文法規則 124 | */ 125 | private def literalRules (nonTerminals: Set[NonTerminal]): Set[Rule] = { 126 | nonTerminals.map(nt => Rule(nt, List(Symbol(Terminal(LiteralTokenImpl(nt.ntType)))), SemanticActionImpl.returnArgument)) 127 | } 128 | } 129 | } 130 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/cli/ScalaCodeGeneratorModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr 2 | package cli 3 | 4 | import shared._ 5 | import java.io._ 6 | 7 | import scala.util.Random 8 | 9 | trait ScalaCodeGeneratorModule { 10 | this: ASTDataTypeWriterModule with CodeGeneratorModule with CLISyntaxRuleModule with SyntaxRuleModule with LALRAutomatonModule => 11 | 12 | def writeGeneratedDefinitions (qualifiedName: List[String], syntax: Syntax, writer: PrintWriter): Unit = { 13 | val gen = CodeGenerator(LALRAutomaton(syntax)) 14 | if (qualifiedName.lengthCompare(1) > 0) { 15 | writer.println(s"package ${qualifiedName.init.mkString(".")}") 16 | } 17 | writer.println(gen.generateCode(output.moduleDefinition(qualifiedName.last, gen.generatedDefinitions))) 18 | } 19 | 20 | override type GeneratedCode = String 21 | 22 | override val output: StringOutput.type = StringOutput 23 | 24 | object StringOutput extends Output { 25 | case class OutputState (indentLevel: Int) { 26 | lazy val indent: OutputState = OutputState(indentLevel + 1) 27 | lazy val newLine: String = "\n" + indentString 28 | lazy val indentString: String = (0 until indentLevel).map(_ => " ").mkString 29 | } 30 | 31 | type OutputBuilder = OutputState => String 32 | 33 | type MemberDef = OutputBuilder 34 | 35 | type Type = String 36 | type Parameter = String 37 | type TypeParameter = String 38 | type Expr = OutputBuilder 39 | 40 | Random.setSeed(System.currentTimeMillis()) 41 | 42 | def generateProgram (modules: List[MemberDef]): GeneratedCode = modules.map(_(OutputState(0))).mkString 43 | 44 | def generateUniqueName: String = "ScaLALR$" + Random.nextInt.abs 45 | 46 | def literalIdentifier (lit: LiteralToken): Option[String] = lit.identifier 47 | 48 | def simpleType (typeName: String): Type = typeName 49 | def objectType (objectName: String): Type = objectName + ".type" 50 | def nonTerminalType (nt: NonTerminal): Type = nt.name 51 | def literalType (lit: LiteralToken): String = lit.litType 52 | 53 | def tuple2Type (v1: Type, v2: Type): Type = s"($v1, $v2)" 54 | def 
functionType (left: Type, right: Type): Type = s"$left => $right" 55 | def parameterizedType (genName: String, args: List[Type]): Type = s"$genName${typeArguments(args)}" 56 | 57 | def parameter (name: String, paramType: Type): Parameter = s"$name: $paramType" 58 | def unusedParameter (paramType: Type): Parameter = s"_ : $paramType" 59 | 60 | def typeParameter (name: String): TypeParameter = name 61 | def typeParameter (name: String, bound: Type): TypeParameter = s"$name <: $bound" 62 | 63 | def moduleDefinition (moduleName: String, members: List[MemberDef]): MemberDef = s => { 64 | s"${s.newLine}object $moduleName {${members.map(_(s.indent)).mkString}${s.newLine}}" 65 | } 66 | 67 | def sealedTraitDef (name: String, superType: Option[Type]): MemberDef = s => { 68 | s"${s.newLine}sealed trait $name${extendsClause(superType)}" 69 | } 70 | 71 | def caseClassDef (name: String, typeParams: List[TypeParameter], params: List[Parameter], superType: Option[Type]): MemberDef = s => { 72 | s"${s.newLine}case class $name ${typeParameters(typeParams)} ${parameters(params)}${extendsClause(superType)}" 73 | } 74 | 75 | def caseObjectDef (name: String): MemberDef = s => s"${s.newLine}case object $name" 76 | 77 | def lazyValDef (name: String, valType: Type, value: Expr): MemberDef = s => s"${s.newLine}lazy val $name : $valType = ${value(s)}" 78 | 79 | def functionDef (name: String, typeParams: List[String], params: List[Parameter], implicitParams: List[Parameter], returnType: Type, body: Expr): MemberDef = s => { 80 | s"${s.newLine}def $name ${typeParameters(typeParams)}${parameters(params)}${implicitParameters(implicitParams)}: $returnType = ${body(s)}" 81 | } 82 | 83 | def implicitFunctionDef (typeParams: List[String], params: List[Parameter], implicitParams: List[Parameter], returnType: Type, body: Expr): MemberDef = s => { 84 | s"${s.newLine}implicit def $generateUniqueName ${typeParameters(typeParams)}${parameters(params)}${implicitParameters(implicitParams)}: $returnType = ${body(s)}" 85 | } 86 | 87 | def implicitClassDef (typeParams: List[String], param: Parameter, implicitParams: List[Parameter], members: List[MemberDef]): MemberDef = s => { 88 | s"${s.newLine}implicit class $generateUniqueName ${typeParameters(typeParams)}($param)${implicitParameters(implicitParams)} {${members.map(_(s.indent)).mkString}${s.newLine}}" 89 | } 90 | 91 | def objectRef (objectName: String): Expr = _ => objectName 92 | def methodCall (receiver: Expr, methodName: String, typeArgs: List[Type], args: List[Expr]): Expr = s => s"${receiver(s)}.$methodName${typeArguments(typeArgs)}${arguments(args)(s)}" 93 | def fieldRef (receiver: Expr, fieldName: String): Expr = s => s"${receiver(s)}.$fieldName" 94 | def callApply (receiver: Expr, typeArgs: List[Type], args: List[Expr]): Expr = s => s"${receiver(s)}${typeArguments(typeArgs)}${arguments(args)(s)}" 95 | def lambda (params: List[Parameter], body: Expr): Expr = s => s"{ ${parameters(params)} => ${body(s)} }" 96 | 97 | def constructAST (rule: Rule, args: List[Expr]): Expr = s => rule.action match { 98 | case Branch if args.lengthCompare(1) == 0 => args.head(s) 99 | case Derivation => s"${rule.left.name}${arguments(args)(s)}" 100 | case _ => throw new RuntimeException("branch rule should take only one argument") 101 | } 102 | 103 | private def extendsClause (superType: Option[Type]): String = superType match { 104 | case Some(t) => s" extends $t" 105 | case None => "" 106 | } 107 | 108 | private def typeParameters (typeParams: List[TypeParameter]): String = { 109 | if 
(typeParams.nonEmpty) typeParams.mkString("[", ", ", "]") 110 | else "" 111 | } 112 | 113 | private def typeArguments (typeArgs: List[Type]): String = { 114 | if (typeArgs.nonEmpty) typeArgs.mkString("[", ", ", "]") 115 | else "" 116 | } 117 | 118 | private def parameters (params: List[Parameter]): String = { 119 | if (params.nonEmpty) params.mkString("(", ", ", ")") 120 | else "" 121 | } 122 | 123 | private def implicitParameters (params: List[Parameter]): String = { 124 | if (params.nonEmpty) params.mkString("(implicit ", ", ", ")") 125 | else "" 126 | } 127 | 128 | private def arguments (args: List[Expr]): OutputBuilder = s => { 129 | args.map(_(s)).mkString("(", ", ", ")") 130 | } 131 | } 132 | } 133 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/shared/LALRAutomatonModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.shared 2 | 3 | import shapeless._ 4 | 5 | import collection.breakOut 6 | 7 | trait LALRAutomatonModule { 8 | self: SyntaxRuleModule => 9 | 10 | import HList.ListCompat._ 11 | 12 | case class LALRAutomaton (syntax: Syntax) { 13 | 14 | lazy val start: LRClosure = mappingLR1toLALR(lr1.start) 15 | 16 | /** 17 | * 全てのエッジの集合 18 | */ 19 | lazy val edges: Map[LRClosure, Map[NonEmptySymbol, LRClosure]] = lr1.edges.toSet.map { (pair: (LRClosure, Map[NonEmptySymbol, LRClosure])) => 20 | mappingLR1toLALR(pair._1) -> pair._2.mapValues(mappingLR1toLALR) 21 | }.groupBy(_._1).mapValues(_.map(_._2).reduce(unionEdges)) 22 | 23 | /** 24 | * nodes: 全ての LR closure 25 | */ 26 | lazy val nodes: Set[LRClosure] = edges.keySet 27 | 28 | /** 29 | * shift: 終端記号による遷移 30 | * 遷移元 LR closure -> (終端記号 -> 遷移先 LR closure) 31 | */ 32 | lazy val shift: Map[LRClosure, Map[Terminal, LRClosure]] = edges.mapValues { 33 | _.collect { case (Inr(Inl(t)), n) => (t, n) } 34 | } 35 | 36 | /** 37 | * reduce: 非終端記号の導出による遷移 38 | * 遷移元 List[ (LR closure, 文法式, 先読み記号の集合) ] 39 | */ 40 | lazy val reduce: List[(LRClosure, Rule, Set[Terminal])] = nodes.flatMap { closure => 41 | closure.items.collect { case (LRItem(rule, Nil), lookahead) => 42 | (closure, rule, lookahead) 43 | } 44 | } (breakOut) 45 | 46 | /** 47 | * goto: 非終端記号による遷移 48 | * 遷移元 LR closure -> (非終端記号 -> 遷移先 LR closure) 49 | */ 50 | lazy val goTo: Map[LRClosure, Map[NonTerminal, LRClosure]] = edges.mapValues { 51 | _.collect { case (Inl(nt), n) => (nt, n) } 52 | } 53 | 54 | /** 55 | * accept: 終了状態となるような LR closure 56 | */ 57 | lazy val accept: Map[LRClosure, Rule] = nodes.flatMap { node => 58 | node.items.collectFirst { 59 | case (item, lookahead) if item.rule.left == syntax.start && item.rest.isEmpty && lookahead.contains(Terminal.eoi) => node -> item.rule 60 | } 61 | } (breakOut) 62 | 63 | /** 64 | * state: LR closure の持つ状態 65 | * ある LR closure への遷移は全て同じ記号によるものであると仮定し、その対応関係を表現する 66 | */ 67 | lazy val state: Map[LRClosure, NonEmptySymbol] = { 68 | edges.values.flatten.groupBy(_._2).mapValues(pair => ensureEqualAll(pair.map(_._1))) 69 | } 70 | 71 | /** 72 | * エッジを逆にたどる Map 73 | * Symbol の情報は state の方に入っているため、ここでは省略している 74 | */ 75 | lazy val reverseEdges: Map[LRClosure, Set[LRClosure]] = state.map { case (to, symbol) => 76 | to -> edges.collect { case (from, map) if map.get(symbol).contains(to) => from }.toSet 77 | } 78 | 79 | def project: String = { 80 | val ns = nodes.zipWithIndex.toMap 81 | val nodeStrings = ns.map(x => s"node ${x._2} : ${x._1.project}").mkString("\n") 82 | val edgeStrings = edges.flatMap { case (from, 
map) => 83 | map.map { case (s, to) => 84 | s"${ns(from)} --(${projectNonEmptySymbol(s)})--> ${ns(to)}" 85 | } 86 | }.mkString("\n") 87 | nodeStrings + edgeStrings 88 | } 89 | 90 | /** 91 | * LR(1) automaton における LR closure から LALR(1) automaton における LR closure へのマッピング 92 | * LALR(1) automaton では LR(1) automaton における LR closure を併合して LR closure の数を減らす。 93 | */ 94 | private lazy val mappingLR1toLALR: Map[LRClosure, LRClosure] = lr1.edges.keySet.groupBy(_.items.keySet).values.flatMap { set => 95 | val closure = set.reduce(_ ++ _) 96 | set.map(_ -> closure) 97 | }.toMap 98 | 99 | /** 100 | * エッジの集合を合成する関数 101 | * @param e1 エッジの集合その1 102 | * @param e2 エッジの集合その2 103 | * @return 合成したエッジの集合 104 | */ 105 | private def unionEdges (e1: Map[NonEmptySymbol, LRClosure], e2: Map[NonEmptySymbol, LRClosure]): Map[NonEmptySymbol, LRClosure] = { 106 | e1.foldLeft(e2) { case (e, (s, c)) => 107 | if (e.contains(s) && e(s) == c) e 108 | else if (! e.contains(s)) e + (s -> c) 109 | else throw new RuntimeException("broken LALR automaton") 110 | } 111 | } 112 | 113 | private def ensureEqualAll (s: Iterable[NonEmptySymbol]): NonEmptySymbol = { 114 | require(s.nonEmpty) 115 | if (s.tail.forall(_ == s.head)) s.head 116 | else throw new RuntimeException(s"cannot unify the state of LR closure: $s") 117 | } 118 | 119 | private lazy val lr1 = LR1Automaton(syntax) 120 | } 121 | 122 | case class LR1Automaton (syntax: Syntax) { 123 | /** 124 | * 最初の LR closure 125 | * LR オートマトンの開始地点となる。 126 | */ 127 | lazy val start: LRClosure = { 128 | growLRClosure(LRClosure(syntax.expressions(syntax.start).map(r => LRItem(r, r.right) -> Set(Terminal.eoi)).toMap)) 129 | } 130 | 131 | /** 132 | * 全てのエッジを表現する Map 133 | */ 134 | lazy val edges: Map[LRClosure, Map[NonEmptySymbol, LRClosure]] = growEdges(List(start), Map.empty) 135 | 136 | /** 137 | * LR closure を成長させる関数 138 | * LR closure は A -> x・B y [w] を含むとき、B -> ・z [first(yw)] も含む。 139 | * この更新操作を収束するまで繰り返し行う。 140 | * @param closure 更新対象となる LR closure 141 | * @return 完成した LR closure 142 | */ 143 | private def growLRClosure (closure: LRClosure): LRClosure = { 144 | val newClosure = closure ++ closure.items.collect { 145 | case (LRItem(_, Inl(n) :: rest), lookahead) => 146 | syntax.expressions(n).map(r => LRItem(r, r.right) -> syntax.lookupFirst(rest, lookahead)) 147 | case (LRItem(rule, Inr(Inr(_)) :: rest), lookahead) => 148 | List(LRItem(rule, rest) -> lookahead) 149 | }.flatten 150 | if (closure == newClosure) closure 151 | else growLRClosure(newClosure) 152 | } 153 | 154 | /** 155 | * 与えられた LR closure から張られるエッジとそこから推移的に張られるエッジを全て求める関数 156 | * @param closures まだ調べていない LR closure 157 | * @param edges エッジのアキュムレータ 158 | * @return 全てのエッジを表す Map 159 | */ 160 | private def growEdges (closures: List[LRClosure], edges: Map[LRClosure, Map[NonEmptySymbol, LRClosure]]): Map[LRClosure, Map[NonEmptySymbol, LRClosure]] = closures match { 161 | case closure :: rest if edges.contains(closure) => growEdges(rest, edges) 162 | case closure :: rest => 163 | val es = closureEdges(closure) 164 | growEdges(rest ++ es.values, edges + (closure -> es)) 165 | case Nil => edges 166 | } 167 | 168 | /** 169 | * 与えられた LR closure から張られる全てのエッジを返す関数 170 | * @param closure LR closure 171 | * @return どのシンボルによってどの LR closure に遷移するかを示す Map 172 | */ 173 | private def closureEdges (closure: LRClosure): Map[NonEmptySymbol, LRClosure] = transitions(closure).groupBy(_._1).mapValues { 174 | es => growLRClosure(LRClosure(es.map { case (_, item, lookahead) => item -> lookahead }.toMap)) 175 | } 176 | 177 | /** 178 | * LR closure 
の各アイテムに関して、全ての遷移をリストアップする関数 179 | * @param closure LR closure 180 | * @return 遷移のために必要なシンボル, 遷移先の LR item, 遷移後の先読み集合 181 | */ 182 | private def transitions (closure: LRClosure): Iterable[(NonEmptySymbol, LRItem, Set[Terminal])] = closure.items.collect { 183 | case (LRItem(rule, Inl(nt) :: rest), lookahead) => (Inl(nt), LRItem(rule, rest), lookahead) 184 | case (LRItem(rule, Inr(Inl(t)) :: rest), lookahead) => (Inr(Inl(t)), LRItem(rule, rest), lookahead) 185 | } 186 | } 187 | 188 | type NonEmptySymbol = NonTerminal :+: Terminal :+: CNil 189 | 190 | def projectNonEmptySymbol (s: NonEmptySymbol): String = s match { 191 | case Inl(nt) => nt.toString 192 | case Inr(Inl(t)) => projectTerminal(t) 193 | case Inr(Inr(x)) => x.impossible 194 | } 195 | 196 | case class LRClosure (items: Map[LRItem, Set[Terminal]]) { 197 | def ++ (that: LRClosure): LRClosure = this ++ that.items 198 | def ++ (newItems: Iterable[(LRItem, Set[Terminal])]): LRClosure = LRClosure { 199 | newItems.foldLeft(items) { case (map, (item, lookahead)) => 200 | map + (item -> (map.getOrElse(item, Set.empty) ++ lookahead)) 201 | } 202 | } 203 | def project: String = { 204 | s"""item { 205 | | ${items.map(x => x._1.project + x._2.map(projectTerminal).mkString("[", " ", "]"))} 206 | |}""".stripMargin 207 | } 208 | } 209 | 210 | case class LRItem (rule: Rule, rest: List[Symbol]) { 211 | def project : String = { 212 | s"${rule.left} := ${rule.right.diff(rest).map(projectSymbol).mkString(" ")} @ ${rest.map(projectSymbol).mkString(" ")}" 213 | } 214 | } 215 | 216 | } 217 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ScaLALR : Type-Safe Syntax Extension in Scala 2 | 3 | ScaLALR is a parser generator but it generates just a library in Scala. 4 | You can define and use your own syntax in your Scala project. 5 | ScaLALR emulates LALR(1) parsing by exploiting type checking in Scala. 6 | 7 | ## Quick tour 8 | 9 | The following program is an example of a definition of DSL syntax by using ScaLALR. 
10 |  11 | ```scala 12 | import com.phenan.scalalr._ 13 |  14 | @dsl[JValue] 15 | object JSONSyntax { 16 |   @syntax(s"[ $values ]") 17 |   def jArray (values: JValue@sep(",")*): JArray = JArray(values.toList) 18 |  19 |   @syntax(s"{ $fields }") 20 |   def jObject (fields: JField@sep(",")*): JObject = JObject(fields.toList) 21 |  22 |   @syntax(s"$name : $value") 23 |   def jField (name: String, value: JValue): JField = JField(name, value) 24 |  25 |   @syntax(s"$value") 26 |   def jDouble (value: Double): JDouble = JDouble(value) 27 |  28 |   @syntax(s"$value") 29 |   def jLong (value: Long): JLong = JLong(value) 30 |  31 |   @syntax(s"$value") 32 |   def jBool (value: Boolean): JBool = JBool(value) 33 |  34 |   @syntax(s"$value") 35 |   def jString (value: String): JString = JString(value) 36 | } 37 | ``` 38 |  39 | This definition allows you to write a JSON-like program as follows: 40 |  41 | ```scala 42 | import com.phenan.scalalr._ 43 | import scala.language.postfixOps 44 | import JSONSyntax._ 45 |  46 | val doubleValue: JValue = (10.0) 47 |  48 | val jsonArray: JValue = $$bracketleft (10.0)$$comma ("hello") $$bracketright 49 |  50 | val jsonObject: JValue = ( 51 |   $$braceleft 52 |     ("foo") $$colon (false)$$comma 53 |     ("bar") $$colon $$bracketleft ("baz")$$comma (20.0) $$bracketright 54 |   $$braceright 55 | ) 56 | ``` 57 |  58 | This code looks quite strange; however, 59 | if you use our custom font, which supports several ligatures, 60 | the code is displayed as follows: 61 |  62 | (screenshot: the code above rendered with the Scalig font, with the `$$` identifiers displayed as the original symbols) 63 |  64 | ## Installation 65 |  66 | This project currently supports Scala 2.12. 67 | You should add the following to your `build.sbt`: 68 |  69 | ```sbtshell 70 | resolvers += Resolver.jcenterRepo 71 |  72 | libraryDependencies += "com.phenan" %% "scalalr" % "2.3.3" 73 |  74 | addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.0" cross CrossVersion.full) 75 | ``` 76 |  77 | We provide a custom font for ScaLALR, named Scalig. 78 | The font files are placed in the `font` directory. 79 |  80 | If you want to use ScaLALR fully, 81 | please change your editor's font to Scalig and enable its font ligatures. 82 |  83 |  84 | ## DSL syntax definition 85 |  86 | ### Annotate the declaration of a singleton object with `@dsl` 87 |  88 | To define your own DSL, you declare a singleton object with the `@dsl` annotation. 89 | The `@dsl` annotation takes a type argument. 90 | The given type argument expresses that this is a DSL for producing values of that type. 91 |  92 | ```scala 93 | @dsl[JValue] 94 | object JSONSyntax { 95 |   // declarations of DSL 96 | } 97 | ``` 98 |  99 | ### Define DSL syntax by declaring methods with the `@syntax` annotation 100 |  101 | You can define syntax by attaching the `@syntax` annotation to a method declaration 102 | in the body of the singleton object. 103 | The `@syntax` annotation takes an argument of the form `s"..."` that expresses the syntax of the method. 104 | For example, look at the following declaration: 105 |  106 | ```scala 107 | @syntax(s"$name : $value") 108 | def jField (name: String, value: JValue): JField = JField(name, value) 109 | ``` 110 |  111 | Here, `$name : $value` indicates the syntax. 112 | An identifier preceded by `$`, such as `$name`, indicates an argument part of the syntax, 113 | and the identifier corresponds to a parameter name. 114 | So this declaration means that you can use the syntax like `x : y`, 115 | and `jField(x, y)` is called in that case. 116 | Of course, such syntax is available only in limited positions. 117 | It is only accepted in an expression that expects a value of `JField`. 
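To make that correspondence concrete, the sketch below spells out the plain method calls that the `x : y` form stands for. This is only an illustration, under the assumption that the `JSONSyntax` object above (and whatever library defines the `JValue`/`JField` AST classes) is in scope; it is not code generated by ScaLALR.

```scala
// Writing ("foo") $$colon (10.0) in a position that expects a JField
// corresponds to calling the annotated methods directly:
val field: JField = jField("foo", jDouble(10.0))
```

In DSL code you would write the `:` form itself (spelled `$$colon`, see the symbol table below), as in the `jsonObject` example above.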
118 | In this way, you can define your own DSL by declaring syntax for each type. 119 |  120 |  121 | ### Another use case of the `@syntax` annotation 122 |  123 | Unfortunately, the `@syntax` annotation causes IntelliJ to report a spurious error. 124 | (In fact, the `@syntax` annotation causes a compilation error if you use it without the `@dsl` annotation.) 125 | Therefore, the `@syntax` annotation can also be attached to the return type of a method, as follows: 126 |  127 | ```scala 128 | def jField (name: String, value: JValue): JField @syntax(s"$name : $value") = JField(name, value) 129 | ``` 130 |  131 | This does not cause an error report in IntelliJ, because `name` and `value` can be referred to from that position. 132 |  133 |  134 | If you define your own types for your DSL, 135 | you can declare your syntax by attaching `@syntax` annotations to your class definitions instead of methods. 136 | The following is an example: 137 |  138 | ```scala 139 | @dsl[Math.Expr] 140 | object Math { 141 |   sealed trait Expr 142 |  143 |   @syntax(s"$n + $m") 144 |   case class Add (n: Expr, m: Int) extends Expr 145 |  146 |   @syntax(s"$n - $m") 147 |   case class Sub (n: Expr, m: Int) extends Expr 148 |  149 |   @syntax(s"$n") 150 |   case class Num (n: Int) extends Expr 151 | } 152 | ``` 153 |  154 | ### Expressing repetitions 155 |  156 | To express a repetition, you can use variable arguments and the `@sep` annotation. 157 | For example, the following uses variable arguments with the `@sep` annotation: 158 |  159 | ```scala 160 | @syntax(s"[ $values ]") 161 | def jArray (values: JValue@sep(",")*): JArray = JArray(values.toList) 162 | ``` 163 |  164 | This expresses a syntax that recognizes 165 | `[]`, `[a]`, `[a, b]`, `[a, b, c]`, and so on. 166 | `$values` in the `@syntax` annotation indicates an argument, 167 | and the corresponding parameter `values` takes variable arguments. 168 | The element type of the variable arguments is `JValue@sep(",")`, 169 | which means that the argument takes zero or more values of `JValue`, separated by `,`. 170 |  171 |  172 | ## Using DSLs 173 |  174 | If you want to use your DSL, 175 | you should import all members of the singleton object that you declared with the `@dsl` annotation. 176 | You must also import `com.phenan.scalalr._` and `scala.language.postfixOps` to use DSLs defined with ScaLALR. 177 |  178 | ```scala 179 | import com.phenan.scalalr._ 180 | import scala.language.postfixOps 181 | import JSONSyntax._ 182 | ``` 183 |  184 | Now, you can use your DSL syntax. 185 | To use the DSL, you have to write the result type of a DSL program explicitly. 186 |  187 | If your DSL program is a single-line program, 188 | the next line of the program should be a blank line. 189 | If your DSL program spans multiple lines, 190 | you should enclose it in parentheses. 191 |  192 | ```scala 193 | val doubleValue: JValue = (10.0) 194 |  195 | val jsonArray: JValue = $$bracketleft (10.0)$$comma ("hello") $$bracketright 196 |  197 | val jsonObject: JValue = ( 198 |   $$braceleft 199 |     ("foo") $$colon (false)$$comma 200 |     ("bar") $$colon $$bracketleft ("baz")$$comma (20.0) $$bracketright 201 |   $$braceright 202 | ) 203 | ``` 204 |  205 |  206 | You can use a Scala expression as part of a DSL program. 207 | To do so, enclose the Scala expression in parentheses. 208 | You can write Scala code enclosed in parentheses in any argument position of the DSL syntax. 209 |  210 |  211 | Symbols in the syntax of the DSL are automatically translated into other identifiers as shown below. 
212 | Our font, Scalig, provides ligatures that make these identifiers look the same as the original symbols. 213 | 214 | 215 | | symbol | identifier | 216 | |:------:|------------| 217 | | ! | $$exclam | 218 | | " | $$quotedbl | 219 | | # | $$numbersign | 220 | | % | $$percent | 221 | | & | $$ampersand | 222 | | ' | $$quotesingle | 223 | | ( | $$parenleft | 224 | | ) | $$parenright | 225 | | * | $$asterisk | 226 | | + | $$plus | 227 | | , | $$comma | 228 | | - | $$hyphen | 229 | | . | $$period | 230 | | / | $$slash | 231 | | : | $$colon | 232 | | ; | $$semicolon | 233 | | < | $$less | 234 | | = | $$equal | 235 | | > | $$greater | 236 | | ? | $$question | 237 | | @ | $$at | 238 | | [ | $$bracketleft | 239 | | \ | $$backslash | 240 | | ] | $$bracketright | 241 | | ^ | $$asciicircum | 242 | | ` | $$grave | 243 | | { | $$braceleft | 244 | | | | $$bar | 245 | | } | $$braceright | 246 | | ~ | $$asciitilde | 247 | 248 | 249 | 250 | 251 | ## Author 252 | 253 | [@phenan](https://twitter.com/phenan) 254 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/shared/SyntaxRuleModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.shared 2 | 3 | import shapeless._ 4 | 5 | import collection.breakOut 6 | 7 | trait SyntaxRuleModule { 8 | 9 | import HList.ListCompat._ 10 | 11 | type Symbol = NonTerminal :+: Terminal :+: EmptyString.type :+: CNil 12 | 13 | object Symbol { 14 | def apply (n: NonTerminal): Symbol = Coproduct[Symbol](n) 15 | def apply (t: Terminal): Symbol = Coproduct[Symbol](t) 16 | def apply (e: EmptyString.type): Symbol = Coproduct[Symbol](e) 17 | def epsilon: Symbol = Coproduct[Symbol](EmptyString) 18 | } 19 | 20 | type NonTerminal 21 | 22 | type Terminal = LiteralToken :+: Keyword :+: EndOfInput.type :+: CNil 23 | 24 | object Terminal { 25 | def apply (t: LiteralToken): Terminal = Coproduct[Terminal](t) 26 | def apply (k: Keyword): Terminal = Coproduct[Terminal](k) 27 | def apply (e: EndOfInput.type): Terminal = Coproduct[Terminal](e) 28 | def eoi: Terminal = Coproduct[Terminal](EndOfInput) 29 | } 30 | 31 | type LiteralToken 32 | 33 | case class Keyword (kw: String) { 34 | lazy val scalaIdent: String = translateKeyword(kw) 35 | } 36 | 37 | case object EndOfInput 38 | 39 | case object EmptyString 40 | 41 | type SemanticAction 42 | 43 | /** 44 | * 文法規則 45 | */ 46 | case class Rule (left: NonTerminal, right: List[Symbol], action: SemanticAction) 47 | 48 | /** 49 | * 文法を表現するデータ 50 | * @param start 開始記号 51 | * @param rules 全ての文法規則 52 | */ 53 | case class Syntax (start: NonTerminal, rules: List[Rule]) { 54 | 55 | /** 56 | * 文法規則一覧 57 | */ 58 | lazy val expressions: Map[NonTerminal, List[Rule]] = rules.groupBy(_.left) 59 | 60 | /** 61 | * 全ての非終端記号 62 | */ 63 | lazy val nonTerminals: Set[NonTerminal] = rules.map(_.left)(breakOut) 64 | 65 | /** 66 | * 全ての終端記号 67 | */ 68 | lazy val terminals: Set[Terminal] = rules.flatMap { rule => 69 | rule.right.collect { case Inr(Inl(t)) => t } 70 | } (breakOut) 71 | 72 | /** 73 | * 全てのリテラル 74 | */ 75 | lazy val literals: Set[LiteralToken] = terminals.collect { 76 | case Inl(l) => l 77 | } 78 | 79 | /** 80 | * 全てのキーワード (オペレータ) 81 | */ 82 | lazy val keywords: Set[Keyword] = terminals.collect { 83 | case Inr(Inl(k)) => k 84 | } 85 | 86 | /** 87 | * 与えられた文法式の先読み集合を求める関数 88 | * @param expr 文法式 89 | * @return 先読み集合 90 | */ 91 | def lookupFirst (expr: List[Symbol]): Set[Terminal] = lookupFirst(expr, Set.empty, Set.empty) 92 | 93 | /** 
94 | * 文法式と親の文法規則の先読み集合から、先読み集合を計算する関数 95 | * @param expr 文法式 96 | * @param lookahead 親の文法規則の先読み集合 97 | * @return 先読み集合 98 | */ 99 | def lookupFirst (expr: List[Symbol], lookahead: Set[Terminal]): Set[Terminal] = lookupFirst(expr, lookahead, Set.empty) 100 | 101 | /** 102 | * εになりうる全ての非終端記号 103 | */ 104 | private lazy val canEmpty: Set[NonTerminal] = buildCanEmpty(nonTerminals) 105 | 106 | /** 107 | * εになりうる全ての非終端記号を求める関数 108 | * 最初は全ての非終端記号がεになりうると仮定し、そこからεになり得ないものを除いていく。 109 | * それ以上除かれなくなれば終了する。単調減少のため必ず停止する。 110 | * @param set εになりうる非終端記号の候補 111 | * @return εになりうる全ての非終端記号 112 | */ 113 | private def buildCanEmpty (set: Set[NonTerminal]): Set[NonTerminal] = { 114 | val newSet = set.filter { 115 | expressions(_).exists { 116 | _.right.forall { 117 | case Inl(n) => set.contains(n) 118 | case Inr(Inl(_)) => false 119 | case Inr(Inr(_)) => true 120 | } 121 | } 122 | } 123 | if (set == newSet) set 124 | else buildCanEmpty(newSet) 125 | } 126 | 127 | /** 128 | * first set : ある非終端記号が表現する文字列の最初の1アルファベットの集合 129 | */ 130 | private lazy val firstSet: Map[NonTerminal, Set[Terminal]] = buildFirstSet(nonTerminals.map(_ -> Set.empty[Terminal])(breakOut)) 131 | 132 | /** 133 | * first set を構築する関数 134 | * first set が変化しなくなるまで再帰的に構築する。 135 | * first set のサイズは単調増加するため必ず停止する。 136 | * 効率はそれほど良くないかもしれないが、ボトルネックになるような場所ではないと思われる。 137 | * @param fs 前回構築した first set 138 | * @return first set 139 | */ 140 | private def buildFirstSet (fs: Map[NonTerminal, Set[Terminal]]): Map[NonTerminal, Set[Terminal]] = { 141 | val newSet = fs.map { 142 | case (nt, set) => nt -> expressions(nt).map(_.right).foldRight(set)(updateFirst(_, fs, _)) 143 | } 144 | if (fs == newSet) fs 145 | else buildFirstSet(newSet) 146 | } 147 | 148 | /** 149 | * 文法式を参照して、ある非終端記号 N の first set を更新する関数 150 | * @param expr 文法式 151 | * @param fs 構築済みの first set 152 | * @param set 非終端記号 N の first set のアキュムレータ 153 | * @return 非終端記号 N の新しい first set 154 | */ 155 | private def updateFirst (expr: List[Symbol], fs: Map[NonTerminal, Set[Terminal]], set: Set[Terminal]): Set[Terminal] = expr match { 156 | case Inl(nt) :: rest if canEmpty(nt) => updateFirst(rest, fs, set ++ fs(nt)) 157 | case Inl(nt) :: _ => set ++ fs(nt) 158 | case Inr(Inl(t)) :: _ => set + t 159 | case Inr(Inr(_)) :: rest => updateFirst(rest, fs, set) 160 | case Nil => set 161 | } 162 | 163 | /** 164 | * 文法式と親の文法規則の先読み集合から、先読み集合を計算する関数 165 | * @param expr 文法式 166 | * @param lookahead 親の文法規則の先読み集合 167 | * @param set 先読み集合のアキュムレータ 168 | * @return 先読み集合 169 | */ 170 | private def lookupFirst (expr: List[Symbol], lookahead: Set[Terminal], set: Set[Terminal]): Set[Terminal] = expr match { 171 | case Inl(nt) :: rest if canEmpty(nt) => lookupFirst(rest, lookahead, set ++ firstSet(nt)) 172 | case Inl(nt) :: _ => set ++ firstSet(nt) 173 | case Inr(Inl(t)) :: _ => set + t 174 | case Inr(Inr(_)) :: rest => lookupFirst(rest, lookahead, set) 175 | case Nil => set ++ lookahead 176 | } 177 | } 178 | 179 | def projectRule (r: Rule): String = { 180 | s"${r.left.toString} := ${r.right.map(projectSymbol).mkString(" ")}" 181 | } 182 | 183 | def projectSymbol (s: Symbol): String = s match { 184 | case Inl(nt) => nt.toString 185 | case Inr(Inl(t)) => projectTerminal(t) 186 | case Inr(Inr(_)) => "ε" 187 | } 188 | 189 | def projectTerminal (terminal: Terminal): String = terminal match { 190 | case Inl(lit) => lit.toString 191 | case Inr(Inl(k)) => k.kw 192 | case Inr(Inr(_)) => "$" 193 | } 194 | 195 | private def translateKeyword (kw: String): String = kw match { 196 | case "abstract" => "$$abstract" 197 | case 
"case" => "$$case" 198 | case "catch" => "$$catch" 199 | case "class" => "$$class" 200 | case "def" => "$$def" 201 | case "do" => "$$do" 202 | case "else" => "$$else" 203 | case "extends" => "$$extends" 204 | case "false" => "$$false" 205 | case "final" => "$$final" 206 | case "finally" => "$$finally" 207 | case "for" => "$$for" 208 | case "forSome" => "$$forSome" 209 | case "if" => "$$if" 210 | case "implicit" => "$$implicit" 211 | case "import" => "$$import" 212 | case "lazy" => "$$lazy" 213 | case "match" => "$$match" 214 | case "new" => "$$new" 215 | case "null" => "$$null" 216 | case "object" => "$$object" 217 | case "override" => "$$override" 218 | case "package" => "$$package" 219 | case "private" => "$$private" 220 | //case "protected" => "$$ protected" // なぜか IntelliJ では protected だけバグる 221 | case "return" => "$$return" 222 | case "sealed" => "$$sealed" 223 | case "super" => "$$super" 224 | case "this" => "$$this" 225 | case "throw" => "$$throw" 226 | case "trait" => "$$trait" 227 | case "true" => "$$true" 228 | case "try" => "$$try" 229 | case "type" => "$$type" 230 | case "val" => "$$val" 231 | case "var" => "$$var" 232 | case "while" => "$$while" 233 | case "with" => "$$with" 234 | case "yield" => "$$yield" 235 | case _ => kw.flatMap { 236 | case '!' => "$$exclam" 237 | case '\"' => "$$quotedbl" 238 | case '#' => "$$numbersign" 239 | case '%' => "$$percent" 240 | case '&' => "$$ampersand" 241 | case '\'' => "$$quotesingle" 242 | case '(' => "$$parenleft" 243 | case ')' => "$$parenright" 244 | case '*' => "$$asterisk" 245 | case '+' => "$$plus" 246 | case ',' => "$$comma" 247 | case '-' => "$$hyphen" 248 | case '.' => "$$period" 249 | case '/' => "$$slash" 250 | case ':' => "$$colon" 251 | case ';' => "$$semicolon" 252 | case '<' => "$$less" 253 | case '=' => "$$equal" 254 | case '>' => "$$greater" 255 | case '?' 
=> "$$question" 256 | case '@' => "$$at" 257 | case '[' => "$$bracketleft" 258 | case '\\' => "$$backslash" 259 | case ']' => "$$bracketright" 260 | case '^' => "$$asciicircum" 261 | case '`' => "$$grave" 262 | case '{' => "$$braceleft" 263 | case '|' => "$$bar" 264 | case '}' => "$$braceright" 265 | case '~' => "$$asciitilde" 266 | case c => c.toString 267 | } 268 | } 269 | } 270 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/macroimpl/SyntaxInfoCollectorModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.macroimpl 2 | 3 | trait SyntaxInfoCollectorModule { 4 | this: AnnotationFinderModule with SyntaxInfoModule with CommonNamesModule with MacroUtilitiesModule with MacroModule => 5 | 6 | import c.universe._ 7 | 8 | /** 9 | * syntax アノテーションを処理して、文法規則の情報を集め syntax アノテーションを削除した構文木を返す関数 10 | * @param tree 対象の構文木 11 | * @return (syntax アノテーションを除去した構文木, 文法規則の情報のリスト) 12 | */ 13 | def processSyntaxAnnotations (tree: Tree): (Tree, List[SyntaxInfo]) = tree match { 14 | case ClassDef(mod, name, typeParams, Template(parents, self, body)) => 15 | (ClassDef(mod, name, typeParams, Template(parents, self, removeSyntaxAnnotations(body))), collectRules(body)) 16 | case ModuleDef(mod, name, Template(parents, self, body)) => 17 | (ModuleDef(mod, name, Template(parents, self, removeSyntaxAnnotations(body))), collectRules(body)) 18 | } 19 | 20 | /** 21 | * syntax アノテーションは Scala のコンパイルエラーを起こしてしまうため削除する 22 | * @param trees 変換対象の構文木のリスト 23 | * @return syntax アノテーションを取り除いた構文木のリスト 24 | */ 25 | private def removeSyntaxAnnotations (trees: List[Tree]): List[Tree] = trees.map { 26 | case ClassDef(mod, name, typeParams, template) => ClassDef(removeSyntaxAnnotation(mod), name, typeParams, template) 27 | case ModuleDef(mod, name, template) => ModuleDef(removeSyntaxAnnotation(mod), name, template) 28 | case DefDef(mod, name, typeParams, params, returnType, body) => DefDef(removeSyntaxAnnotation(mod), name, typeParams, params, returnType, body) 29 | case ValDef(mod, name, valType, body) => ValDef(mod, name, valType, body) 30 | case other => other 31 | } 32 | 33 | /** 34 | * syntax アノテーションで指定された文法規則の情報を集めてくる関数 35 | * @param trees 対象とする構文木のリスト 36 | * @return 文法規則の情報のリスト 37 | */ 38 | private def collectRules (trees: List[Tree]): List[SyntaxInfo] = trees.flatMap { 39 | case classDef : ClassDef => classRule(classDef) 40 | case moduleDef: ModuleDef => objectRule(moduleDef) 41 | case fieldDef : ValDef => fieldRule(fieldDef) 42 | case funDef : DefDef => functionRule(funDef) 43 | case _ => Nil 44 | } 45 | 46 | /** 47 | * クラス定義に対応する文法規則の情報を返す関数 48 | * @param classDef クラス定義 49 | * @return プライマリコンストラクタに対応する文法規則 ++ 補助コンストラクタに対応する文法規則 50 | */ 51 | private def classRule (classDef: ClassDef): List[SyntaxInfo] = { 52 | val ClassDef(mod, name, Nil, Template(_, _, body)) = classDef 53 | val constructors = MacroUtilities.findConstructors(body) 54 | val primaryRules = MacroUtilities.findPrimaryConstructor(body).map(primaryConstructorRules(mod, name, _)).getOrElse(Nil) 55 | val auxiliaryRules = constructors.flatMap(auxiliaryConstructorRules(name, _)) 56 | primaryRules ++ auxiliaryRules 57 | } 58 | 59 | /** 60 | * シングルトンオブジェクト定義に対応する文法規則の情報を返す関数 61 | * @param moduleDef シングルトンオブジェクトの定義 62 | * @return オブジェクト参照に対応する文法規則の情報 63 | */ 64 | private def objectRule (moduleDef: ModuleDef): List[SyntaxInfo] = { 65 | val ModuleDef(mod, name, _) = moduleDef 66 | operatorRule(mod, Nil, SingletonTypeTree(Ident(name)), _ 
=> SemanticActionImpl.returnConstant(q"$name")) 67 | } 68 | 69 | /** 70 | * フィールド定義に対応する文法規則の情報を返す関数 71 | * @param fieldDef フィールド定義 72 | * @return オブジェクト参照に対応する文法規則の情報 73 | */ 74 | private def fieldRule (fieldDef: ValDef): List[SyntaxInfo] = { 75 | val ValDef(mod, name, fieldType, _) = fieldDef 76 | operatorRule(mod, Nil, fieldType, _ => SemanticActionImpl.returnConstant(q"$name")) 77 | } 78 | 79 | /** 80 | * 関数に対応する文法規則の情報を返す関数 81 | * @param funDef 関数定義 82 | * @return 対応する文法規則の情報 83 | */ 84 | private def functionRule (funDef: DefDef): List[SyntaxInfo] = { 85 | val DefDef(mod, name, Nil, paramLists, returnType, _) = funDef 86 | operatorRule(mod, paramLists, returnType, SemanticActionImpl.functionCall(q"$name", _)) 87 | } 88 | 89 | /** 90 | * プライマリコンストラクタに対応する文法規則の情報を返す関数 91 | * @param mod クラス定義のアノテーションなどの修飾詞情報 92 | * @param typeName 型名 93 | * @param primaryConstructor プライマリコンストラクタ 94 | * @return 対応する文法規則の情報 95 | */ 96 | private def primaryConstructorRules (mod: Modifiers, typeName: TypeName, primaryConstructor: DefDef): List[SyntaxInfo] = { 97 | val DefDef(_, termNames.CONSTRUCTOR, Nil, paramLists, _, _) = primaryConstructor 98 | operatorRule(mod, paramLists, Ident(typeName), SemanticActionImpl.constructorCall(tq"$typeName", _)) 99 | } 100 | 101 | /** 102 | * 補助コンストラクタに対応する文法規則の情報を返す関数 103 | * @param typeName 型名 104 | * @param constructor 補助コンストラクタ 105 | * @return 対応する文法規則の情報 106 | */ 107 | private def auxiliaryConstructorRules (typeName: TypeName, constructor: DefDef): List[SyntaxInfo] = { 108 | val DefDef(mod, termNames.CONSTRUCTOR, Nil, paramLists, _, _) = constructor 109 | operatorRule(mod, paramLists, Ident(typeName), SemanticActionImpl.constructorCall(tq"$typeName", _)) 110 | } 111 | 112 | /** 113 | * syntaxアノテーションで指定された文法規則の情報を返す関数 114 | * @param mod アノテーションなどの修飾詞情報 115 | * @param paramLists 引数リストのリスト 116 | * @param returnType 返り値の型を表す構文木 117 | * @param semantics 対応する関数呼び出しを表現する 118 | * @return syntaxアノテーションで表現された文法規則の情報 119 | */ 120 | private def operatorRule (mod: Modifiers, paramLists: List[List[ValDef]], returnType: Tree, semantics: (List[Tree] => List[List[Tree]]) => SemanticActionImpl): List[SyntaxInfo] = { 121 | val TypeWithSyntaxAnnotation(annotations, ret) = returnType 122 | 123 | ( findSyntaxAnnotation(mod) ++ annotations ).flatten.flatMap { syntax => 124 | val (operandLists, ss) = variableParameterRules(paramLists) 125 | syntaxAnnotationRule(syntax, operandLists, ret, semantics) :: ss 126 | } 127 | } 128 | 129 | private def variableParameterRules (paramLists: List[List[ValDef]]): (List[List[Operand]], List[SyntaxInfo]) = { 130 | paramLists.foldRight[(List[List[Operand]], List[SyntaxInfo])]((Nil, Nil)) { 131 | case (paramList, (operandLists, syntax)) => 132 | val (operands, ss) = variableParameterRules_paramList(paramList) 133 | (operands :: operandLists, ss ++ syntax) 134 | } 135 | } 136 | 137 | private def variableParameterRules_paramList (paramList: List[ValDef]): (List[Operand], List[SyntaxInfo]) = { 138 | paramList.foldRight[(List[Operand], List[SyntaxInfo])]((Nil, Nil)) { 139 | case (ValDef(_, name, AppliedTypeTree(Select(Select(Ident(termNames.ROOTPKG), TermName("scala")), TypeName("")), List(TypeWithSepAnnotation(sep, t))), _), (params, syntax)) => 140 | (RepOperand(name, t) :: params, variableParameterSyntax(t, sep) ++ syntax) 141 | case (ValDef(_, name, t, _), (params, syntax)) => 142 | (NormalOperand(name, t) :: params, syntax) 143 | } 144 | } 145 | 146 | private def variableParameterSyntax (componentType: Tree, sep: Option[String]): List[SyntaxInfo] = { 147 | 
List(SyntaxInfo.epsilonOperator(seqTypeTreeOf(componentType), getNilListOf(componentType)), 148 | SyntaxInfo.unaryOperator(seqTypeTreeOf(componentType), Nil, componentType, Nil, arg => makeSingleElementList(componentType, arg)), 149 | SyntaxInfo.binaryOperator(seqTypeTreeOf(componentType), Nil, componentType, Nil, seqTailTypeTreeOf(componentType), Nil, (x, xs) => consSeq(x, seqTailToSeq(xs))), 150 | SyntaxInfo.unaryOperator(seqTailTypeTreeOf(componentType), sep.toList, componentType, Nil, arg => makeSingleElementSeqTail(componentType, arg)), 151 | SyntaxInfo.binaryOperator(seqTailTypeTreeOf(componentType), sep.toList, componentType, Nil, seqTailTypeTreeOf(componentType), Nil, (x, xs) => consSeqTail(x, xs))) 152 | } 153 | 154 | /** 155 | * syntaxアノテーションに対応する文法規則の情報を返す関数 156 | * @param syntaxAnnotation syntax アノテーションの引数 157 | * @param operandLists 引数リストのリスト 158 | * @param returnType 返り値の型を表す構文木 159 | * @param semantics 対応する関数呼び出しを表現する 160 | * @return syntaxアノテーションに対応する文法規則の情報 161 | */ 162 | private def syntaxAnnotationRule (syntaxAnnotation: Tree, operandLists: List[List[Operand]], returnType: Tree, semantics: (List[Tree] => List[List[Tree]]) => SemanticActionImpl): SyntaxInfo = { 163 | syntaxAnnotation match { 164 | case Apply(Select(Apply(Ident(TermName("StringContext")), parts), TermName("s")), args) => 165 | val operators = getOperators(parts) 166 | val (operands, correspondence) = getOperands(args, operandLists) 167 | SyntaxInfo(returnType, operators, operands, semantics(correspondence)) 168 | 169 | case other => 170 | c.abort(other.pos, s"""@syntax only supports s"..." for describing syntax: $other""") 171 | } 172 | } 173 | 174 | /** 175 | * StringContextの引数からオペレータ(キーワード)のリストを作る関数 176 | * @param parts StringContextの引数 177 | * @return オペレータ(キーワード)を表現する文字列のリスト 178 | */ 179 | private def getOperators (parts: List[Tree]): List[List[String]] = parts.map { 180 | case Literal(Constant("")) => Nil 181 | case Literal(Constant(str: String)) => str.split(" ").filter(_ != "").toList 182 | case other => c.abort(other.pos, s"""@syntax only supports s"..." 
for describing syntax: $other""") 183 | } 184 | 185 | /** 186 | * オペランド名を解決する関数 187 | * @param args オペランド名のリスト 188 | * @param operandLists 引数リストのリスト 189 | * @return (オペランドの型を表現する構文木のリスト, オペランド列を引数リストのリストに直す関数) 190 | */ 191 | private def getOperands (args: List[Tree], operandLists: List[List[Operand]]): (List[Tree], List[Tree] => List[List[Tree]]) = { 192 | val parameters = for { 193 | (operandList, index1) <- operandLists.zipWithIndex 194 | (operand, index2) <- operandList.zipWithIndex 195 | } yield operand.name -> (operand -> (index1 -> index2)) 196 | 197 | val parameterMap = parameters.toMap[Name, (Operand, (Int, Int))] 198 | 199 | val operands = args.map { case Ident(termName) => parameterMap(termName) } 200 | 201 | // List[(arg, index1, index2)] を作り, index1 でグループ化してソートし, 更に各グループ内で index2 でソートする 202 | val correspondence: List[Tree] => List[List[Tree]] = tree => { 203 | val zipped = tree.zip(operands).map { 204 | case (arg, (RepOperand(_, _), (i1, i2))) => (q"$arg:_*", i1, i2) 205 | case (arg, (NormalOperand(_, _), (i1, i2))) => (arg, i1, i2) 206 | } 207 | zipped.groupBy(_._2).toList.sortBy(_._1).map(_._2.sortBy(_._3).map(_._1)) 208 | } 209 | 210 | (operands.map(_._1.valType), correspondence) 211 | } 212 | 213 | private sealed trait Operand { 214 | def name: TermName 215 | def valType: Tree 216 | } 217 | 218 | private case class NormalOperand (name: TermName, valType: Tree) extends Operand 219 | 220 | private case class RepOperand (name: TermName, componentType: Tree) extends Operand { 221 | override def valType: Tree = seqTypeTreeOf(componentType) 222 | } 223 | } 224 | -------------------------------------------------------------------------------- /src/main/scala/com/phenan/scalalr/shared/CodeGeneratorModule.scala: -------------------------------------------------------------------------------- 1 | package com.phenan.scalalr.shared 2 | 3 | import shapeless._ 4 | 5 | trait CodeGeneratorModule { 6 | self: SyntaxRuleModule with LALRAutomatonModule => 7 | 8 | import HList.ListCompat._ 9 | 10 | val output: Output 11 | 12 | type GeneratedCode 13 | 14 | trait Output { 15 | type Type 16 | 17 | type MemberDef 18 | 19 | type Parameter 20 | 21 | type TypeParameter 22 | 23 | type Expr 24 | 25 | def generateProgram (modules: List[MemberDef]): GeneratedCode 26 | 27 | def generateUniqueName: String 28 | 29 | def literalIdentifier (lit: LiteralToken): Option[String] 30 | 31 | def simpleType (typeName: String): Type 32 | def objectType (objectName: String): Type 33 | 34 | def nonTerminalType (nt: NonTerminal): Type 35 | def literalType (lit: LiteralToken): Type 36 | 37 | def simpleTypes (names: String*): List[Type] = names.map(simpleType).toList 38 | 39 | def tuple2Type (v1: Type, v2: Type): Type 40 | def functionType (left: Type, right: Type): Type 41 | def parameterizedType (genName: String, args: List[Type]): Type 42 | 43 | def parameter (name: String, paramType: Type): Parameter 44 | def unusedParameter (paramType: Type): Parameter 45 | 46 | def typeParameter (name: String): TypeParameter 47 | def typeParameter (name: String, bound: Type): TypeParameter 48 | 49 | def typeParameters (names: String*): List[TypeParameter] = names.map(typeParameter).toList 50 | 51 | def sealedTraitDef (name: String, superType: Option[Type]): MemberDef 52 | 53 | def caseClassDef (name: String, typeParams: List[TypeParameter], params: List[Parameter], superType: Option[Type]): MemberDef 54 | def caseObjectDef (name: String): MemberDef 55 | 56 | def lazyValDef (name: String, valType: Type, value: Expr): MemberDef 57 | def 
functionDef (name: String, typeParams: List[TypeParameter], parameters: List[Parameter], implicitParams: List[Parameter], returnType: Type, body: Expr): MemberDef 58 | def implicitFunctionDef (typeParams: List[TypeParameter], parameters: List[Parameter], implicitParams: List[Parameter], returnType: Type, body: Expr): MemberDef 59 | 60 | def implicitClassDef (typeParams: List[TypeParameter], parameter: Parameter, implicitParams: List[Parameter], members: List[MemberDef]): MemberDef 61 | 62 | def objectRef (objectName: String): Expr 63 | def methodCall (receiver: Expr, methodName: String, typeArgs: List[Type], args: List[Expr]): Expr 64 | def fieldRef (receiver: Expr, fieldName: String): Expr 65 | def callApply (receiver: Expr, typeArgs: List[Type], args: List[Expr]): Expr 66 | def lambda (parameters: List[Parameter], body: Expr): Expr 67 | 68 | def constructAST (rule: Rule, args: List[Expr]): Expr 69 | } 70 | 71 | case class CodeGenerator (automaton: LALRAutomaton) { 72 | 73 | import output._ 74 | 75 | def generateCode (module: MemberDef): GeneratedCode = generateProgram(List(module)) 76 | def generateCode (modules: List[MemberDef]): GeneratedCode = generateProgram(modules) 77 | 78 | lazy val generatedDefinitions: List[MemberDef] = { 79 | nodeClassDefinitions ++ keywordObjectDefinitions ++ literalDSLDefinitions ++ 80 | keywordDSLDefinitions ++ keywordTransitionDefinitions ++ 81 | literalTransitionDefinitions ++ 82 | shiftImplicitDefinitions ++ reduceImplicitDefinitions ++ acceptImplicitDefinitions 83 | } 84 | 85 | /** 86 | * LALR オートマトンの各ノードを表現するデータ型の定義を出力する関数 87 | */ 88 | lazy val nodeClassDefinitions: List[MemberDef] = automaton.nodes.filterNot(automaton.start == _).toList.map { node => 89 | automaton.state(node) match { 90 | case Inl(nt) => caseClassDef(nodeName(node), typeParameters("NX"), List(parameter("prev", simpleType("NX")), parameter("value", nonTerminalType(nt))), None) 91 | case Inr(Inl(Inl(lit))) => caseClassDef(nodeName(node), typeParameters("NX"), List(parameter("prev", simpleType("NX")), parameter("value", literalType(lit))), None) 92 | case _ => caseClassDef(nodeName(node), typeParameters("NX"), List(parameter("prev", simpleType("NX"))), None) 93 | } 94 | } 95 | 96 | /** 97 | * キーワード(オペレータ)を表現するオブジェクトの定義を出力する関数 98 | */ 99 | lazy val keywordObjectDefinitions: List[MemberDef] = automaton.syntax.keywords.toList.map { k => 100 | caseObjectDef(keywordTokenTypeNames(k)) 101 | } 102 | 103 | /** 104 | * リテラルを表現するDSL関数の定義 105 | */ 106 | lazy val literalDSLDefinitions: List[MemberDef] = for { 107 | literal <- automaton.syntax.literals.toList 108 | id <- literalIdentifier(literal) 109 | } yield { 110 | functionDef(id, Nil, List(parameter("value", literalType(literal))), Nil, 111 | tokenListType(literalTokenTypes(literal)), 112 | singleTokenListObj(literalTokenTypes(literal), constructLiteralObj(literalType(literal), objectRef("value")))) 113 | } 114 | 115 | /** 116 | * キーワード(オペレータ)を表すDSL関数の定義 117 | */ 118 | lazy val keywordDSLDefinitions: List[MemberDef] = automaton.syntax.keywords.toList.map { k => 119 | lazyValDef(k.scalaIdent, tokenListType(keywordType(k)), 120 | singleTokenListObj(keywordType(k), keywordObjRef(k))) 121 | } 122 | 123 | /** 124 | * DSL のキーワードによる遷移を表現する関数の定義 125 | */ 126 | lazy val keywordTransitionDefinitions: List[MemberDef] = automaton.syntax.keywords.toList.flatMap { k => 127 | val literalArgFunctionDef = functionDef(k.scalaIdent, typeParameters("U", "N3"), List(parameter("literal", simpleType("U"))), 128 | List(parameter("transition", 
transitionType(genericLiteralType("U"), simpleType("N2"), simpleType("N3")))), 129 | simpleType("N3"), 130 | objectRef("transition").callTransit(objectRef(k.scalaIdent), constructLiteralObj(simpleType("U"), objectRef("literal")))) 131 | 132 | val tokenListArgFunctionDef = functionDef(k.scalaIdent, List(typeParameter("U", genericTokenListType), typeParameter("N3")), List(parameter("tokens", simpleType("U"))), 133 | List(parameter("transitions", transitionsType(simpleType("U"), simpleType("N2"), simpleType("N3")))), 134 | simpleType("N3"), 135 | objectRef("transitions").callTransit(objectRef(k.scalaIdent), objectRef("tokens"))) 136 | 137 | List(implicitClassDef(typeParameters("T", "N1", "N2"), parameter("value", simpleType("T")), 138 | List(parameter("transition1", transitionType(genericLiteralType("T"), startNodeType, simpleType("N1"))), 139 | parameter("transition2", transitionType(keywordType(k), simpleType("N1"), simpleType("N2")))), 140 | List(functionDef(k.scalaIdent, Nil, Nil, Nil, simpleType("N2"), 141 | objectRef("transition2").callTransit(objectRef("transition1").callTransit(startNodeObjRef, constructLiteralObj(simpleType("T"), objectRef("value"))), keywordObjRef(k))), 142 | literalArgFunctionDef, tokenListArgFunctionDef)), 143 | implicitClassDef(List(typeParameter("T", genericTokenListType), typeParameter("N1"), typeParameter("N2")), parameter("value", simpleType("T")), 144 | List(parameter("transition1", transitionsType(simpleType("T"), startNodeType, simpleType("N1"))), 145 | parameter("transition2", transitionType(keywordType(k), simpleType("N1"), simpleType("N2")))), 146 | List(functionDef(k.scalaIdent, Nil, Nil, Nil, simpleType("N2"), 147 | objectRef("transition2").callTransit(objectRef("transition1").callTransit(startNodeObjRef, objectRef("value")), keywordObjRef(k))), 148 | literalArgFunctionDef, tokenListArgFunctionDef)), 149 | implicitClassDef(typeParameters("N1", "N2"), parameter("node", simpleType("N1")), 150 | List(parameter("transition1", transitionType(keywordType(k), simpleType("N1"), simpleType("N2")))), 151 | List(functionDef(k.scalaIdent, Nil, Nil, Nil, simpleType("N2"), 152 | objectRef("transition1").callTransit(objectRef("node"), keywordObjRef(k))), 153 | literalArgFunctionDef, tokenListArgFunctionDef))) 154 | } 155 | 156 | /** 157 | * DSL のリテラルによる遷移を表現する関数の定義 158 | */ 159 | lazy val literalTransitionDefinitions: List[MemberDef] = for { 160 | lit <- automaton.syntax.literals.toList 161 | id <- literalIdentifier(lit) 162 | } yield { 163 | val methodBody = objectRef("transition").callTransit(objectRef("node"), constructLiteralObj(literalType(lit), objectRef("value"))) 164 | implicitClassDef(typeParameters("N1", "N2"), parameter("node", simpleType("N1")), 165 | List(parameter("transition", transitionType(literalTokenTypes(lit), simpleType("N1"), simpleType("N2")))), 166 | List(functionDef(id, Nil, List(parameter("value", literalType(lit))), Nil, simpleType("N2"), methodBody))) 167 | } 168 | 169 | /** 170 | * Shift 操作を表す implicit value の定義 171 | */ 172 | lazy val shiftImplicitDefinitions: List[MemberDef] = { 173 | automaton.shift.flatMap { case (from, map) => 174 | val (typeParams, fromType) = 175 | if (automaton.start == from) (Nil, startNodeType) 176 | else (typeParameters("NX"), parameterizedType(nodeName(from), simpleTypes("NX"))) 177 | 178 | map.map { case (terminal, to) => 179 | val toType = parameterizedType(nodeName(to), List(fromType)) 180 | val bodyLambda = terminal match { 181 | case Inl(lit) => lambda(List(parameter("s", fromType), parameter("t", 
169 | /** 170 | * Definitions of the implicit values that represent shift operations 171 | */ 172 | lazy val shiftImplicitDefinitions: List[MemberDef] = { 173 | automaton.shift.flatMap { case (from, map) => 174 | val (typeParams, fromType) = 175 | if (automaton.start == from) (Nil, startNodeType) 176 | else (typeParameters("NX"), parameterizedType(nodeName(from), simpleTypes("NX"))) 177 | 178 | map.map { case (terminal, to) => 179 | val toType = parameterizedType(nodeName(to), List(fromType)) 180 | val bodyLambda = terminal match { 181 | case Inl(lit) => lambda(List(parameter("s", fromType), parameter("t", literalTokenTypes(lit))), 182 | callApply(objectRef(nodeName(to)), List(fromType), List(objectRef("s"), fieldRef(objectRef("t"), "value")))) 183 | case _ => lambda(List(parameter("s", fromType), unusedParameter(terminalType(terminal))), 184 | callApply(objectRef(nodeName(to)), List(fromType), List(objectRef("s")))) 185 | } 186 | implicitFunctionDef(typeParams, Nil, Nil, shiftType(terminalType(terminal), fromType, toType), 187 | constructShiftObj(terminalType(terminal), fromType, toType, bodyLambda)) 188 | } 189 | } 190 | }.toList 191 | 192 | /** 193 | * Definitions of the implicit values that represent reduce operations 194 | */ 195 | lazy val reduceImplicitDefinitions: List[MemberDef] = for { 196 | (from, rule, lookahead) <- automaton.reduce.toList 197 | path <- reducePath(from, rule) 198 | la <- lookahead 199 | dest <- automaton.goTo(path.head).get(rule.left) 200 | } yield reduceImplicitDefinition(rule, path, dest, la) 201 | 202 | /** 203 | * Definitions of the implicit values that represent the accept (finishing) operation 204 | */ 205 | lazy val acceptImplicitDefinitions: List[MemberDef] = for { 206 | (node, rule) <- automaton.accept.toList 207 | path <- reducePath(node, rule) 208 | } yield acceptImplicitDefinition(rule, path) 209 | 210 | /** 211 | * Computes the path rewound by a reduce 212 | * @param from the LR closure at which the reduce starts 213 | * @param rule the grammar rule being reduced 214 | * @return the set of lists of LR closures that represent the paths rewound by the reduce 215 | */ 216 | private def reducePath (from: LRClosure, rule: Rule): Set[List[LRClosure]] = rule.right.foldRight(Set(List(from))) { 217 | case (Inl(nt), set) => reducePath(Inl(nt), set) 218 | case (Inr(Inl(t)), set) => reducePath(Inr(Inl(t)), set) 219 | case (Inr(Inr(_)), set) => set 220 | } 221 | 222 | private def reducePath (symbol: NonEmptySymbol, set: Set[List[LRClosure]]): Set[List[LRClosure]] = for { 223 | path <- set if automaton.state(path.head) == symbol 224 | node <- automaton.reverseEdges(path.head) 225 | } yield node :: path 226 | 227 | private def reduceImplicitDefinition (rule: Rule, path: List[LRClosure], destination: LRClosure, lookahead: Terminal): MemberDef = { 228 | val (typeParams, baseType) = 229 | if (automaton.start == path.head) (Nil, startNodeType) 230 | else (typeParameters("NX"), parameterizedType(nodeName(path.head), simpleTypes("NX"))) 231 | 232 | val fromType = path.tail.foldLeft(baseType) { (arg, node) => parameterizedType(nodeName(node), List(arg)) } 233 | val toType = parameterizedType(nodeName(destination), List(baseType)) 234 | 235 | val (prevField, astElements) = path.tail.foldRight[(Expr, List[Expr])]((objectRef("s"), Nil)) { case (node, (cur, args)) => 236 | automaton.state(node) match { 237 | case Inl(_) | Inr(Inl(Inl(_))) => (fieldRef(cur, "prev"), fieldRef(cur, "value") :: args) // argument that carries a meaningful value 238 | case _ => (fieldRef(cur, "prev"), args) // keywords and the like carry no meaningful value 239 | } 240 | } 241 | val bodyLambda = lambda(List(parameter("s", fromType)), 242 | callApply(objectRef(nodeName(destination)), List(baseType), 243 | List(prevField, constructAST(rule, astElements)))) 244 | 245 | implicitFunctionDef(typeParams, Nil, Nil, reduceType(terminalType(lookahead), fromType, toType), 246 | constructReduceObj(terminalType(lookahead), fromType, toType, bodyLambda)) 247 | } 248 | 
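// For reference, a reduce implicit built by the definition above looks like the following entry from
// test/GeneratedCodeSample.scala: the nested node types encode the rewind path computed by reducePath,
// and the chained .prev accesses walk back along that path while collecting the semantic values
// (the readable name again stands in for a generated unique name):
//
//   implicit def node1_reduce_node24_$$parenright_14_9_5 : Reduce[Rp.type, Node1[Node5[Node9[Node14.type]]], Node24[Node14.type]] =
//     Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value)))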
249 | private def acceptImplicitDefinition (rule: Rule, path: List[LRClosure]): MemberDef = { 250 | val (typeParams, baseType) = 251 | if (automaton.start == path.head) (Nil, startNodeType) 252 | else (typeParameters("NX"), parameterizedType(nodeName(path.head), simpleTypes("NX"))) 253 | 254 | val fromType = path.tail.foldLeft(baseType) { (arg, node) => parameterizedType(nodeName(node), List(arg)) } 255 | 256 | val astElements = path.tail.foldRight[(Expr, List[Expr])]((objectRef("s"), Nil)) { case (node, (cur, args)) => 257 | automaton.state(node) match { 258 | case Inl(_) | Inr(Inl(Inl(_))) => (fieldRef(cur, "prev"), fieldRef(cur, "value") :: args) // argument that carries a meaningful value 259 | case _ => (fieldRef(cur, "prev"), args) // keywords and the like carry no meaningful value 260 | } 261 | }._2 262 | 263 | val bodyLambda = lambda(List(parameter("s", fromType)), constructAST(rule, astElements)) 264 | 265 | implicitFunctionDef(typeParams, Nil, Nil, acceptType(fromType, nonTerminalType(automaton.syntax.start)), 266 | constructAcceptObj(fromType, nonTerminalType(automaton.syntax.start), bodyLambda)) 267 | } 268 | 269 | /** 270 | * Computes the type that corresponds to a terminal symbol 271 | * @param t terminal symbol 272 | * @return the corresponding type 273 | */ 274 | private def terminalType (t: Terminal): Type = t match { 275 | case Inl(lit) => literalTokenTypes(lit) 276 | case Inr(Inl(k)) => objectType(keywordTokenTypeNames(k)) 277 | case Inr(Inr(_)) => objectType("com.phenan.scalalr.internal.EoI") 278 | } 279 | 280 | /** 281 | * Mapping from each literal to its internal type 282 | * If the start node of the LALR automaton has an outgoing edge for that literal, the node the edge points to is used as the literal's type 283 | */ 284 | private lazy val literalTokenTypes: Map[LiteralToken, Type] = automaton.syntax.literals.map { literal => 285 | literal -> parameterizedType("com.phenan.scalalr.internal.Literal", List(literalType(literal))) 286 | }.toMap 287 | 288 | private def genericLiteralType (name: String): Type = parameterizedType("com.phenan.scalalr.internal.Literal", simpleTypes(name)) 289 | 290 | private def constructLiteralObj (litType: Type, arg: Expr): Expr = callApply(objectRef("com.phenan.scalalr.internal.Literal"), List(litType), List(arg)) 291 | 292 | private def constructShiftObj (terminal: Type, from: Type, to: Type, body: Expr): Expr = callApply(objectRef("com.phenan.scalalr.internal.Shift"), List(terminal, from, to), List(body)) 293 | 294 | private def constructReduceObj (terminal: Type, from: Type, to: Type, body: Expr): Expr = callApply(objectRef("com.phenan.scalalr.internal.Reduce"), List(terminal, from, to), List(body)) 295 | 296 | private def constructAcceptObj (from: Type, to: Type, body: Expr): Expr = callApply(objectRef("com.phenan.scalalr.internal.Accept"), List(from, to), List(body)) 297 | 298 | private def shiftType (terminal: Type, from: Type, to: Type): Type = parameterizedType("com.phenan.scalalr.internal.Shift", List(terminal, from, to)) 299 | 300 | private def reduceType (terminal: Type, from: Type, to: Type): Type = parameterizedType("com.phenan.scalalr.internal.Reduce", List(terminal, from, to)) 301 | 302 | private def acceptType (from: Type, to: Type): Type = parameterizedType("com.phenan.scalalr.internal.Accept", List(from, to)) 303 | 304 | private def transitionType (terminal: Type, from: Type, to: Type): Type = parameterizedType("com.phenan.scalalr.internal.Transition", List(terminal, from, to)) 305 | 306 | private def transitionsType (tokens: Type, from: Type, to: Type): Type = parameterizedType("com.phenan.scalalr.internal.Transitions", List(tokens, from, to)) 307 | 308 | private def genericTokenListType: Type = simpleType("com.phenan.scalalr.internal.TokenList") 309 | 310 | private def tokenListType(tokenTypes: Type*): Type = tokenListType(tokenTypes.toList) 311 | 312 | private def tokenListType(tokenTypes: List[Type]): Type = tokenTypes match { 313 | case head :: tail => parameterizedType("com.phenan.scalalr.internal.TokenListCons", List(head, tokenListType(tail))) 314 | case Nil => 
simpleType("com.phenan.scalalr.internal.TokenListSentinel") 315 | } 316 | 317 | private def singleTokenListObj (tokenType: Type, token: Expr): Expr = { 318 | callApply(objectRef("com.phenan.scalalr.internal.TokenListCons"), List(tokenType, simpleType("com.phenan.scalalr.internal.TokenListSentinel")), List(token, objectRef("com.phenan.scalalr.internal.TokenListSentinel"))) 319 | } 320 | 321 | private def keywordType (k: Keyword): Type = objectType(keywordTokenTypeNames(k)) 322 | 323 | private def keywordObjRef (k: Keyword): Expr = objectRef(keywordTokenTypeNames(k)) 324 | 325 | private lazy val startNodeType: Type = objectType(startNode) 326 | 327 | private lazy val startNodeObjRef: Expr = objectRef(startNode) 328 | 329 | private implicit class ExprOps (e: Expr) { 330 | def callTransit (from: Expr, token: Expr): Expr = methodCall(e, "transit", Nil, List(from, token)) 331 | } 332 | 333 | private lazy val keywordTokenTypeNames: Map[Keyword, String] = automaton.syntax.keywords.map(_ -> generateUniqueName).toMap 334 | private lazy val nodeName: Map[LRClosure, String] = automaton.nodes.map { node => 335 | if (automaton.start == node) node -> "com.phenan.scalalr.internal.StartNode" 336 | else node -> generateUniqueName 337 | }.toMap 338 | 339 | private lazy val startNode: String = nodeName(automaton.start) 340 | } 341 | } 342 | -------------------------------------------------------------------------------- /test/GeneratedCodeSample.scala: -------------------------------------------------------------------------------- 1 | import com.phenan.scalalr._ 2 | import com.phenan.scalalr.internal._ 3 | 4 | object MathDSL2 { 5 | 6 | ////////////////////////////////////////////////////////////////////////////////////////////////////////// 7 | 8 | sealed trait Expr 9 | case class Mul (arg0: Term, arg1: Factor) extends Term 10 | sealed trait Term extends Expr 11 | case class Stmts (arg0: Stmt, arg1: Program) extends Program 12 | case class Div (arg0: Term, arg1: Factor) extends Term 13 | case class Num (arg0: Int) extends Factor 14 | sealed trait Factor extends Term 15 | case class Stmt (arg0: Expr) extends Program 16 | case class Paren (arg0: Expr) extends Factor 17 | sealed trait Program 18 | case class Add (arg0: Expr, arg1: Term) extends Expr 19 | case class Sub (arg0: Expr, arg1: Term) extends Expr 20 | 21 | ////////////////////////////////////////////////////////////////////////////////////////////////////////// 22 | 23 | case class Node0 [NX] (prev: NX, value: Expr) 24 | case class Node1 [NX] (prev: NX) 25 | case class Node2 [NX] (prev: NX) 26 | case class Node3 [NX] (prev: NX) 27 | case class Node4 [NX] (prev: NX, value: Stmts) 28 | case class Node5 [NX] (prev: NX, value: Expr) 29 | case class Node6 [NX] (prev: NX, value: Stmt) 30 | case class Node7 [NX] (prev: NX, value: Term) 31 | case class Node8 [NX] (prev: NX, value: Program) 32 | case class Node9 [NX] (prev: NX) 33 | case class Node10 [NX] (prev: NX, value: Int) 34 | case class Node11 [NX] (prev: NX, value: Add) 35 | case class Node12 [NX] (prev: NX, value: Factor) 36 | case class Node13 [NX] (prev: NX, value: Sub) 37 | case object Node14 38 | case class Node15 [NX] (prev: NX, value: Term) 39 | case class Node16 [NX] (prev: NX, value: Program) 40 | case class Node17 [NX] (prev: NX, value: Div) 41 | case class Node18 [NX] (prev: NX, value: Term) 42 | case class Node19 [NX] (prev: NX) 43 | case class Node20 [NX] (prev: NX, value: Mul) 44 | case class Node21 [NX] (prev: NX, value: Num) 45 | case class Node22 [NX] (prev: NX) 46 | case class Node23 
[NX] (prev: NX, value: Factor) 47 | case class Node24 [NX] (prev: NX, value: Paren) 48 | case class Node25 [NX] (prev: NX, value: Factor) 49 | case class Node26 [NX] (prev: NX) 50 | 51 | ////////////////////////////////////////////////////////////////////////////////////////////////////////// 52 | 53 | case object Lp 54 | case object Mul 55 | case object Rp 56 | case object Minus 57 | case object End 58 | case object Plus 59 | case object Div 60 | 61 | ////////////////////////////////////////////////////////////////////////////////////////////////////////// 62 | 63 | def int (value: Int): TokenListCons[Literal[Int], TokenListSentinel] = TokenListCons(Literal(value), TokenListSentinel) 64 | 65 | def $$asterisk : TokenListCons[Mul.type, TokenListSentinel] = singleToken(Mul) 66 | def $$parenleft : TokenListCons[Lp.type, TokenListSentinel] = singleToken(Lp) 67 | def $$parenright : TokenListCons[Rp.type, TokenListSentinel] = singleToken(Rp) 68 | def $$hyphen : TokenListCons[Minus.type, TokenListSentinel] = singleToken(Minus) 69 | def end : TokenListCons[End.type, TokenListSentinel] = singleToken(End) 70 | def $$plus : TokenListCons[Plus.type, TokenListSentinel] = singleToken(Plus) 71 | def $$slash : TokenListCons[Div.type, TokenListSentinel] = singleToken(Div) 72 | 73 | ////////////////////////////////////////////////////////////////////////////////////////////////////////// 74 | 75 | implicit class start_with_actual_literal_$$plus [T, N1, N2] (value: T) (implicit transition1: Transition[Literal[T], Node14.type, N1], transition2: Transition[Plus.type, N1, N2]) { 76 | def $$plus : N2 = transition2.transit(transition1.transit(Node14, Literal[T](value)), Plus) 77 | def $$plus [U, N3] (value2: U)(implicit transition3: Transition[Literal[U], N2, N3]): N3 = transition3.transit($$plus, Literal[U](value2)) 78 | def $$plus [U <: TokenList, N3] (value2: U)(implicit transitions: Transitions[U, N2, N3]): N3 = transitions.transit($$plus, value2) 79 | } 80 | 81 | implicit class start_with_actual_literal_$$hyphen [T, N1, N2] (value: T) (implicit transition1: Transition[Literal[T], Node14.type, N1], transition2: Transition[Minus.type, N1, N2]) { 82 | def $$hyphen : N2 = transition2.transit(transition1.transit(Node14, Literal[T](value)), Minus) 83 | def $$hyphen [U, N3] (value2: U)(implicit transition3: Transition[Literal[U], N2, N3]): N3 = transition3.transit($$hyphen, Literal[U](value2)) 84 | def $$hyphen [U <: TokenList, N3] (value2: U)(implicit transitions: Transitions[U, N2, N3]): N3 = transitions.transit($$hyphen, value2) 85 | } 86 | 87 | implicit class start_with_actual_literal_$$asterisk [T, N1, N2] (value: T) (implicit transition1: Transition[Literal[T], Node14.type, N1], transition2: Transition[Mul.type, N1, N2]) { 88 | def $$asterisk : N2 = transition2.transit(transition1.transit(Node14, Literal[T](value)), Mul) 89 | def $$asterisk [U, N3] (value2: U)(implicit transition3: Transition[Literal[U], N2, N3]): N3 = transition3.transit($$asterisk, Literal[U](value2)) 90 | def $$asterisk [U <: TokenList, N3] (value2: U)(implicit transitions: Transitions[U, N2, N3]): N3 = transitions.transit($$asterisk, value2) 91 | } 92 | 93 | implicit class start_with_actual_literal_$$slash [T, N1, N2] (value: T) (implicit transition1: Transition[Literal[T], Node14.type, N1], transition2: Transition[Div.type, N1, N2]) { 94 | def $$slash : N2 = transition2.transit(transition1.transit(Node14, Literal[T](value)), Div) 95 | def $$slash [U, N3] (value2: U)(implicit transition3: Transition[Literal[U], N2, N3]): N3 = 
transition3.transit($$slash, Literal[U](value2)) 96 | def $$slash [U <: TokenList, N3] (value2: U)(implicit transitions: Transitions[U, N2, N3]): N3 = transitions.transit($$slash, value2) 97 | } 98 | 99 | implicit class start_with_actual_literal_$$parenleft [T, N1, N2] (value: T) (implicit transition1: Transition[Literal[T], Node14.type, N1], transition2: Transition[Lp.type, N1, N2]) { 100 | def $$parenleft : N2 = transition2.transit(transition1.transit(Node14, Literal[T](value)), Lp) 101 | def $$parenleft [U, N3] (value2: U)(implicit transition3: Transition[Literal[U], N2, N3]): N3 = transition3.transit($$parenleft, Literal[U](value2)) 102 | def $$parenleft [U <: TokenList, N3] (value2: U)(implicit transitions: Transitions[U, N2, N3]): N3 = transitions.transit($$parenleft, value2) 103 | } 104 | 105 | implicit class start_with_actual_literal_$$parenright [T, N1, N2] (value: T) (implicit transition1: Transition[Literal[T], Node14.type, N1], transition2: Transition[Rp.type, N1, N2]) { 106 | def $$parenright : N2 = transition2.transit(transition1.transit(Node14, Literal[T](value)), Rp) 107 | def $$parenright [U, N3] (value2: U)(implicit transition3: Transition[Literal[U], N2, N3]): N3 = transition3.transit($$parenright, Literal[U](value2)) 108 | def $$parenright [U <: TokenList, N3] (value2: U)(implicit transitions: Transitions[U, N2, N3]): N3 = transitions.transit($$parenright, value2) 109 | } 110 | 111 | implicit class start_with_actual_literal_end [T, N1, N2] (value: T) (implicit transition1: Transition[Literal[T], Node14.type, N1], transition2: Transition[End.type, N1, N2]) { 112 | def end : N2 = transition2.transit(transition1.transit(Node14, Literal[T](value)), End) 113 | def end [U, N3] (value2: U)(implicit transition3: Transition[Literal[U], N2, N3]): N3 = transition3.transit(end, Literal[U](value2)) 114 | def end [U <: TokenList, N3] (value2: U)(implicit transitions: Transitions[U, N2, N3]): N3 = transitions.transit(end, value2) 115 | } 116 | 117 | ////////////////////////////////////////////////////////////////////////////////////////////////////////// 118 | 119 | implicit class start_with_function_style_$$plus [L <: TokenList, N1, N2] (tokens: L)(implicit transitions: Transitions[L, Node14.type, N1], transition1: Transition[Plus.type, N1, N2]) { 120 | def $$plus : N2 = transition1.transit(transitions.transit(Node14, tokens), Plus) 121 | def $$plus [U, N3] (value: U)(implicit transition2: Transition[Literal[U], N2, N3]): N3 = transition2.transit($$plus, Literal[U](value)) 122 | def $$plus [U <: TokenList, N3] (value: U)(implicit transitions2: Transitions[U, N2, N3]): N3 = transitions2.transit($$plus, value) 123 | } 124 | 125 | implicit class start_with_function_style_$$hyphen [L <: TokenList, N1, N2] (tokens: L)(implicit transitions: Transitions[L, Node14.type, N1], transition1: Transition[Minus.type, N1, N2]) { 126 | def $$hyphen : N2 = transition1.transit(transitions.transit(Node14, tokens), Minus) 127 | def $$hyphen [U, N3] (value: U)(implicit transition2: Transition[Literal[U], N2, N3]): N3 = transition2.transit($$hyphen, Literal[U](value)) 128 | def $$hyphen [U <: TokenList, N3] (value: U)(implicit transitions2: Transitions[U, N2, N3]): N3 = transitions2.transit($$hyphen, value) 129 | } 130 | 131 | implicit class start_with_function_style_$$asterisk [L <: TokenList, N1, N2] (tokens: L)(implicit transitions: Transitions[L, Node14.type, N1], transition1: Transition[Mul.type, N1, N2]) { 132 | def $$asterisk : N2 = transition1.transit(transitions.transit(Node14, tokens), Mul) 133 | 
def $$asterisk [U, N3] (value: U)(implicit transition2: Transition[Literal[U], N2, N3]): N3 = transition2.transit($$asterisk, Literal[U](value)) 134 | def $$asterisk [U <: TokenList, N3] (value: U)(implicit transitions2: Transitions[U, N2, N3]): N3 = transitions2.transit($$asterisk, value) 135 | } 136 | 137 | implicit class start_with_function_style_$$slash [L <: TokenList, N1, N2] (tokens: L)(implicit transitions: Transitions[L, Node14.type, N1], transition1: Transition[Div.type, N1, N2]) { 138 | def $$slash : N2 = transition1.transit(transitions.transit(Node14, tokens), Div) 139 | def $$slash [U, N3] (value: U)(implicit transition2: Transition[Literal[U], N2, N3]): N3 = transition2.transit($$slash, Literal[U](value)) 140 | def $$slash [U <: TokenList, N3] (value: U)(implicit transitions2: Transitions[U, N2, N3]): N3 = transitions2.transit($$slash, value) 141 | } 142 | 143 | implicit class start_with_function_style_$$parenleft [L <: TokenList, N1, N2] (tokens: L)(implicit transitions: Transitions[L, Node14.type, N1], transition1: Transition[Lp.type, N1, N2]) { 144 | def $$parenleft : N2 = transition1.transit(transitions.transit(Node14, tokens), Lp) 145 | def $$parenleft [U, N3] (value: U)(implicit transition2: Transition[Literal[U], N2, N3]): N3 = transition2.transit($$parenleft, Literal[U](value)) 146 | def $$parenleft [U <: TokenList, N3] (value: U)(implicit transitions2: Transitions[U, N2, N3]): N3 = transitions2.transit($$parenleft, value) 147 | } 148 | 149 | implicit class start_with_function_style_$$parenright [L <: TokenList, N1, N2] (tokens: L)(implicit transitions: Transitions[L, Node14.type, N1], transition1: Transition[Rp.type, N1, N2]) { 150 | def $$parenright : N2 = transition1.transit(transitions.transit(Node14, tokens), Rp) 151 | def $$parenright [U, N3] (value: U)(implicit transition2: Transition[Literal[U], N2, N3]): N3 = transition2.transit($$parenright, Literal[U](value)) 152 | def $$parenright [U <: TokenList, N3] (value: U)(implicit transitions2: Transitions[U, N2, N3]): N3 = transitions2.transit($$parenright, value) 153 | } 154 | 155 | implicit class start_with_function_style_end [L <: TokenList, N1, N2] (tokens: L)(implicit transitions: Transitions[L, Node14.type, N1], transition1: Transition[End.type, N1, N2]) { 156 | def end : N2 = transition1.transit(transitions.transit(Node14, tokens), End) 157 | def end [U, N3] (value: U)(implicit transition2: Transition[Literal[U], N2, N3]): N3 = transition2.transit(end, Literal[U](value)) 158 | def end [U <: TokenList, N3] (value: U)(implicit transitions2: Transitions[U, N2, N3]): N3 = transitions2.transit(end, value) 159 | } 160 | 161 | ////////////////////////////////////////////////////////////////////////////////////////////////////////// 162 | 163 | implicit class transition_$$parenleft [N1, N2] (node: N1) (implicit transition1: Transition[Lp.type, N1, N2]) { 164 | def $$parenleft : N2 = transition1.transit(node, Lp) 165 | def $$parenleft [T, N3] (value: T)(implicit transition2: Transition[Literal[T], N2, N3]): N3 = transition2.transit($$parenleft, Literal[T](value)) 166 | def $$parenleft [H <: TokenList, N3] (tokens: H)(implicit transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$parenleft, tokens) 167 | } 168 | implicit class transition_$$asterisk [N1, N2] (node: N1) (implicit transition1: Transition[Mul.type, N1, N2]) { 169 | def $$asterisk : N2 = transition1.transit(node, Mul) 170 | def $$asterisk [T, N3] (value: T)(implicit transition2: Transition[Literal[T], N2, N3]): N3 = 
transition2.transit($$asterisk, Literal[T](value)) 171 | def $$asterisk [H <: TokenList, N3] (tokens: H)(implicit transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$asterisk, tokens) 172 | } 173 | implicit class transition_$$parenright [N1, N2] (node: N1) (implicit transition1: Transition[Rp.type, N1, N2]) { 174 | def $$parenright : N2 = transition1.transit(node, Rp) 175 | def $$parenright [T, N3] (value: T)(implicit transition2: Transition[Literal[T], N2, N3]): N3 = transition2.transit($$parenright, Literal[T](value)) 176 | def $$parenright [H <: TokenList, N3] (tokens: H)(implicit transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$parenright, tokens) 177 | } 178 | implicit class transition_$$hyphen [N1, N2] (node: N1) (implicit transition1: Transition[Minus.type, N1, N2]) { 179 | def $$hyphen : N2 = transition1.transit(node, Minus) 180 | def $$hyphen [T, N3] (value: T)(implicit transition2: Transition[Literal[T], N2, N3]): N3 = transition2.transit($$hyphen, Literal[T](value)) 181 | def $$hyphen [H <: TokenList, N3] (tokens: H)(implicit transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$hyphen, tokens) 182 | } 183 | implicit class transition_end [N1, N2] (node: N1) (implicit transition1: Transition[End.type, N1, N2]) { 184 | def end : N2 = transition1.transit(node, End) 185 | def end [T, N3] (value: T)(implicit transition2: Transition[Literal[T], N2, N3]): N3 = transition2.transit(end, Literal[T](value)) 186 | def end [H <: TokenList, N3] (tokens: H)(implicit transitions: Transitions[H, N2, N3]): N3 = transitions.transit(end, tokens) 187 | } 188 | implicit class transition_$$plus [N1, N2] (node: N1) (implicit transition1: Transition[Plus.type, N1, N2]) { 189 | def $$plus : N2 = transition1.transit(node, Plus) 190 | def $$plus [T, N3] (value: T)(implicit transition2: Transition[Literal[T], N2, N3]): N3 = transition2.transit($$plus, Literal[T](value)) 191 | def $$plus [H <: TokenList, N3] (tokens: H)(implicit transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$plus, tokens) 192 | } 193 | implicit class transition_$$slash [N1, N2] (node: N1) (implicit transition1: Transition[Div.type, N1, N2]) { 194 | def $$slash : N2 = transition1.transit(node, Div) 195 | def $$slash [T, N3] (value: T)(implicit transition2: Transition[Literal[T], N2, N3]): N3 = transition2.transit($$slash, Literal[T](value)) 196 | def $$slash [H <: TokenList, N3] (tokens: H)(implicit transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$slash, tokens) 197 | } 198 | 199 | /* This version does not work 200 | implicit class transition_keyword [N1] (node: N1) { 201 | def $$parenleft [N2] (implicit transition1: Transition[Lp.type, N1, N2]): N2 = transition1.transit(node, Lp) 202 | def $$parenleft [T, N2, N3] (value: T)(implicit transition1: Transition[Lp.type, N1, N2], transition2: Transition[Literal[T], N2, N3]): N3 = transition2.transit($$parenleft, Literal[T](value)) 203 | def $$parenleft [H <: TokenList, N2, N3] (tokens: H)(implicit transition1: Transition[Lp.type, N1, N2], transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$parenleft, tokens) 204 | 205 | def $$asterisk [N2] (implicit transition1: Transition[Mul.type, N1, N2]): N2 = transition1.transit(node, Mul) 206 | def $$asterisk [H <: TokenList, N2, N3] (tokens: H)(implicit transition1: Transition[Mul.type, N1, N2], transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$asterisk, tokens) 207 | 208 | def $$parenright [N2] (implicit transition1: Transition[Rp.type, N1, N2]): N2 = 
transition1.transit(node, Rp) 209 | def $$parenright [H <: TokenList, N2, N3] (tokens: H)(implicit transition1: Transition[Rp.type, N1, N2], transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$parenright, tokens) 210 | 211 | def $$hyphen [N2] (implicit transition1: Transition[Minus.type, N1, N2]): N2 = transition1.transit(node, Minus) 212 | def $$hyphen [H <: TokenList, N2, N3] (tokens: H)(implicit transition1: Transition[Minus.type, N1, N2], transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$hyphen, tokens) 213 | 214 | def end [N2] (implicit transition1: Transition[End.type, N1, N2]): N2 = transition1.transit(node, End) 215 | def end [H <: TokenList, N2, N3] (tokens: H)(implicit transition1: Transition[End.type, N1, N2], transitions: Transitions[H, N2, N3]): N3 = transitions.transit(end, tokens) 216 | 217 | def $$plus [N2] (implicit transition1: Transition[Plus.type, N1, N2]): N2 = transition1.transit(node, Plus) 218 | def $$plus [T, N2, N3] (value: T)(implicit transition1: Transition[Plus.type, N1, N2], transition2: Transition[Literal[T], N2, N3]): N3 = transition2.transit($$plus, Literal[T](value)) 219 | def $$plus [H <: TokenList, N2, N3] (tokens: H)(implicit transition1: Transition[Plus.type, N1, N2], transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$plus, tokens) 220 | 221 | def $$slash [N2] (implicit transition1: Transition[Div.type, N1, N2]): N2 = transition1.transit(node, Div) 222 | def $$slash [T, N2, N3] (value: T)(implicit transition1: Transition[Div.type, N1, N2], transition2: Transition[Literal[T], N2, N3]): N3 = transition2.transit($$slash, Literal[T](value)) 223 | def $$slash [H <: TokenList, N2, N3] (tokens: H)(implicit transition1: Transition[Div.type, N1, N2], transitions: Transitions[H, N2, N3]): N3 = transitions.transit($$slash, tokens) 224 | }*/ 225 | 226 | ////////////////////////////////////////////////////////////////////////////////////////////////////////// 227 | 228 | implicit class transition_int [N1, N2] (node: N1) (implicit transition: Transition[Literal[Int], N1, N2]) { 229 | def int (value: Int): N2 = transition.transit(node, Literal[Int](value)) 230 | //def apply (value: Int): N2 = transition.transit(node, Node10(Node14, value)) 231 | } 232 | 233 | ////////////////////////////////////////////////////////////////////////////////////////////////////////// 234 | 235 | implicit def node0_shift_node2 [NX] : Shift[End.type, Node0[NX], Node2[Node0[NX]]] = Shift((s, _) => Node2(s)) 236 | implicit def node0_shift_node19 [NX] : Shift[Minus.type, Node0[NX], Node19[Node0[NX]]] = Shift((s, _) => Node19(s)) 237 | implicit def node0_shift_node3 [NX] : Shift[Plus.type, Node0[NX], Node3[Node0[NX]]] = Shift((s, _) => Node3(s)) 238 | implicit def node3_shift_node10 [NX] : Shift[Literal[Int], Node3[NX], Node10[Node3[NX]]] = Shift((s, t) => Node10(s, t.value)) 239 | implicit def node3_shift_node9 [NX] : Shift[Lp.type, Node3[NX], Node9[Node3[NX]]] = Shift((s, _) => Node9(s)) 240 | implicit def node5_shift_node1 [NX] : Shift[Rp.type, Node5[NX], Node1[Node5[NX]]] = Shift((s, _) => Node1(s)) 241 | implicit def node5_shift_node19 [NX] : Shift[Minus.type, Node5[NX], Node19[Node5[NX]]] = Shift((s, _) => Node19(s)) 242 | implicit def node5_shift_node3 [NX] : Shift[Plus.type, Node5[NX], Node3[Node5[NX]]] = Shift((s, _) => Node3(s)) 243 | implicit def node6_shift_node10 [NX] : Shift[Literal[Int], Node6[NX], Node10[Node6[NX]]] = Shift((s, t) => Node10(s, t.value)) 244 | implicit def node6_shift_node9 [NX] : Shift[Lp.type, Node6[NX], 
Node9[Node6[NX]]] = Shift((s, _) => Node9(s)) 245 | implicit def node7_shift_node22 [NX] : Shift[Div.type, Node7[NX], Node22[Node7[NX]]] = Shift((s, _) => Node22(s)) 246 | implicit def node7_shift_node26 [NX] : Shift[Mul.type, Node7[NX], Node26[Node7[NX]]] = Shift((s, _) => Node26(s)) 247 | implicit def node9_shift_node10 [NX] : Shift[Literal[Int], Node9[NX], Node10[Node9[NX]]] = Shift((s, t) => Node10(s, t.value)) 248 | implicit def node9_shift_node9 [NX] : Shift[Lp.type, Node9[NX], Node9[Node9[NX]]] = Shift((s, _) => Node9(s)) 249 | implicit def node14_shift_node10 : Shift[Literal[Int], Node14.type, Node10[Node14.type]] = Shift((s, t) => Node10[Node14.type](s, t.value)) 250 | implicit def node14_shift_node9 : Shift[Lp.type, Node14.type, Node9[Node14.type]] = Shift((s, _) => Node9[Node14.type](s)) 251 | implicit def node15_shift_node22 [NX] : Shift[Div.type, Node15[NX], Node22[Node15[NX]]] = Shift((s, _) => Node22(s)) 252 | implicit def node15_shift_node26 [NX] : Shift[Mul.type, Node15[NX], Node26[Node15[NX]]] = Shift((s, _) => Node26(s)) 253 | implicit def node18_shift_node22 [NX] : Shift[Div.type, Node18[NX], Node22[Node18[NX]]] = Shift((s, _) => Node22(s)) 254 | implicit def node18_shift_node26 [NX] : Shift[Mul.type, Node18[NX], Node26[Node18[NX]]] = Shift((s, _) => Node26(s)) 255 | implicit def node19_shift_node10 [NX] : Shift[Literal[Int], Node19[NX], Node10[Node19[NX]]] = Shift((s, t) => Node10(s, t.value)) 256 | implicit def node19_shift_node9 [NX] : Shift[Lp.type, Node19[NX], Node9[Node19[NX]]] = Shift((s, _) => Node9(s)) 257 | implicit def node22_shift_node10 [NX] : Shift[Literal[Int], Node22[NX], Node10[Node22[NX]]] = Shift((s, t) => Node10(s, t.value)) 258 | implicit def node22_shift_node9 [NX] : Shift[Lp.type, Node22[NX], Node9[Node22[NX]]] = Shift((s, _) => Node9(s)) 259 | implicit def node26_shift_node10 [NX] : Shift[Literal[Int], Node26[NX], Node10[Node26[NX]]] = Shift((s, t) => Node10(s, t.value)) 260 | implicit def node26_shift_node9 [NX] : Shift[Lp.type, Node26[NX], Node9[Node26[NX]]] = Shift((s, _) => Node9(s)) 261 | implicit def node1_reduce_node24_$$asterisk_3_9_5 [NX] : Reduce[Mul.type, Node1[Node5[Node9[Node3[NX]]]], Node24[Node3[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 262 | implicit def node1_reduce_node24_$$asterisk_6_9_5 [NX] : Reduce[Mul.type, Node1[Node5[Node9[Node6[NX]]]], Node24[Node6[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 263 | implicit def node1_reduce_node24_$$asterisk_14_9_5 : Reduce[Mul.type, Node1[Node5[Node9[Node14.type]]], Node24[Node14.type]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 264 | implicit def node1_reduce_node24_$$asterisk_26_9_5 [NX] : Reduce[Mul.type, Node1[Node5[Node9[Node26[NX]]]], Node24[Node26[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 265 | implicit def node1_reduce_node24_$$asterisk_19_9_5 [NX] : Reduce[Mul.type, Node1[Node5[Node9[Node19[NX]]]], Node24[Node19[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 266 | implicit def node1_reduce_node24_$$asterisk_9_9_5 [NX] : Reduce[Mul.type, Node1[Node5[Node9[Node9[NX]]]], Node24[Node9[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 267 | implicit def node1_reduce_node24_$$asterisk_22_9_5 [NX] : Reduce[Mul.type, Node1[Node5[Node9[Node22[NX]]]], Node24[Node22[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 268 | implicit def node1_reduce_node24_$$parenright_14_9_5 : Reduce[Rp.type, Node1[Node5[Node9[Node14.type]]], Node24[Node14.type]] = 
Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 269 | implicit def node1_reduce_node24_$$parenright_19_9_5 [NX] : Reduce[Rp.type, Node1[Node5[Node9[Node19[NX]]]], Node24[Node19[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 270 | implicit def node1_reduce_node24_$$parenright_3_9_5 [NX] : Reduce[Rp.type, Node1[Node5[Node9[Node3[NX]]]], Node24[Node3[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 271 | implicit def node1_reduce_node24_$$parenright_9_9_5 [NX] : Reduce[Rp.type, Node1[Node5[Node9[Node9[NX]]]], Node24[Node9[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 272 | implicit def node1_reduce_node24_$$parenright_6_9_5 [NX] : Reduce[Rp.type, Node1[Node5[Node9[Node6[NX]]]], Node24[Node6[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 273 | implicit def node1_reduce_node24_$$parenright_26_9_5 [NX] : Reduce[Rp.type, Node1[Node5[Node9[Node26[NX]]]], Node24[Node26[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 274 | implicit def node1_reduce_node24_$$parenright_22_9_5 [NX] : Reduce[Rp.type, Node1[Node5[Node9[Node22[NX]]]], Node24[Node22[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 275 | implicit def node1_reduce_node24_$$hyphen_6_9_5 [NX] : Reduce[Minus.type, Node1[Node5[Node9[Node6[NX]]]], Node24[Node6[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 276 | implicit def node1_reduce_node24_$$hyphen_22_9_5 [NX] : Reduce[Minus.type, Node1[Node5[Node9[Node22[NX]]]], Node24[Node22[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 277 | implicit def node1_reduce_node24_$$hyphen_9_9_5 [NX] : Reduce[Minus.type, Node1[Node5[Node9[Node9[NX]]]], Node24[Node9[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 278 | implicit def node1_reduce_node24_$$hyphen_3_9_5 [NX] : Reduce[Minus.type, Node1[Node5[Node9[Node3[NX]]]], Node24[Node3[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 279 | implicit def node1_reduce_node24_$$hyphen_26_9_5 [NX] : Reduce[Minus.type, Node1[Node5[Node9[Node26[NX]]]], Node24[Node26[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 280 | implicit def node1_reduce_node24_$$hyphen_14_9_5 : Reduce[Minus.type, Node1[Node5[Node9[Node14.type]]], Node24[Node14.type]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 281 | implicit def node1_reduce_node24_$$hyphen_19_9_5 [NX] : Reduce[Minus.type, Node1[Node5[Node9[Node19[NX]]]], Node24[Node19[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 282 | implicit def node1_reduce_node24_end_19_9_5 [NX] : Reduce[End.type, Node1[Node5[Node9[Node19[NX]]]], Node24[Node19[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 283 | implicit def node1_reduce_node24_end_22_9_5 [NX] : Reduce[End.type, Node1[Node5[Node9[Node22[NX]]]], Node24[Node22[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 284 | implicit def node1_reduce_node24_end_6_9_5 [NX] : Reduce[End.type, Node1[Node5[Node9[Node6[NX]]]], Node24[Node6[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 285 | implicit def node1_reduce_node24_end_26_9_5 [NX] : Reduce[End.type, Node1[Node5[Node9[Node26[NX]]]], Node24[Node26[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 286 | implicit def node1_reduce_node24_end_14_9_5 : Reduce[End.type, Node1[Node5[Node9[Node14.type]]], Node24[Node14.type]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 287 | implicit def 
node1_reduce_node24_end_3_9_5 [NX] : Reduce[End.type, Node1[Node5[Node9[Node3[NX]]]], Node24[Node3[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 288 | implicit def node1_reduce_node24_end_9_9_5 [NX] : Reduce[End.type, Node1[Node5[Node9[Node9[NX]]]], Node24[Node9[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 289 | implicit def node1_reduce_node24_$$plus_3_9_5 [NX] : Reduce[Plus.type, Node1[Node5[Node9[Node3[NX]]]], Node24[Node3[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 290 | implicit def node1_reduce_node24_$$plus_6_9_5 [NX] : Reduce[Plus.type, Node1[Node5[Node9[Node6[NX]]]], Node24[Node6[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 291 | implicit def node1_reduce_node24_$$plus_22_9_5 [NX] : Reduce[Plus.type, Node1[Node5[Node9[Node22[NX]]]], Node24[Node22[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 292 | implicit def node1_reduce_node24_$$plus_19_9_5 [NX] : Reduce[Plus.type, Node1[Node5[Node9[Node19[NX]]]], Node24[Node19[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 293 | implicit def node1_reduce_node24_$$plus_9_9_5 [NX] : Reduce[Plus.type, Node1[Node5[Node9[Node9[NX]]]], Node24[Node9[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 294 | implicit def node1_reduce_node24_$$plus_26_9_5 [NX] : Reduce[Plus.type, Node1[Node5[Node9[Node26[NX]]]], Node24[Node26[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 295 | implicit def node1_reduce_node24_$$plus_14_9_5 : Reduce[Plus.type, Node1[Node5[Node9[Node14.type]]], Node24[Node14.type]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 296 | implicit def node1_reduce_node24_$$slash_19_9_5 [NX] : Reduce[Div.type, Node1[Node5[Node9[Node19[NX]]]], Node24[Node19[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 297 | implicit def node1_reduce_node24_$$slash_26_9_5 [NX] : Reduce[Div.type, Node1[Node5[Node9[Node26[NX]]]], Node24[Node26[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 298 | implicit def node1_reduce_node24_$$slash_14_9_5 : Reduce[Div.type, Node1[Node5[Node9[Node14.type]]], Node24[Node14.type]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 299 | implicit def node1_reduce_node24_$$slash_22_9_5 [NX] : Reduce[Div.type, Node1[Node5[Node9[Node22[NX]]]], Node24[Node22[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 300 | implicit def node1_reduce_node24_$$slash_3_9_5 [NX] : Reduce[Div.type, Node1[Node5[Node9[Node3[NX]]]], Node24[Node3[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 301 | implicit def node1_reduce_node24_$$slash_6_9_5 [NX] : Reduce[Div.type, Node1[Node5[Node9[Node6[NX]]]], Node24[Node6[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 302 | implicit def node1_reduce_node24_$$slash_9_9_5 [NX] : Reduce[Div.type, Node1[Node5[Node9[Node9[NX]]]], Node24[Node9[NX]]] = Reduce(s => Node24(s.prev.prev.prev, Paren(s.prev.value))) 303 | implicit def node2_reduce_node6_eoi_6_0 [NX] : Reduce[EoI.type, Node2[Node0[Node6[NX]]], Node6[Node6[NX]]] = Reduce(s => Node6(s.prev.prev, Stmt(s.prev.value))) 304 | implicit def node2_reduce_node6_eoi_14_0 : Reduce[EoI.type, Node2[Node0[Node14.type]], Node6[Node14.type]] = Reduce(s => Node6(s.prev.prev, Stmt(s.prev.value))) 305 | implicit def node2_reduce_node6_int_6_0 [NX] : Reduce[Literal[Int], Node2[Node0[Node6[NX]]], Node6[Node6[NX]]] = Reduce(s => Node6(s.prev.prev, Stmt(s.prev.value))) 306 | implicit def 
node2_reduce_node6_int_14_0 : Reduce[Literal[Int], Node2[Node0[Node14.type]], Node6[Node14.type]] = Reduce(s => Node6(s.prev.prev, Stmt(s.prev.value))) 307 | implicit def node2_reduce_node6_$$parenleft_6_0 [NX] : Reduce[Lp.type, Node2[Node0[Node6[NX]]], Node6[Node6[NX]]] = Reduce(s => Node6(s.prev.prev, Stmt(s.prev.value))) 308 | implicit def node2_reduce_node6_$$parenleft_14_0 : Reduce[Lp.type, Node2[Node0[Node14.type]], Node6[Node14.type]] = Reduce(s => Node6(s.prev.prev, Stmt(s.prev.value))) 309 | implicit def node4_reduce_node16_eoi_6 [NX] : Reduce[EoI.type, Node4[Node6[NX]], Node16[Node6[NX]]] = Reduce(s => Node16(s.prev, s.value)) 310 | implicit def node4_reduce_node8_eoi_14 : Reduce[EoI.type, Node4[Node14.type], Node8[Node14.type]] = Reduce(s => Node8(s.prev, s.value)) 311 | implicit def node6_reduce_node16_eoi_6 [NX] : Reduce[EoI.type, Node6[Node6[NX]], Node16[Node6[NX]]] = Reduce(s => Node16(s.prev, s.value)) 312 | implicit def node6_reduce_node8_eoi_14 : Reduce[EoI.type, Node6[Node14.type], Node8[Node14.type]] = Reduce(s => Node8(s.prev, s.value)) 313 | implicit def node7_reduce_node0_end_6 [NX] : Reduce[End.type, Node7[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => Node0(s.prev, s.value)) 314 | implicit def node7_reduce_node5_end_9 [NX] : Reduce[End.type, Node7[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 315 | implicit def node7_reduce_node0_end_14 : Reduce[End.type, Node7[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 316 | implicit def node7_reduce_node0_$$hyphen_6 [NX] : Reduce[Minus.type, Node7[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => Node0(s.prev, s.value)) 317 | implicit def node7_reduce_node5_$$hyphen_9 [NX] : Reduce[Minus.type, Node7[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 318 | implicit def node7_reduce_node0_$$hyphen_14 : Reduce[Minus.type, Node7[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 319 | implicit def node7_reduce_node0_$$plus_6 [NX] : Reduce[Plus.type, Node7[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => Node0(s.prev, s.value)) 320 | implicit def node7_reduce_node5_$$plus_9 [NX] : Reduce[Plus.type, Node7[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 321 | implicit def node7_reduce_node0_$$plus_14 : Reduce[Plus.type, Node7[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 322 | implicit def node7_reduce_node0_$$parenright_6 [NX] : Reduce[Rp.type, Node7[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => Node0(s.prev, s.value)) 323 | implicit def node7_reduce_node5_$$parenright_9 [NX] : Reduce[Rp.type, Node7[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 324 | implicit def node7_reduce_node0_$$parenright_14 : Reduce[Rp.type, Node7[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 325 | implicit def node8_accept[NX]: Accept[Node8[NX], Program] = Accept(s => s.value) 326 | implicit def node10_reduce_node21_$$asterisk_26 [NX] : Reduce[Mul.type, Node10[Node26[NX]], Node21[Node26[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 327 | implicit def node10_reduce_node21_$$asterisk_19 [NX] : Reduce[Mul.type, Node10[Node19[NX]], Node21[Node19[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 328 | implicit def node10_reduce_node21_$$asterisk_14 : Reduce[Mul.type, Node10[Node14.type], Node21[Node14.type]] = Reduce(s => Node21(s.prev, Num(s.value))) 329 | implicit def node10_reduce_node21_$$asterisk_3 [NX] : Reduce[Mul.type, Node10[Node3[NX]], Node21[Node3[NX]]] = Reduce(s => Node21(s.prev, 
Num(s.value))) 330 | implicit def node10_reduce_node21_$$asterisk_6 [NX] : Reduce[Mul.type, Node10[Node6[NX]], Node21[Node6[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 331 | implicit def node10_reduce_node21_$$asterisk_22 [NX] : Reduce[Mul.type, Node10[Node22[NX]], Node21[Node22[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 332 | implicit def node10_reduce_node21_$$asterisk_9 [NX] : Reduce[Mul.type, Node10[Node9[NX]], Node21[Node9[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 333 | implicit def node10_reduce_node21_$$parenright_26 [NX] : Reduce[Rp.type, Node10[Node26[NX]], Node21[Node26[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 334 | implicit def node10_reduce_node21_$$parenright_22 [NX] : Reduce[Rp.type, Node10[Node22[NX]], Node21[Node22[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 335 | implicit def node10_reduce_node21_$$parenright_14 : Reduce[Rp.type, Node10[Node14.type], Node21[Node14.type]] = Reduce(s => Node21(s.prev, Num(s.value))) 336 | implicit def node10_reduce_node21_$$parenright_19 [NX] : Reduce[Rp.type, Node10[Node19[NX]], Node21[Node19[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 337 | implicit def node10_reduce_node21_$$parenright_9 [NX] : Reduce[Rp.type, Node10[Node9[NX]], Node21[Node9[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 338 | implicit def node10_reduce_node21_$$parenright_6 [NX] : Reduce[Rp.type, Node10[Node6[NX]], Node21[Node6[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 339 | implicit def node10_reduce_node21_$$parenright_3 [NX] : Reduce[Rp.type, Node10[Node3[NX]], Node21[Node3[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 340 | implicit def node10_reduce_node21_$$hyphen_22 [NX] : Reduce[Minus.type, Node10[Node22[NX]], Node21[Node22[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 341 | implicit def node10_reduce_node21_$$hyphen_19 [NX] : Reduce[Minus.type, Node10[Node19[NX]], Node21[Node19[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 342 | implicit def node10_reduce_node21_$$hyphen_6 [NX] : Reduce[Minus.type, Node10[Node6[NX]], Node21[Node6[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 343 | implicit def node10_reduce_node21_$$hyphen_3 [NX] : Reduce[Minus.type, Node10[Node3[NX]], Node21[Node3[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 344 | implicit def node10_reduce_node21_$$hyphen_14 : Reduce[Minus.type, Node10[Node14.type], Node21[Node14.type]] = Reduce(s => Node21(s.prev, Num(s.value))) 345 | implicit def node10_reduce_node21_$$hyphen_26 [NX] : Reduce[Minus.type, Node10[Node26[NX]], Node21[Node26[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 346 | implicit def node10_reduce_node21_$$hyphen_9 [NX] : Reduce[Minus.type, Node10[Node9[NX]], Node21[Node9[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 347 | implicit def node10_reduce_node21_end_6 [NX] : Reduce[End.type, Node10[Node6[NX]], Node21[Node6[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 348 | implicit def node10_reduce_node21_end_26 [NX] : Reduce[End.type, Node10[Node26[NX]], Node21[Node26[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 349 | implicit def node10_reduce_node21_end_3 [NX] : Reduce[End.type, Node10[Node3[NX]], Node21[Node3[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 350 | implicit def node10_reduce_node21_end_19 [NX] : Reduce[End.type, Node10[Node19[NX]], Node21[Node19[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 351 | implicit def node10_reduce_node21_end_9 [NX] : Reduce[End.type, Node10[Node9[NX]], Node21[Node9[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 352 | implicit def 
node10_reduce_node21_end_22 [NX] : Reduce[End.type, Node10[Node22[NX]], Node21[Node22[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 353 | implicit def node10_reduce_node21_end_14 : Reduce[End.type, Node10[Node14.type], Node21[Node14.type]] = Reduce(s => Node21(s.prev, Num(s.value))) 354 | implicit def node10_reduce_node21_$$plus_9 [NX] : Reduce[Plus.type, Node10[Node9[NX]], Node21[Node9[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 355 | implicit def node10_reduce_node21_$$plus_14 : Reduce[Plus.type, Node10[Node14.type], Node21[Node14.type]] = Reduce(s => Node21(s.prev, Num(s.value))) 356 | implicit def node10_reduce_node21_$$plus_6 [NX] : Reduce[Plus.type, Node10[Node6[NX]], Node21[Node6[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 357 | implicit def node10_reduce_node21_$$plus_19 [NX] : Reduce[Plus.type, Node10[Node19[NX]], Node21[Node19[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 358 | implicit def node10_reduce_node21_$$plus_26 [NX] : Reduce[Plus.type, Node10[Node26[NX]], Node21[Node26[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 359 | implicit def node10_reduce_node21_$$plus_3 [NX] : Reduce[Plus.type, Node10[Node3[NX]], Node21[Node3[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 360 | implicit def node10_reduce_node21_$$plus_22 [NX] : Reduce[Plus.type, Node10[Node22[NX]], Node21[Node22[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 361 | implicit def node10_reduce_node21_$$slash_22 [NX] : Reduce[Div.type, Node10[Node22[NX]], Node21[Node22[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 362 | implicit def node10_reduce_node21_$$slash_26 [NX] : Reduce[Div.type, Node10[Node26[NX]], Node21[Node26[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 363 | implicit def node10_reduce_node21_$$slash_14 : Reduce[Div.type, Node10[Node14.type], Node21[Node14.type]] = Reduce(s => Node21(s.prev, Num(s.value))) 364 | implicit def node10_reduce_node21_$$slash_6 [NX] : Reduce[Div.type, Node10[Node6[NX]], Node21[Node6[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 365 | implicit def node10_reduce_node21_$$slash_3 [NX] : Reduce[Div.type, Node10[Node3[NX]], Node21[Node3[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 366 | implicit def node10_reduce_node21_$$slash_9 [NX] : Reduce[Div.type, Node10[Node9[NX]], Node21[Node9[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 367 | implicit def node10_reduce_node21_$$slash_19 [NX] : Reduce[Div.type, Node10[Node19[NX]], Node21[Node19[NX]]] = Reduce(s => Node21(s.prev, Num(s.value))) 368 | implicit def node11_reduce_node0_end_6 [NX] : Reduce[End.type, Node11[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => Node0(s.prev, s.value)) 369 | implicit def node11_reduce_node5_end_9 [NX] : Reduce[End.type, Node11[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 370 | implicit def node11_reduce_node0_end_14 : Reduce[End.type, Node11[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 371 | implicit def node11_reduce_node0_$$hyphen_6 [NX] : Reduce[Minus.type, Node11[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => Node0(s.prev, s.value)) 372 | implicit def node11_reduce_node5_$$hyphen_9 [NX] : Reduce[Minus.type, Node11[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 373 | implicit def node11_reduce_node0_$$hyphen_14 : Reduce[Minus.type, Node11[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 374 | implicit def node11_reduce_node0_$$plus_6 [NX] : Reduce[Plus.type, Node11[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => Node0(s.prev, s.value)) 375 | implicit def 
node11_reduce_node5_$$plus_9 [NX] : Reduce[Plus.type, Node11[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 376 | implicit def node11_reduce_node0_$$plus_14 : Reduce[Plus.type, Node11[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 377 | implicit def node11_reduce_node0_$$parenright_6 [NX] : Reduce[Rp.type, Node11[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => Node0(s.prev, s.value)) 378 | implicit def node11_reduce_node5_$$parenright_9 [NX] : Reduce[Rp.type, Node11[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 379 | implicit def node11_reduce_node0_$$parenright_14 : Reduce[Rp.type, Node11[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 380 | implicit def node12_reduce_node20_$$asterisk_6_7_26 [NX] : Reduce[Mul.type, Node12[Node26[Node7[Node6[NX]]]], Node20[Node6[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 381 | implicit def node12_reduce_node20_$$asterisk_3_15_26 [NX] : Reduce[Mul.type, Node12[Node26[Node15[Node3[NX]]]], Node20[Node3[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 382 | implicit def node12_reduce_node20_$$asterisk_14_7_26 : Reduce[Mul.type, Node12[Node26[Node7[Node14.type]]], Node20[Node14.type]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 383 | implicit def node12_reduce_node20_$$asterisk_19_18_26 [NX] : Reduce[Mul.type, Node12[Node26[Node18[Node19[NX]]]], Node20[Node19[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 384 | implicit def node12_reduce_node20_$$asterisk_9_7_26 [NX] : Reduce[Mul.type, Node12[Node26[Node7[Node9[NX]]]], Node20[Node9[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 385 | implicit def node12_reduce_node20_$$parenright_19_18_26 [NX] : Reduce[Rp.type, Node12[Node26[Node18[Node19[NX]]]], Node20[Node19[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 386 | implicit def node12_reduce_node20_$$parenright_6_7_26 [NX] : Reduce[Rp.type, Node12[Node26[Node7[Node6[NX]]]], Node20[Node6[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 387 | implicit def node12_reduce_node20_$$parenright_14_7_26 : Reduce[Rp.type, Node12[Node26[Node7[Node14.type]]], Node20[Node14.type]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 388 | implicit def node12_reduce_node20_$$parenright_9_7_26 [NX] : Reduce[Rp.type, Node12[Node26[Node7[Node9[NX]]]], Node20[Node9[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 389 | implicit def node12_reduce_node20_$$parenright_3_15_26 [NX] : Reduce[Rp.type, Node12[Node26[Node15[Node3[NX]]]], Node20[Node3[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 390 | implicit def node12_reduce_node20_$$hyphen_3_15_26 [NX] : Reduce[Minus.type, Node12[Node26[Node15[Node3[NX]]]], Node20[Node3[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 391 | implicit def node12_reduce_node20_$$hyphen_9_7_26 [NX] : Reduce[Minus.type, Node12[Node26[Node7[Node9[NX]]]], Node20[Node9[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 392 | implicit def node12_reduce_node20_$$hyphen_19_18_26 [NX] : Reduce[Minus.type, Node12[Node26[Node18[Node19[NX]]]], Node20[Node19[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 393 | implicit def node12_reduce_node20_$$hyphen_14_7_26 : Reduce[Minus.type, 
Node12[Node26[Node7[Node14.type]]], Node20[Node14.type]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 394 | implicit def node12_reduce_node20_$$hyphen_6_7_26 [NX] : Reduce[Minus.type, Node12[Node26[Node7[Node6[NX]]]], Node20[Node6[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 395 | implicit def node12_reduce_node20_end_6_7_26 [NX] : Reduce[End.type, Node12[Node26[Node7[Node6[NX]]]], Node20[Node6[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 396 | implicit def node12_reduce_node20_end_9_7_26 [NX] : Reduce[End.type, Node12[Node26[Node7[Node9[NX]]]], Node20[Node9[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 397 | implicit def node12_reduce_node20_end_14_7_26 : Reduce[End.type, Node12[Node26[Node7[Node14.type]]], Node20[Node14.type]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 398 | implicit def node12_reduce_node20_end_3_15_26 [NX] : Reduce[End.type, Node12[Node26[Node15[Node3[NX]]]], Node20[Node3[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 399 | implicit def node12_reduce_node20_end_19_18_26 [NX] : Reduce[End.type, Node12[Node26[Node18[Node19[NX]]]], Node20[Node19[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 400 | implicit def node12_reduce_node20_$$plus_3_15_26 [NX] : Reduce[Plus.type, Node12[Node26[Node15[Node3[NX]]]], Node20[Node3[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 401 | implicit def node12_reduce_node20_$$plus_9_7_26 [NX] : Reduce[Plus.type, Node12[Node26[Node7[Node9[NX]]]], Node20[Node9[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 402 | implicit def node12_reduce_node20_$$plus_14_7_26 : Reduce[Plus.type, Node12[Node26[Node7[Node14.type]]], Node20[Node14.type]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 403 | implicit def node12_reduce_node20_$$plus_6_7_26 [NX] : Reduce[Plus.type, Node12[Node26[Node7[Node6[NX]]]], Node20[Node6[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 404 | implicit def node12_reduce_node20_$$plus_19_18_26 [NX] : Reduce[Plus.type, Node12[Node26[Node18[Node19[NX]]]], Node20[Node19[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 405 | implicit def node12_reduce_node20_$$slash_3_15_26 [NX] : Reduce[Div.type, Node12[Node26[Node15[Node3[NX]]]], Node20[Node3[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 406 | implicit def node12_reduce_node20_$$slash_19_18_26 [NX] : Reduce[Div.type, Node12[Node26[Node18[Node19[NX]]]], Node20[Node19[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 407 | implicit def node12_reduce_node20_$$slash_9_7_26 [NX] : Reduce[Div.type, Node12[Node26[Node7[Node9[NX]]]], Node20[Node9[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 408 | implicit def node12_reduce_node20_$$slash_6_7_26 [NX] : Reduce[Div.type, Node12[Node26[Node7[Node6[NX]]]], Node20[Node6[NX]]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 409 | implicit def node12_reduce_node20_$$slash_14_7_26 : Reduce[Div.type, Node12[Node26[Node7[Node14.type]]], Node20[Node14.type]] = Reduce(s => Node20(s.prev.prev.prev, Mul(s.prev.prev.value, s.value))) 410 | implicit def node13_reduce_node0_$$parenright_6 [NX] : Reduce[Rp.type, Node13[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => 
Node0(s.prev, s.value)) 411 | implicit def node13_reduce_node5_$$parenright_9 [NX] : Reduce[Rp.type, Node13[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 412 | implicit def node13_reduce_node0_$$parenright_14 : Reduce[Rp.type, Node13[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 413 | implicit def node13_reduce_node0_$$hyphen_6 [NX] : Reduce[Minus.type, Node13[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => Node0(s.prev, s.value)) 414 | implicit def node13_reduce_node5_$$hyphen_9 [NX] : Reduce[Minus.type, Node13[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 415 | implicit def node13_reduce_node0_$$hyphen_14 : Reduce[Minus.type, Node13[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 416 | implicit def node13_reduce_node0_$$plus_6 [NX] : Reduce[Plus.type, Node13[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => Node0(s.prev, s.value)) 417 | implicit def node13_reduce_node5_$$plus_9 [NX] : Reduce[Plus.type, Node13[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 418 | implicit def node13_reduce_node0_$$plus_14 : Reduce[Plus.type, Node13[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 419 | implicit def node13_reduce_node0_end_6 [NX] : Reduce[End.type, Node13[Node6[NX]], Node0[Node6[NX]]] = Reduce(s => Node0(s.prev, s.value)) 420 | implicit def node13_reduce_node5_end_9 [NX] : Reduce[End.type, Node13[Node9[NX]], Node5[Node9[NX]]] = Reduce(s => Node5(s.prev, s.value)) 421 | implicit def node13_reduce_node0_end_14 : Reduce[End.type, Node13[Node14.type], Node0[Node14.type]] = Reduce(s => Node0(s.prev, s.value)) 422 | implicit def node15_reduce_node11_end_6_0_3 [NX] : Reduce[End.type, Node15[Node3[Node0[Node6[NX]]]], Node11[Node6[NX]]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 423 | implicit def node15_reduce_node11_end_14_0_3 : Reduce[End.type, Node15[Node3[Node0[Node14.type]]], Node11[Node14.type]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 424 | implicit def node15_reduce_node11_end_9_5_3 [NX] : Reduce[End.type, Node15[Node3[Node5[Node9[NX]]]], Node11[Node9[NX]]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 425 | implicit def node15_reduce_node11_$$hyphen_6_0_3 [NX] : Reduce[Minus.type, Node15[Node3[Node0[Node6[NX]]]], Node11[Node6[NX]]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 426 | implicit def node15_reduce_node11_$$hyphen_14_0_3 : Reduce[Minus.type, Node15[Node3[Node0[Node14.type]]], Node11[Node14.type]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 427 | implicit def node15_reduce_node11_$$hyphen_9_5_3 [NX] : Reduce[Minus.type, Node15[Node3[Node5[Node9[NX]]]], Node11[Node9[NX]]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 428 | implicit def node15_reduce_node11_$$plus_6_0_3 [NX] : Reduce[Plus.type, Node15[Node3[Node0[Node6[NX]]]], Node11[Node6[NX]]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 429 | implicit def node15_reduce_node11_$$plus_14_0_3 : Reduce[Plus.type, Node15[Node3[Node0[Node14.type]]], Node11[Node14.type]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 430 | implicit def node15_reduce_node11_$$plus_9_5_3 [NX] : Reduce[Plus.type, Node15[Node3[Node5[Node9[NX]]]], Node11[Node9[NX]]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 431 | implicit def node15_reduce_node11_$$parenright_6_0_3 [NX] : 
Reduce[Rp.type, Node15[Node3[Node0[Node6[NX]]]], Node11[Node6[NX]]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 432 | implicit def node15_reduce_node11_$$parenright_14_0_3 : Reduce[Rp.type, Node15[Node3[Node0[Node14.type]]], Node11[Node14.type]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 433 | implicit def node15_reduce_node11_$$parenright_9_5_3 [NX] : Reduce[Rp.type, Node15[Node3[Node5[Node9[NX]]]], Node11[Node9[NX]]] = Reduce(s => Node11(s.prev.prev.prev, Add(s.prev.prev.value, s.value))) 434 | implicit def node16_reduce_node4_eoi_6_6 [NX] : Reduce[EoI.type, Node16[Node6[Node6[NX]]], Node4[Node6[NX]]] = Reduce(s => Node4(s.prev.prev, Stmts(s.prev.value, s.value))) 435 | implicit def node16_reduce_node4_eoi_14_6 : Reduce[EoI.type, Node16[Node6[Node14.type]], Node4[Node14.type]] = Reduce(s => Node4(s.prev.prev, Stmts(s.prev.value, s.value))) 436 | implicit def node17_reduce_node7_$$asterisk_9 [NX] : Reduce[Mul.type, Node17[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 437 | implicit def node17_reduce_node18_$$asterisk_19 [NX] : Reduce[Mul.type, Node17[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 438 | implicit def node17_reduce_node7_$$asterisk_6 [NX] : Reduce[Mul.type, Node17[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 439 | implicit def node17_reduce_node15_$$asterisk_3 [NX] : Reduce[Mul.type, Node17[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 440 | implicit def node17_reduce_node7_$$asterisk_14 : Reduce[Mul.type, Node17[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 441 | implicit def node17_reduce_node7_$$parenright_6 [NX] : Reduce[Rp.type, Node17[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 442 | implicit def node17_reduce_node18_$$parenright_19 [NX] : Reduce[Rp.type, Node17[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 443 | implicit def node17_reduce_node15_$$parenright_3 [NX] : Reduce[Rp.type, Node17[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 444 | implicit def node17_reduce_node7_$$parenright_9 [NX] : Reduce[Rp.type, Node17[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 445 | implicit def node17_reduce_node7_$$parenright_14 : Reduce[Rp.type, Node17[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 446 | implicit def node17_reduce_node7_$$hyphen_14 : Reduce[Minus.type, Node17[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 447 | implicit def node17_reduce_node18_$$hyphen_19 [NX] : Reduce[Minus.type, Node17[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 448 | implicit def node17_reduce_node15_$$hyphen_3 [NX] : Reduce[Minus.type, Node17[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 449 | implicit def node17_reduce_node7_$$hyphen_9 [NX] : Reduce[Minus.type, Node17[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 450 | implicit def node17_reduce_node7_$$hyphen_6 [NX] : Reduce[Minus.type, Node17[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 451 | implicit def node17_reduce_node7_end_9 [NX] : Reduce[End.type, Node17[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 452 | implicit def node17_reduce_node15_end_3 [NX] : Reduce[End.type, Node17[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 453 | implicit def node17_reduce_node7_end_14 : 
Reduce[End.type, Node17[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 454 | implicit def node17_reduce_node18_end_19 [NX] : Reduce[End.type, Node17[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 455 | implicit def node17_reduce_node7_end_6 [NX] : Reduce[End.type, Node17[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 456 | implicit def node17_reduce_node15_$$plus_3 [NX] : Reduce[Plus.type, Node17[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 457 | implicit def node17_reduce_node7_$$plus_9 [NX] : Reduce[Plus.type, Node17[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 458 | implicit def node17_reduce_node7_$$plus_14 : Reduce[Plus.type, Node17[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 459 | implicit def node17_reduce_node7_$$plus_6 [NX] : Reduce[Plus.type, Node17[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 460 | implicit def node17_reduce_node18_$$plus_19 [NX] : Reduce[Plus.type, Node17[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 461 | implicit def node17_reduce_node7_$$slash_9 [NX] : Reduce[Div.type, Node17[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 462 | implicit def node17_reduce_node18_$$slash_19 [NX] : Reduce[Div.type, Node17[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 463 | implicit def node17_reduce_node7_$$slash_14 : Reduce[Div.type, Node17[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 464 | implicit def node17_reduce_node15_$$slash_3 [NX] : Reduce[Div.type, Node17[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 465 | implicit def node17_reduce_node7_$$slash_6 [NX] : Reduce[Div.type, Node17[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 466 | implicit def node18_reduce_node13_end_6_0_19 [NX] : Reduce[End.type, Node18[Node19[Node0[Node6[NX]]]], Node13[Node6[NX]]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 467 | implicit def node18_reduce_node13_end_14_0_19 : Reduce[End.type, Node18[Node19[Node0[Node14.type]]], Node13[Node14.type]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 468 | implicit def node18_reduce_node13_end_9_5_19 [NX] : Reduce[End.type, Node18[Node19[Node5[Node9[NX]]]], Node13[Node9[NX]]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 469 | implicit def node18_reduce_node13_$$hyphen_6_0_19 [NX] : Reduce[Minus.type, Node18[Node19[Node0[Node6[NX]]]], Node13[Node6[NX]]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 470 | implicit def node18_reduce_node13_$$hyphen_14_0_19 : Reduce[Minus.type, Node18[Node19[Node0[Node14.type]]], Node13[Node14.type]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 471 | implicit def node18_reduce_node13_$$hyphen_9_5_19 [NX] : Reduce[Minus.type, Node18[Node19[Node5[Node9[NX]]]], Node13[Node9[NX]]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 472 | implicit def node18_reduce_node13_$$plus_6_0_19 [NX] : Reduce[Plus.type, Node18[Node19[Node0[Node6[NX]]]], Node13[Node6[NX]]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 473 | implicit def node18_reduce_node13_$$plus_14_0_19 : Reduce[Plus.type, Node18[Node19[Node0[Node14.type]]], Node13[Node14.type]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 474 | implicit def 
node18_reduce_node13_$$plus_9_5_19 [NX] : Reduce[Plus.type, Node18[Node19[Node5[Node9[NX]]]], Node13[Node9[NX]]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 475 | implicit def node18_reduce_node13_$$parenright_6_0_19 [NX] : Reduce[Rp.type, Node18[Node19[Node0[Node6[NX]]]], Node13[Node6[NX]]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 476 | implicit def node18_reduce_node13_$$parenright_14_0_19 : Reduce[Rp.type, Node18[Node19[Node0[Node14.type]]], Node13[Node14.type]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 477 | implicit def node18_reduce_node13_$$parenright_9_5_19 [NX] : Reduce[Rp.type, Node18[Node19[Node5[Node9[NX]]]], Node13[Node9[NX]]] = Reduce(s => Node13(s.prev.prev.prev, Sub(s.prev.prev.value, s.value))) 478 | implicit def node20_reduce_node7_$$asterisk_14 : Reduce[Mul.type, Node20[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 479 | implicit def node20_reduce_node15_$$asterisk_3 [NX] : Reduce[Mul.type, Node20[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 480 | implicit def node20_reduce_node7_$$asterisk_6 [NX] : Reduce[Mul.type, Node20[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 481 | implicit def node20_reduce_node18_$$asterisk_19 [NX] : Reduce[Mul.type, Node20[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 482 | implicit def node20_reduce_node7_$$asterisk_9 [NX] : Reduce[Mul.type, Node20[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 483 | implicit def node20_reduce_node15_$$parenright_3 [NX] : Reduce[Rp.type, Node20[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 484 | implicit def node20_reduce_node7_$$parenright_9 [NX] : Reduce[Rp.type, Node20[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 485 | implicit def node20_reduce_node7_$$parenright_6 [NX] : Reduce[Rp.type, Node20[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 486 | implicit def node20_reduce_node7_$$parenright_14 : Reduce[Rp.type, Node20[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 487 | implicit def node20_reduce_node18_$$parenright_19 [NX] : Reduce[Rp.type, Node20[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 488 | implicit def node20_reduce_node7_$$hyphen_14 : Reduce[Minus.type, Node20[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 489 | implicit def node20_reduce_node7_$$hyphen_6 [NX] : Reduce[Minus.type, Node20[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 490 | implicit def node20_reduce_node18_$$hyphen_19 [NX] : Reduce[Minus.type, Node20[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 491 | implicit def node20_reduce_node7_$$hyphen_9 [NX] : Reduce[Minus.type, Node20[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 492 | implicit def node20_reduce_node15_$$hyphen_3 [NX] : Reduce[Minus.type, Node20[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 493 | implicit def node20_reduce_node18_end_19 [NX] : Reduce[End.type, Node20[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 494 | implicit def node20_reduce_node7_end_6 [NX] : Reduce[End.type, Node20[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 495 | implicit def node20_reduce_node7_end_14 : Reduce[End.type, Node20[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 496 | 
implicit def node20_reduce_node7_end_9 [NX] : Reduce[End.type, Node20[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 497 | implicit def node20_reduce_node15_end_3 [NX] : Reduce[End.type, Node20[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 498 | implicit def node20_reduce_node7_$$plus_6 [NX] : Reduce[Plus.type, Node20[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 499 | implicit def node20_reduce_node18_$$plus_19 [NX] : Reduce[Plus.type, Node20[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 500 | implicit def node20_reduce_node15_$$plus_3 [NX] : Reduce[Plus.type, Node20[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 501 | implicit def node20_reduce_node7_$$plus_14 : Reduce[Plus.type, Node20[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 502 | implicit def node20_reduce_node7_$$plus_9 [NX] : Reduce[Plus.type, Node20[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 503 | implicit def node20_reduce_node7_$$slash_9 [NX] : Reduce[Div.type, Node20[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 504 | implicit def node20_reduce_node18_$$slash_19 [NX] : Reduce[Div.type, Node20[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 505 | implicit def node20_reduce_node7_$$slash_14 : Reduce[Div.type, Node20[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 506 | implicit def node20_reduce_node7_$$slash_6 [NX] : Reduce[Div.type, Node20[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 507 | implicit def node20_reduce_node15_$$slash_3 [NX] : Reduce[Div.type, Node20[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 508 | implicit def node21_reduce_node23_$$asterisk_14 : Reduce[Mul.type, Node21[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 509 | implicit def node21_reduce_node23_$$asterisk_3 [NX] : Reduce[Mul.type, Node21[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 510 | implicit def node21_reduce_node12_$$asterisk_26 [NX] : Reduce[Mul.type, Node21[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 511 | implicit def node21_reduce_node23_$$asterisk_19 [NX] : Reduce[Mul.type, Node21[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 512 | implicit def node21_reduce_node25_$$asterisk_22 [NX] : Reduce[Mul.type, Node21[Node22[NX]], Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 513 | implicit def node21_reduce_node23_$$asterisk_9 [NX] : Reduce[Mul.type, Node21[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 514 | implicit def node21_reduce_node23_$$asterisk_6 [NX] : Reduce[Mul.type, Node21[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 515 | implicit def node21_reduce_node25_$$parenright_22 [NX] : Reduce[Rp.type, Node21[Node22[NX]], Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 516 | implicit def node21_reduce_node23_$$parenright_3 [NX] : Reduce[Rp.type, Node21[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 517 | implicit def node21_reduce_node23_$$parenright_19 [NX] : Reduce[Rp.type, Node21[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 518 | implicit def node21_reduce_node23_$$parenright_14 : Reduce[Rp.type, Node21[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 519 | implicit def node21_reduce_node23_$$parenright_6 
[NX] : Reduce[Rp.type, Node21[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 520 | implicit def node21_reduce_node12_$$parenright_26 [NX] : Reduce[Rp.type, Node21[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 521 | implicit def node21_reduce_node23_$$parenright_9 [NX] : Reduce[Rp.type, Node21[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 522 | implicit def node21_reduce_node12_$$hyphen_26 [NX] : Reduce[Minus.type, Node21[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 523 | implicit def node21_reduce_node25_$$hyphen_22 [NX] : Reduce[Minus.type, Node21[Node22[NX]], Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 524 | implicit def node21_reduce_node23_$$hyphen_19 [NX] : Reduce[Minus.type, Node21[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 525 | implicit def node21_reduce_node23_$$hyphen_9 [NX] : Reduce[Minus.type, Node21[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 526 | implicit def node21_reduce_node23_$$hyphen_3 [NX] : Reduce[Minus.type, Node21[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 527 | implicit def node21_reduce_node23_$$hyphen_6 [NX] : Reduce[Minus.type, Node21[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 528 | implicit def node21_reduce_node23_$$hyphen_14 : Reduce[Minus.type, Node21[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 529 | implicit def node21_reduce_node25_end_22 [NX] : Reduce[End.type, Node21[Node22[NX]], Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 530 | implicit def node21_reduce_node23_end_6 [NX] : Reduce[End.type, Node21[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 531 | implicit def node21_reduce_node12_end_26 [NX] : Reduce[End.type, Node21[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 532 | implicit def node21_reduce_node23_end_3 [NX] : Reduce[End.type, Node21[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 533 | implicit def node21_reduce_node23_end_19 [NX] : Reduce[End.type, Node21[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 534 | implicit def node21_reduce_node23_end_14 : Reduce[End.type, Node21[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 535 | implicit def node21_reduce_node23_end_9 [NX] : Reduce[End.type, Node21[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 536 | implicit def node21_reduce_node23_$$plus_19 [NX] : Reduce[Plus.type, Node21[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 537 | implicit def node21_reduce_node12_$$plus_26 [NX] : Reduce[Plus.type, Node21[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 538 | implicit def node21_reduce_node23_$$plus_6 [NX] : Reduce[Plus.type, Node21[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 539 | implicit def node21_reduce_node23_$$plus_9 [NX] : Reduce[Plus.type, Node21[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 540 | implicit def node21_reduce_node23_$$plus_14 : Reduce[Plus.type, Node21[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 541 | implicit def node21_reduce_node23_$$plus_3 [NX] : Reduce[Plus.type, Node21[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 542 | implicit def node21_reduce_node25_$$plus_22 [NX] : Reduce[Plus.type, Node21[Node22[NX]], 
Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 543 | implicit def node21_reduce_node23_$$slash_14 : Reduce[Div.type, Node21[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 544 | implicit def node21_reduce_node23_$$slash_9 [NX] : Reduce[Div.type, Node21[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 545 | implicit def node21_reduce_node23_$$slash_6 [NX] : Reduce[Div.type, Node21[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 546 | implicit def node21_reduce_node23_$$slash_19 [NX] : Reduce[Div.type, Node21[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 547 | implicit def node21_reduce_node25_$$slash_22 [NX] : Reduce[Div.type, Node21[Node22[NX]], Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 548 | implicit def node21_reduce_node23_$$slash_3 [NX] : Reduce[Div.type, Node21[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 549 | implicit def node21_reduce_node12_$$slash_26 [NX] : Reduce[Div.type, Node21[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 550 | implicit def node23_reduce_node18_$$asterisk_19 [NX] : Reduce[Mul.type, Node23[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 551 | implicit def node23_reduce_node7_$$asterisk_14 : Reduce[Mul.type, Node23[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 552 | implicit def node23_reduce_node7_$$asterisk_6 [NX] : Reduce[Mul.type, Node23[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 553 | implicit def node23_reduce_node15_$$asterisk_3 [NX] : Reduce[Mul.type, Node23[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 554 | implicit def node23_reduce_node7_$$asterisk_9 [NX] : Reduce[Mul.type, Node23[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 555 | implicit def node23_reduce_node7_$$parenright_14 : Reduce[Rp.type, Node23[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 556 | implicit def node23_reduce_node15_$$parenright_3 [NX] : Reduce[Rp.type, Node23[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 557 | implicit def node23_reduce_node18_$$parenright_19 [NX] : Reduce[Rp.type, Node23[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 558 | implicit def node23_reduce_node7_$$parenright_9 [NX] : Reduce[Rp.type, Node23[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 559 | implicit def node23_reduce_node7_$$parenright_6 [NX] : Reduce[Rp.type, Node23[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 560 | implicit def node23_reduce_node18_$$hyphen_19 [NX] : Reduce[Minus.type, Node23[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 561 | implicit def node23_reduce_node7_$$hyphen_6 [NX] : Reduce[Minus.type, Node23[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 562 | implicit def node23_reduce_node15_$$hyphen_3 [NX] : Reduce[Minus.type, Node23[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 563 | implicit def node23_reduce_node7_$$hyphen_9 [NX] : Reduce[Minus.type, Node23[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 564 | implicit def node23_reduce_node7_$$hyphen_14 : Reduce[Minus.type, Node23[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 565 | implicit def node23_reduce_node7_end_6 [NX] : Reduce[End.type, Node23[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => 
Node7(s.prev, s.value)) 566 | implicit def node23_reduce_node7_end_9 [NX] : Reduce[End.type, Node23[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 567 | implicit def node23_reduce_node15_end_3 [NX] : Reduce[End.type, Node23[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 568 | implicit def node23_reduce_node7_end_14 : Reduce[End.type, Node23[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 569 | implicit def node23_reduce_node18_end_19 [NX] : Reduce[End.type, Node23[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 570 | implicit def node23_reduce_node7_$$plus_14 : Reduce[Plus.type, Node23[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 571 | implicit def node23_reduce_node7_$$plus_6 [NX] : Reduce[Plus.type, Node23[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 572 | implicit def node23_reduce_node18_$$plus_19 [NX] : Reduce[Plus.type, Node23[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 573 | implicit def node23_reduce_node15_$$plus_3 [NX] : Reduce[Plus.type, Node23[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 574 | implicit def node23_reduce_node7_$$plus_9 [NX] : Reduce[Plus.type, Node23[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 575 | implicit def node23_reduce_node18_$$slash_19 [NX] : Reduce[Div.type, Node23[Node19[NX]], Node18[Node19[NX]]] = Reduce(s => Node18(s.prev, s.value)) 576 | implicit def node23_reduce_node7_$$slash_6 [NX] : Reduce[Div.type, Node23[Node6[NX]], Node7[Node6[NX]]] = Reduce(s => Node7(s.prev, s.value)) 577 | implicit def node23_reduce_node7_$$slash_14 : Reduce[Div.type, Node23[Node14.type], Node7[Node14.type]] = Reduce(s => Node7(s.prev, s.value)) 578 | implicit def node23_reduce_node15_$$slash_3 [NX] : Reduce[Div.type, Node23[Node3[NX]], Node15[Node3[NX]]] = Reduce(s => Node15(s.prev, s.value)) 579 | implicit def node23_reduce_node7_$$slash_9 [NX] : Reduce[Div.type, Node23[Node9[NX]], Node7[Node9[NX]]] = Reduce(s => Node7(s.prev, s.value)) 580 | implicit def node24_reduce_node23_$$asterisk_3 [NX] : Reduce[Mul.type, Node24[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 581 | implicit def node24_reduce_node25_$$asterisk_22 [NX] : Reduce[Mul.type, Node24[Node22[NX]], Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 582 | implicit def node24_reduce_node23_$$asterisk_19 [NX] : Reduce[Mul.type, Node24[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 583 | implicit def node24_reduce_node23_$$asterisk_6 [NX] : Reduce[Mul.type, Node24[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 584 | implicit def node24_reduce_node23_$$asterisk_9 [NX] : Reduce[Mul.type, Node24[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 585 | implicit def node24_reduce_node12_$$asterisk_26 [NX] : Reduce[Mul.type, Node24[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 586 | implicit def node24_reduce_node23_$$asterisk_14 : Reduce[Mul.type, Node24[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 587 | implicit def node24_reduce_node23_$$parenright_6 [NX] : Reduce[Rp.type, Node24[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 588 | implicit def node24_reduce_node12_$$parenright_26 [NX] : Reduce[Rp.type, Node24[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 589 | implicit def 
node24_reduce_node23_$$parenright_19 [NX] : Reduce[Rp.type, Node24[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 590 | implicit def node24_reduce_node23_$$parenright_14 : Reduce[Rp.type, Node24[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 591 | implicit def node24_reduce_node23_$$parenright_3 [NX] : Reduce[Rp.type, Node24[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 592 | implicit def node24_reduce_node23_$$parenright_9 [NX] : Reduce[Rp.type, Node24[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 593 | implicit def node24_reduce_node25_$$parenright_22 [NX] : Reduce[Rp.type, Node24[Node22[NX]], Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 594 | implicit def node24_reduce_node25_$$hyphen_22 [NX] : Reduce[Minus.type, Node24[Node22[NX]], Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 595 | implicit def node24_reduce_node23_$$hyphen_3 [NX] : Reduce[Minus.type, Node24[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 596 | implicit def node24_reduce_node23_$$hyphen_6 [NX] : Reduce[Minus.type, Node24[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 597 | implicit def node24_reduce_node23_$$hyphen_19 [NX] : Reduce[Minus.type, Node24[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 598 | implicit def node24_reduce_node23_$$hyphen_9 [NX] : Reduce[Minus.type, Node24[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 599 | implicit def node24_reduce_node12_$$hyphen_26 [NX] : Reduce[Minus.type, Node24[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 600 | implicit def node24_reduce_node23_$$hyphen_14 : Reduce[Minus.type, Node24[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 601 | implicit def node24_reduce_node23_end_3 [NX] : Reduce[End.type, Node24[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 602 | implicit def node24_reduce_node25_end_22 [NX] : Reduce[End.type, Node24[Node22[NX]], Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 603 | implicit def node24_reduce_node23_end_19 [NX] : Reduce[End.type, Node24[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 604 | implicit def node24_reduce_node23_end_6 [NX] : Reduce[End.type, Node24[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 605 | implicit def node24_reduce_node12_end_26 [NX] : Reduce[End.type, Node24[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 606 | implicit def node24_reduce_node23_end_9 [NX] : Reduce[End.type, Node24[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 607 | implicit def node24_reduce_node23_end_14 : Reduce[End.type, Node24[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 608 | implicit def node24_reduce_node23_$$plus_14 : Reduce[Plus.type, Node24[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 609 | implicit def node24_reduce_node25_$$plus_22 [NX] : Reduce[Plus.type, Node24[Node22[NX]], Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 610 | implicit def node24_reduce_node23_$$plus_6 [NX] : Reduce[Plus.type, Node24[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 611 | implicit def node24_reduce_node23_$$plus_9 [NX] : Reduce[Plus.type, Node24[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 612 | implicit def node24_reduce_node23_$$plus_19 [NX] : 
Reduce[Plus.type, Node24[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 613 | implicit def node24_reduce_node23_$$plus_3 [NX] : Reduce[Plus.type, Node24[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 614 | implicit def node24_reduce_node12_$$plus_26 [NX] : Reduce[Plus.type, Node24[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 615 | implicit def node24_reduce_node12_$$slash_26 [NX] : Reduce[Div.type, Node24[Node26[NX]], Node12[Node26[NX]]] = Reduce(s => Node12(s.prev, s.value)) 616 | implicit def node24_reduce_node25_$$slash_22 [NX] : Reduce[Div.type, Node24[Node22[NX]], Node25[Node22[NX]]] = Reduce(s => Node25(s.prev, s.value)) 617 | implicit def node24_reduce_node23_$$slash_9 [NX] : Reduce[Div.type, Node24[Node9[NX]], Node23[Node9[NX]]] = Reduce(s => Node23(s.prev, s.value)) 618 | implicit def node24_reduce_node23_$$slash_14 : Reduce[Div.type, Node24[Node14.type], Node23[Node14.type]] = Reduce(s => Node23(s.prev, s.value)) 619 | implicit def node24_reduce_node23_$$slash_6 [NX] : Reduce[Div.type, Node24[Node6[NX]], Node23[Node6[NX]]] = Reduce(s => Node23(s.prev, s.value)) 620 | implicit def node24_reduce_node23_$$slash_19 [NX] : Reduce[Div.type, Node24[Node19[NX]], Node23[Node19[NX]]] = Reduce(s => Node23(s.prev, s.value)) 621 | implicit def node24_reduce_node23_$$slash_3 [NX] : Reduce[Div.type, Node24[Node3[NX]], Node23[Node3[NX]]] = Reduce(s => Node23(s.prev, s.value)) 622 | implicit def node25_reduce_node17_$$asterisk_9_7_22 [NX] : Reduce[Mul.type, Node25[Node22[Node7[Node9[NX]]]], Node17[Node9[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 623 | implicit def node25_reduce_node17_$$asterisk_6_7_22 [NX] : Reduce[Mul.type, Node25[Node22[Node7[Node6[NX]]]], Node17[Node6[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 624 | implicit def node25_reduce_node17_$$asterisk_19_18_22 [NX] : Reduce[Mul.type, Node25[Node22[Node18[Node19[NX]]]], Node17[Node19[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 625 | implicit def node25_reduce_node17_$$asterisk_3_15_22 [NX] : Reduce[Mul.type, Node25[Node22[Node15[Node3[NX]]]], Node17[Node3[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 626 | implicit def node25_reduce_node17_$$asterisk_14_7_22 : Reduce[Mul.type, Node25[Node22[Node7[Node14.type]]], Node17[Node14.type]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 627 | implicit def node25_reduce_node17_$$parenright_6_7_22 [NX] : Reduce[Rp.type, Node25[Node22[Node7[Node6[NX]]]], Node17[Node6[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 628 | implicit def node25_reduce_node17_$$parenright_9_7_22 [NX] : Reduce[Rp.type, Node25[Node22[Node7[Node9[NX]]]], Node17[Node9[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 629 | implicit def node25_reduce_node17_$$parenright_14_7_22 : Reduce[Rp.type, Node25[Node22[Node7[Node14.type]]], Node17[Node14.type]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 630 | implicit def node25_reduce_node17_$$parenright_19_18_22 [NX] : Reduce[Rp.type, Node25[Node22[Node18[Node19[NX]]]], Node17[Node19[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 631 | implicit def node25_reduce_node17_$$parenright_3_15_22 [NX] : Reduce[Rp.type, Node25[Node22[Node15[Node3[NX]]]], Node17[Node3[NX]]] = Reduce(s => Node17(s.prev.prev.prev, 
Div(s.prev.prev.value, s.value))) 632 | implicit def node25_reduce_node17_$$hyphen_6_7_22 [NX] : Reduce[Minus.type, Node25[Node22[Node7[Node6[NX]]]], Node17[Node6[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 633 | implicit def node25_reduce_node17_$$hyphen_19_18_22 [NX] : Reduce[Minus.type, Node25[Node22[Node18[Node19[NX]]]], Node17[Node19[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 634 | implicit def node25_reduce_node17_$$hyphen_14_7_22 : Reduce[Minus.type, Node25[Node22[Node7[Node14.type]]], Node17[Node14.type]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 635 | implicit def node25_reduce_node17_$$hyphen_9_7_22 [NX] : Reduce[Minus.type, Node25[Node22[Node7[Node9[NX]]]], Node17[Node9[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 636 | implicit def node25_reduce_node17_$$hyphen_3_15_22 [NX] : Reduce[Minus.type, Node25[Node22[Node15[Node3[NX]]]], Node17[Node3[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 637 | implicit def node25_reduce_node17_end_6_7_22 [NX] : Reduce[End.type, Node25[Node22[Node7[Node6[NX]]]], Node17[Node6[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 638 | implicit def node25_reduce_node17_end_9_7_22 [NX] : Reduce[End.type, Node25[Node22[Node7[Node9[NX]]]], Node17[Node9[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 639 | implicit def node25_reduce_node17_end_14_7_22 : Reduce[End.type, Node25[Node22[Node7[Node14.type]]], Node17[Node14.type]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 640 | implicit def node25_reduce_node17_end_19_18_22 [NX] : Reduce[End.type, Node25[Node22[Node18[Node19[NX]]]], Node17[Node19[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 641 | implicit def node25_reduce_node17_end_3_15_22 [NX] : Reduce[End.type, Node25[Node22[Node15[Node3[NX]]]], Node17[Node3[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 642 | implicit def node25_reduce_node17_$$plus_3_15_22 [NX] : Reduce[Plus.type, Node25[Node22[Node15[Node3[NX]]]], Node17[Node3[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 643 | implicit def node25_reduce_node17_$$plus_14_7_22 : Reduce[Plus.type, Node25[Node22[Node7[Node14.type]]], Node17[Node14.type]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 644 | implicit def node25_reduce_node17_$$plus_6_7_22 [NX] : Reduce[Plus.type, Node25[Node22[Node7[Node6[NX]]]], Node17[Node6[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 645 | implicit def node25_reduce_node17_$$plus_9_7_22 [NX] : Reduce[Plus.type, Node25[Node22[Node7[Node9[NX]]]], Node17[Node9[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 646 | implicit def node25_reduce_node17_$$plus_19_18_22 [NX] : Reduce[Plus.type, Node25[Node22[Node18[Node19[NX]]]], Node17[Node19[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 647 | implicit def node25_reduce_node17_$$slash_3_15_22 [NX] : Reduce[Div.type, Node25[Node22[Node15[Node3[NX]]]], Node17[Node3[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 648 | implicit def node25_reduce_node17_$$slash_19_18_22 [NX] : Reduce[Div.type, Node25[Node22[Node18[Node19[NX]]]], Node17[Node19[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value))) 649 | 
implicit def node25_reduce_node17_$$slash_9_7_22 [NX] : Reduce[Div.type, Node25[Node22[Node7[Node9[NX]]]], Node17[Node9[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value)))
650 | implicit def node25_reduce_node17_$$slash_14_7_22 : Reduce[Div.type, Node25[Node22[Node7[Node14.type]]], Node17[Node14.type]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value)))
651 | implicit def node25_reduce_node17_$$slash_6_7_22 [NX] : Reduce[Div.type, Node25[Node22[Node7[Node6[NX]]]], Node17[Node6[NX]]] = Reduce(s => Node17(s.prev.prev.prev, Div(s.prev.prev.value, s.value)))
652 |
653 | }
654 |
655 | object Main2 {
656 |   import MathDSL2._
657 |
658 |   def main (args: Array[String]): Unit = {
659 |
660 |     val program: Program = int(10) $$plus int(2) $$asterisk $$parenleft int (10) $$slash int(5) $$parenright end
661 |     println(program)
662 |
663 |     val program2: Program = int(10).$$plus.int(2).$$asterisk.$$parenleft.int(10).$$slash.int(5).$$parenright.end
664 |     println(program2)
665 |
666 |     val program3: Program = (10) $$plus (2) $$asterisk $$parenleft (10) $$slash (5) $$parenright end
667 |     println(program3)
668 |
669 |     val program4: Program = 10 $$plus 2 $$asterisk $$parenleft (10) $$slash 5 $$parenright end
670 |     println(program4)
671 |
672 |     val program5: Program = (10).$$plus(2).$$asterisk.$$parenleft(10).$$slash(5).$$parenright.end
673 |     println(program5)
674 |
675 |     val program6: Program = $$parenleft (10) $$plus (2) $$parenright $$asterisk $$parenleft $$parenleft $$parenleft (10) $$slash (5) $$parenright $$parenright $$parenright end
676 |     println(program6)
677 |
678 |     val program7: Program = $$parenleft (10) $$plus (2) $$parenright $$asterisk $$parenleft $$parenleft (10) $$slash (5) $$parenright $$parenright end $$semicolon
679 |     println(program7)
680 |
681 |     /**/
682 |   }
683 | }
684 |
--------------------------------------------------------------------------------
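Note on the generated Reduce instances above: they are machine-generated sample output, and their intent is easy to lose in the repetition. The sketch below is not the actual scalalr runtime; it is a deliberately simplified, hypothetical model (the names ReduceSketch, NodeA, NodeB, End and addRule are invented for illustration) of the pattern the generated instances follow: each automaton-state node keeps the node below it in prev and a semantic value in value, and an implicit Reduce[Terminal, From, To] rewrites a stack of shape From into shape To when Terminal is the next token.

// NOTE: hypothetical sketch only -- NOT the scalalr internals.
object ReduceSketch {
  sealed trait Expr                                      // toy AST standing in for the arithmetic ASTs
  case class Num (n: Int)           extends Expr
  case class Add (l: Expr, r: Expr) extends Expr

  case object End                                        // toy terminal, in the role of Plus.type / EoI.type above

  case class NodeA [+P] (prev: P, value: Expr)           // toy automaton states: each layer
  case class NodeB [+P] (prev: P, value: Expr)           // remembers the state beneath it

  case class Reduce [T, From, To] (reduce: From => To)   // on terminal T, rewrite stack From into To

  // One rule in the style of the generated code: on End, with NodeB on top of NodeA,
  // pop both layers and push a NodeA that holds the combined Add value.
  implicit def addRule [P] : Reduce[End.type, NodeB[NodeA[P]], NodeA[P]] =
    Reduce(s => NodeA(s.prev.prev, Add(s.prev.value, s.value)))

  def main (args: Array[String]): Unit = {
    val stack = NodeB(NodeA((), Num(1)), Num(2))
    println(implicitly[Reduce[End.type, NodeB[NodeA[Unit]], NodeA[Unit]]].reduce(stack))
    // prints: NodeA((),Add(Num(1),Num(2)))
  }
}

In the generated file itself, the binary-operator reductions pop three stack layers (right operand, operator state, left operand), which is why they reach through s.prev.prev.prev before rebuilding the stack; the two-layer rule above keeps the sketch minimal.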