├── .github └── workflows │ └── ci.yml ├── .gitignore ├── LICENSE ├── README.md ├── chap03-arith ├── chap03-arith.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap05-untyped ├── chap05-untyped.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap08-tyarith ├── chap08-tyarith.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap09-simplebool ├── chap09-simplebool.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap11-fullsimple ├── chap11-fullsimple.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap13-simpleref ├── chap13-simpleref.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap14-simpleerror ├── chap14-simpleerror.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap15-simplesub ├── chap15-simplesub.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap20-fullequirec ├── chap20-fullequirec.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap20-fullisorec ├── chap20-fullisorec.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap22-simplerecon ├── chap22-simplerecon.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap23-fullpoly ├── chap23-fullpoly.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap26-fullfsub ├── chap26-fullfsub.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap29-fullomega ├── chap29-fullomega.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f ├── chap31-fullfomsub ├── chap31-fullfomsub.mbti ├── core.mbt ├── lexer.mbtx ├── main.mbt ├── moon.pkg.json ├── parser.mbty ├── support.mbt ├── syntax.mbt └── test.f └── moon.mod.json /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: check 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | 9 | jobs: 10 | stable-check: 11 | strategy: 12 | matrix: 13 | os: [ubuntu-latest, macos-latest, macos-13] 14 | fail-fast: false 15 | runs-on: ${{ matrix.os }} 16 | continue-on-error: false 17 | steps: 18 | - uses: actions/checkout@v4 19 | 20 | - name: install 21 | run: | 22 | curl -fsSL https://cli.moonbitlang.com/install/unix.sh | bash 23 | echo "$HOME/.moon/bin" >> $GITHUB_PATH 24 | 25 | - name: moon version 26 | run: | 27 | moon version --all 28 | moonrun --version 29 | 30 | - name: moon check 31 | run: moon check --deny-warn 32 | 33 | - name: moon info 34 | run: | 35 | moon info --target wasm,wasm-gc,js,native 36 | git diff --exit-code 37 | 38 | - name: format diff 39 | run: | 40 | moon fmt --block-style 41 | git diff --exit-code 42 | 43 | - name: moon test 44 | run: | 45 | moon test 
--target all 46 | moon test --release --target all 47 | moon test --target native 48 | moon test --target native --release 49 | 50 | bleeding-check: 51 | continue-on-error: true 52 | strategy: 53 | matrix: 54 | os: [macos-latest, ubuntu-latest, macos-13] 55 | runs-on: ${{ matrix.os }} 56 | steps: 57 | - uses: actions/checkout@v4 58 | 59 | - name: install 60 | run: | 61 | curl -fsSL https://cli.moonbitlang.com/install/unix.sh | bash -s bleeding 62 | echo "$HOME/.moon/bin" >> $GITHUB_PATH 63 | 64 | - name: moon version 65 | run: | 66 | moon version --all 67 | moonrun --version 68 | 69 | - name: moon check 70 | run: moon check --deny-warn 71 | 72 | - name: moon test 73 | run: | 74 | moon test --target all 75 | moon test --release --target all 76 | moon test --target native 77 | moon test --target native --release 78 | 79 | - name: moon info 80 | run: | 81 | moon info --target wasm,wasm-gc,js,native 82 | git diff 83 | 84 | - name: format diff 85 | run: | 86 | moon fmt 87 | git diff 88 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | .mooncakes/ 3 | parser.mbt 4 | parser.mbt.map.json 5 | lexer.mbt 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Di Wang 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # TAPL 2 | 3 | [MoonBit](https://www.moonbitlang.com/) Port of implementations for Pierce's [Types and Programming Languages](https://www.cis.upenn.edu/~bcpierce/tapl/). 4 | 5 | The port here slightly diverges from the [original implementations](https://www.cis.upenn.edu/~bcpierce/tapl/resources.html#checkers) to better suit the needs of the [Design Principles of Programming Languages](https://pku-dppl.github.io/2025/english.html) course at Peking University. 6 | - [chap03-arith](chap03-arith) for Chapter 3: Untyped Arithmetic Expressions. An untyped calculus with Booleans and natural numbers. 7 | - [chap05-untyped](chap05-untyped) for Chapter 5: The Untyped Lambda-Calculus. An untyped lambda calculus with Booleans and natural numbers. 
8 | - [chap08-tyarith](chap08-tyarith) for Chapter 8: Typed Arithmetic Expressions. A typed calculus with Booleans and natural numbers. 9 | - [chap09-simplebool](chap09-simplebool) for Chapter 9: Simply Typed Lambda-Calculus. A simply-typed lambda calculus with Booleans and natural numbers. 10 | - [chap11-fullsimple](chap11-fullsimple) for Chapter 11: Simple Extensions. A simply-typed lambda calculus with Booleans, natural numbers, units, records, variants, strings, and decimal numbers. 11 | - [chap13-simpleref](chap13-simpleref) for Chapter 13: References. A simply-typed lambda calculus with Booleans, natural numbers, and references. 12 | - [chap14-simpleerror](chap14-simpleerror) for Chapter 14: Exceptions. A simply-typed lambda calculus with Booleans, natural numbers, and errors. Note that the implementation uses subtyping and the bottom type to allow polymorphic typing of errors. 13 | - [chap15-simplesub](chap15-simplesub) for Chapter 15: Subtyping. A simply-typed lambda calculus with subtyping, as well as Booleans, natural numbers, and records. 14 | - [chap20-fullequirec](chap20-fullequirec) for Chapter 20: Recursive Types. A simply-typed lambda calculus with equi-recursive types, as well as Booleans, natural numbers, units, records, variants, strings, and decimal numbers. 15 | - [chap20-fullisorec](chap20-fullisorec) for Chapter 20: Recursive Types. A simply-typed lambda calculus with iso-recursive types, as well as Booleans, natural numbers, units, records, variants, strings, and decimal numbers. 16 | - [chap22-simplerecon](chap22-simplerecon) for Chapter 22: Type Reconstruction. A simply-typed lambda calculus with type inference, as well as Booleans and natural numbers. 17 | - [chap23-fullpoly](chap23-fullpoly) for Chapter 23: Universal Types. A System-F calculus with Booleans, natural numbers, units, records, variants, strings, and decimal numbers. 18 | - [chap26-fullfsub](chap26-fullfsub) for Chapter 26: Bounded Quantification. A System-F calculus with subtyping, as well as Booleans, natural numbers, units, records, variants, strings, and decimal numbers. 19 | - [chap29-fullomega](chap29-fullomega) for Chapter 29: Type Operators and Kinding. A System-F calculus with kinding, as well as Booleans, natural numbers, units, records, variants, strings, and decimal numbers. 20 | - [chap31-fullfomsub](chap31-fullfomsub) for Chapter 31: Higher-Order Subtyping. A System-F calculus with kinding and subtyping, as well as Booleans, natural numbers, units, records, variants, strings, and decimal numbers. 21 | 22 | ## Prerequisites 23 | 24 | Follow the instructions [here](https://docs.moonbitlang.com/en/latest/tutorial/tour.html#installation) to install MoonBit's toolchain. 25 | The recommended setup is to install MoonBit's VS Code extension and then perform the 'Install moonbit toolchain' action in VS Code.
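Alternatively, the toolchain can be installed from the command line. The following is a minimal sketch of that route, mirroring the install step in this repository's CI (`.github/workflows/ci.yml`); note that the CI appends `$HOME/.moon/bin` to `$GITHUB_PATH`, so locally you extend `PATH` yourself:
```
curl -fsSL https://cli.moonbitlang.com/install/unix.sh | bash
export PATH="$HOME/.moon/bin:$PATH"   # make the moon/moonrun binaries visible
moon version --all                    # verify the installation
```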
26 | 27 | ## Usage 28 | 29 | Go to the root directory and build all the checkers: 30 | ``` 31 | moon build 32 | ``` 33 | 34 | Run a particular checker on an input file: 35 | ``` 36 | moon run chap11-fullsimple chap11-fullsimple/test.f 37 | ``` 38 | -------------------------------------------------------------------------------- /chap03-arith/chap03-arith.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap03-arith 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> @list.T[Command]!ParseError 13 | 14 | // Types and methods 15 | type Command 16 | 17 | type ErrorWithInfo 18 | 19 | type Info 20 | 21 | pub(all) struct LexEngine { 22 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 23 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 24 | start_tags : Array[Int] 25 | code_blocks_n : Int 26 | } 27 | impl LexEngine { 28 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 29 | } 30 | 31 | type Lexbuf 32 | impl Lexbuf { 33 | from_string(String) -> Self 34 | } 35 | 36 | pub(all) type Lexer Lexbuf 37 | impl Lexer { 38 | next_token(Self) -> (Token, Pos, Pos) 39 | } 40 | 41 | type NoRuleApplies 42 | 43 | pub type! ParseError { 44 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 45 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 46 | } 47 | impl Show for ParseError 48 | 49 | type Pos 50 | 51 | type Term 52 | 53 | pub(all) enum Token { 54 | EOF 55 | ELSE 56 | FALSE 57 | IF 58 | IMPORT 59 | ISZERO 60 | PRED 61 | SUCC 62 | THEN 63 | TRUE 64 | INTV(Int) 65 | STRINGV(String) 66 | LPAREN 67 | RPAREN 68 | SEMI 69 | } 70 | impl Token { 71 | kind(Self) -> TokenKind 72 | } 73 | impl Show for Token 74 | 75 | pub(all) enum TokenKind { 76 | TK_EOF 77 | TK_ELSE 78 | TK_FALSE 79 | TK_IF 80 | TK_IMPORT 81 | TK_ISZERO 82 | TK_PRED 83 | TK_SUCC 84 | TK_THEN 85 | TK_TRUE 86 | TK_INTV 87 | TK_STRINGV 88 | TK_LPAREN 89 | TK_RPAREN 90 | TK_SEMI 91 | } 92 | impl Show for TokenKind 93 | 94 | // Type aliases 95 | pub typealias Position = Pos 96 | 97 | // Traits 98 | 99 | -------------------------------------------------------------------------------- /chap03-arith/core.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | fn is_numerical(self : Term) -> Bool { 3 | match self { 4 | Zero(_) => true 5 | Succ(_, t1) => t1.is_numerical() 6 | _ => false 7 | } 8 | } 9 | 10 | ///| 11 | fn is_val(self : Term) -> Bool { 12 | match self { 13 | True(_) => true 14 | False(_) => true 15 | _ => self.is_numerical() 16 | } 17 | } 18 | 19 | ///| 20 | type! 
NoRuleApplies 21 | 22 | ///| 23 | fn eval1(self : Term) -> Term!NoRuleApplies { 24 | match self { 25 | True(_) => raise NoRuleApplies 26 | False(_) => raise NoRuleApplies 27 | If(info, t1, t2, t3) => 28 | if t1.is_val() { 29 | match t1 { 30 | True(_) => t2 31 | False(_) => t3 32 | _ => raise NoRuleApplies 33 | } 34 | } else { 35 | let t1_prime = t1.eval1!() 36 | If(info, t1_prime, t2, t3) 37 | } 38 | Zero(_) => raise NoRuleApplies 39 | Succ(info, t1) => 40 | if t1.is_val() { 41 | raise NoRuleApplies 42 | } else { 43 | let t1_prime = t1.eval1!() 44 | Succ(info, t1_prime) 45 | } 46 | Pred(info, t1) => 47 | if t1.is_val() { 48 | match t1 { 49 | Zero(_) => Zero(UNKNOWN) 50 | Succ(_, t11) => 51 | if t11.is_numerical() { 52 | t11 53 | } else { 54 | raise NoRuleApplies 55 | } 56 | _ => raise NoRuleApplies 57 | } 58 | } else { 59 | let t1_prime = t1.eval1!() 60 | Pred(info, t1_prime) 61 | } 62 | IsZero(info, t1) => 63 | if t1.is_val() { 64 | match t1 { 65 | Zero(_) => True(UNKNOWN) 66 | Succ(_, t11) => 67 | if t11.is_numerical() { 68 | False(UNKNOWN) 69 | } else { 70 | raise NoRuleApplies 71 | } 72 | _ => raise NoRuleApplies 73 | } 74 | } else { 75 | let t1_prime = t1.eval1!() 76 | IsZero(info, t1_prime) 77 | } 78 | } 79 | } 80 | 81 | ///| 82 | fn eval(self : Term) -> Term { 83 | loop self { 84 | t => 85 | try { 86 | continue t.eval1!() 87 | } catch { 88 | NoRuleApplies => break t 89 | } 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /chap03-arith/lexer.mbtx: -------------------------------------------------------------------------------- 1 | { 2 | pub(all) type Lexer Lexbuf 3 | 4 | let pos_fname : Ref[String] = Ref::new("") 5 | let pos_lnum : Ref[Int] = Ref::new(1) 6 | let pos_bol : Ref[Int] = Ref::new(0) 7 | 8 | let has_lex_error : Ref[Bool] = Ref::new(false) 9 | 10 | fn reset_lex(fname : String) -> Unit { 11 | pos_fname.val = fname 12 | pos_lnum.val = 1 13 | pos_bol.val = 0 14 | has_lex_error.val = false 15 | } 16 | 17 | fn newline(pos : Int) -> Unit { 18 | pos_lnum.val += 1 19 | pos_bol.val = pos 20 | } 21 | 22 | fn newpos(pos : Int) -> Pos { 23 | { fname: pos_fname.val, lnum: pos_lnum.val, bol: pos_bol.val, cnum: pos } 24 | } 25 | 26 | fn record_lex_error(msg : String, start : Pos, end : Pos) -> Unit { 27 | println("Error:\{FI(start, end)} \{msg}") 28 | has_lex_error.val = true 29 | } 30 | 31 | let comment_depth : Ref[Int] = Ref::new(0) 32 | 33 | let string_builder : StringBuilder = StringBuilder::new() 34 | let string_start : Ref[Int] = Ref::new(0) 35 | } 36 | 37 | rule token() -> (Token, Position, Position) { 38 | parse { 39 | ('\r'* '\n') as t => { newline($endpos(t)); token(lexbuf) } 40 | [' ' '\t']+ => { token(lexbuf) } 41 | "*/" as t => { record_lex_error("unmatched end of comment", newpos($startpos(t)), newpos($endpos(t))); token(lexbuf) } 42 | "/*" => { comment_depth.val = 1; comment(lexbuf); token(lexbuf) } 43 | ['0'-'9']+ as t => { 44 | try { 45 | (INTV(@strconv.parse_int!(t)), newpos($startpos(t)), newpos($endpos(t))) 46 | } catch { 47 | StrConvError(_) => { 48 | record_lex_error("int literal invalid", newpos($startpos(t)), newpos($endpos(t))) 49 | token(lexbuf) 50 | } 51 | } 52 | } 53 | "else" as t => { (ELSE, newpos($startpos(t)), newpos($endpos(t))) } 54 | "false" as t => { (FALSE, newpos($startpos(t)), newpos($endpos(t))) } 55 | "if" as t => { (IF, newpos($startpos(t)), newpos($endpos(t))) } 56 | "import" as t => { (IMPORT, newpos($startpos(t)), newpos($endpos(t))) } 57 | "iszero" as t => { (ISZERO, newpos($startpos(t)), 
newpos($endpos(t))) } 58 | "pred" as t => { (PRED, newpos($startpos(t)), newpos($endpos(t))) } 59 | "succ" as t => { (SUCC, newpos($startpos(t)), newpos($endpos(t))) } 60 | "then" as t => { (THEN, newpos($startpos(t)), newpos($endpos(t))) } 61 | "true" as t => { (TRUE, newpos($startpos(t)), newpos($endpos(t))) } 62 | "(" as t => { (LPAREN, newpos($startpos(t)), newpos($endpos(t))) } 63 | ")" as t => { (RPAREN, newpos($startpos(t)), newpos($endpos(t))) } 64 | ";" as t => { (SEMI, newpos($startpos(t)), newpos($endpos(t))) } 65 | '"' as t => { string_builder.reset(); string_start.val = $startpos(t); string(lexbuf) } 66 | eof as t => { (EOF, newpos($startpos(t)), newpos($endpos(t))) } 67 | _ as t => { 68 | record_lex_error("unrecognized token", newpos($startpos(t)), newpos($endpos(t))) 69 | token(lexbuf) 70 | } 71 | } 72 | } 73 | 74 | rule comment() -> Unit { 75 | parse { 76 | "/*" => { comment_depth.val += 1; comment(lexbuf) } 77 | "*/" => { comment_depth.val -= 1; if comment_depth.val > 0 { comment(lexbuf) } } 78 | eof as t => { record_lex_error("comment not terminated", newpos($startpos(t)), newpos($endpos(t))) } 79 | [^ '\n'] => { comment(lexbuf) } 80 | '\n' as t => { newline($endpos(t)); comment(lexbuf) } 81 | } 82 | } 83 | 84 | rule string() -> (Token, Position, Position) { 85 | parse { 86 | '"' as t => { (STRINGV(string_builder.to_string()), newpos(string_start.val), newpos($endpos(t))) } 87 | '\\' => { string_builder.write_string("\\\\"); string(lexbuf) } 88 | '\n' as t => { string_builder.write_string("\n"); newline($endpos(t)); string(lexbuf) } 89 | eof as t => { 90 | record_lex_error("string not terminated", newpos($startpos(t)), newpos($endpos(t))) 91 | (EOF, newpos($startpos(t)), newpos($endpos(t))) 92 | } 93 | _ as t => { string_builder.write_string(t); string(lexbuf) } 94 | } 95 | } 96 | 97 | { 98 | pub fn new_lexer(input : String) -> Lexer { 99 | Lexbuf::from_string(input) 100 | } 101 | 102 | pub fn next_token(self : Lexer) -> (Token, Position, Position) { 103 | token(self._) 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /chap03-arith/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(cmd : Command) -> Unit { 6 | match cmd { 7 | Import(fname) => process_file!(fname) 8 | Eval(_, t) => { 9 | let t_prime = t.eval() 10 | println("\{t_prime.to_string()}") 11 | } 12 | } 13 | } 14 | 15 | ///| 16 | fn process_file!(fname : String) -> Unit { 17 | if already_imported.contains(fname) { 18 | () 19 | } else { 20 | already_imported.push(fname) 21 | let cmds = parse_file!(fname) 22 | loop cmds { 23 | Nil => break 24 | Cons(c, rest) => { 25 | process_command!(c) 26 | continue rest 27 | } 28 | } 29 | } 30 | } 31 | 32 | ///| 33 | fn parse_file!(fname : String) -> @immut/list.T[Command] { 34 | try { 35 | let code = @fs.read_file_to_string!(fname) 36 | let lexer = new_lexer(code) 37 | reset_lex(fname) 38 | let tokens = [] 39 | while true { 40 | let elem = lexer.next_token() 41 | tokens.push(elem) 42 | match elem.0 { 43 | EOF => break 44 | _ => continue 45 | } 46 | } 47 | let result = toplevel!(tokens) 48 | if has_lex_error.val { 49 | error_info!("") 50 | } else { 51 | result 52 | } 53 | } catch { 54 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 55 | ErrorWithInfo(_) as e => raise e 56 | UnexpectedToken(t, (start, end), _) => 57 | if has_lex_error.val { 58 | error_info!("") 59 | } else { 60 | 
error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 61 | } 62 | _ => panic() 63 | } 64 | } 65 | 66 | ///| 67 | fn main { 68 | let argv = @sys.get_cli_args()[2:] 69 | try { 70 | if argv.length() != 1 { 71 | error_info!("you must specify exactly one input file") 72 | } else { 73 | let fname = argv[0] 74 | process_file!(fname) 75 | } 76 | } catch { 77 | ErrorWithInfo((msg, info)) => 78 | if not(msg.is_empty()) { 79 | println("Error:\{info} \{msg}") 80 | } 81 | _ => panic() 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /chap03-arith/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap03-arith/parser.mbty: -------------------------------------------------------------------------------- 1 | %derive Token 2 | %derive ParseError 3 | %position 4 | %start toplevel 5 | 6 | %token EOF 7 | %token ELSE "else" 8 | %token FALSE "false" 9 | %token IF "if" 10 | %token IMPORT "import" 11 | %token ISZERO "iszero" 12 | %token PRED "pred" 13 | %token SUCC "succ" 14 | %token THEN "then" 15 | %token TRUE "true" 16 | %token INTV 17 | %token STRINGV 18 | %token LPAREN "(" 19 | %token RPAREN ")" 20 | %token SEMI ";" 21 | 22 | %type toplevel 23 | %type command 24 | %type term 25 | %type app_term 26 | %type atom_term 27 | 28 | %% 29 | 30 | toplevel 31 | : EOF { Nil } 32 | | command ";" toplevel { Cons($1, $3) } 33 | ; 34 | 35 | command 36 | : "import" STRINGV { Import($2) } 37 | | term { Eval(FI($startpos, $endpos), $1) } 38 | ; 39 | 40 | term 41 | : app_term { $1 } 42 | | "if" term "then" term "else" term { If(FI($startpos, $endpos), $2, $4, $6) } 43 | ; 44 | 45 | app_term 46 | : atom_term { $1 } 47 | | "succ" atom_term { Succ(FI($startpos, $endpos), $2) } 48 | | "pred" atom_term { Pred(FI($startpos, $endpos), $2) } 49 | | "iszero" atom_term { IsZero(FI($startpos, $endpos), $2) } 50 | ; 51 | 52 | atom_term 53 | : "(" term ")" { $2 } 54 | | "true" { True(FI($startpos, $endpos)) } 55 | | "false" { False(FI($startpos, $endpos)) } 56 | | INTV { 57 | let info = FI($startpos, $endpos) 58 | loop ($1, Zero(info)) { 59 | (0, acc) => break acc 60 | (n, acc) => continue (n - 1, Succ(info, acc)) 61 | } 62 | } 63 | ; 64 | -------------------------------------------------------------------------------- /chap03-arith/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = @immut/list.T[Command] 38 | 39 | ///| 40 | typealias ParseCommand = Command 41 | 42 | ///| 43 | typealias ParseTerm = Term 44 | -------------------------------------------------------------------------------- /chap03-arith/syntax.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | enum Term { 3 | True(Info) 4 | False(Info) 5 | If(Info, Term, Term, Term) 6 | Zero(Info) 7 | Succ(Info, Term) 8 | Pred(Info, Term) 9 | IsZero(Info, Term) 10 | } 11 | 12 | ///| 13 | enum Command { 14 | Import(String) 15 | Eval(Info, Term) 16 | } 17 | 18 | ///| 19 | fn Term::to_string(self : Term) -> String { 20 | let logger = StringBuilder::new() 21 | self.output(logger) 22 | logger.to_string() 23 | } 24 | 25 | ///| 26 | fn Term::output(self : Term, logger : &Logger) -> Unit { 27 | match self { 28 | If(_, t1, t2, t3) => { 29 | logger.write_string("if ") 30 | t1.output(logger) 31 | logger.write_string(" then ") 32 | t2.output(logger) 33 | logger.write_string(" else") 34 | t3.output(logger) 35 | } 36 | _ => self.output_app(logger) 37 | } 38 | } 39 | 40 | ///| 41 | fn Term::output_app(self : Term, logger : &Logger) -> Unit { 42 | match self { 43 | Pred(_, t1) => { 44 | logger.write_string("pred ") 45 | t1.output_atom(logger) 46 | } 47 | IsZero(_, t1) => { 48 | logger.write_string("iszero ") 49 | t1.output_atom(logger) 50 | } 51 | _ => self.output_atom(logger) 52 | } 53 | } 54 | 55 | ///| 56 | fn Term::output_atom(self : Term, logger : &Logger) -> Unit { 57 | match self { 58 | True(_) => logger.write_string("true") 59 | False(_) => logger.write_string("false") 60 | Zero(_) => logger.write_string("0") 61 | Succ(_, t1) => 62 | loop (t1, 1) { 63 | (Zero(_), n) => { 64 | logger.write_string("\{n}") 65 | break 66 | } 67 | (Succ(_, s), n) => continue (s, n + 1) 68 | _ => { 69 | logger.write_string("(succ ") 70 | t1.output_atom(logger) 71 | logger.write_string(")") 72 | } 73 | } 74 | _ => { 75 | logger.write_string("(") 76 | self.output(logger) 77 | logger.write_string(")") 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /chap03-arith/test.f: -------------------------------------------------------------------------------- 1 | /* Examples for testing */ 2 | 3 | true; 4 | if false then true else false; 5 | 6 | 42; 7 | succ (pred 0); 8 | iszero (pred (succ (succ 0))); 9 | -------------------------------------------------------------------------------- /chap05-untyped/chap05-untyped.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap05-untyped 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | pub(all) struct LexEngine { 26 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 27 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 
28 | start_tags : Array[Int] 29 | code_blocks_n : Int 30 | } 31 | impl LexEngine { 32 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 33 | } 34 | 35 | type Lexbuf 36 | impl Lexbuf { 37 | from_string(String) -> Self 38 | } 39 | 40 | pub(all) type Lexer Lexbuf 41 | impl Lexer { 42 | next_token(Self) -> (Token, Pos, Pos) 43 | } 44 | 45 | type NoRuleApplies 46 | 47 | pub type! ParseError { 48 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 49 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 50 | } 51 | impl Show for ParseError 52 | 53 | type Pos 54 | 55 | type Term 56 | 57 | pub(all) enum Token { 58 | EOF 59 | ELSE 60 | FALSE 61 | IF 62 | IMPORT 63 | ISZERO 64 | LAMBDA 65 | PRED 66 | SUCC 67 | THEN 68 | TRUE 69 | LCID(String) 70 | INTV(Int) 71 | STRINGV(String) 72 | DOT 73 | EQ 74 | LPAREN 75 | RPAREN 76 | SEMI 77 | SLASH 78 | USCORE 79 | } 80 | impl Token { 81 | kind(Self) -> TokenKind 82 | } 83 | impl Show for Token 84 | 85 | pub(all) enum TokenKind { 86 | TK_EOF 87 | TK_ELSE 88 | TK_FALSE 89 | TK_IF 90 | TK_IMPORT 91 | TK_ISZERO 92 | TK_LAMBDA 93 | TK_PRED 94 | TK_SUCC 95 | TK_THEN 96 | TK_TRUE 97 | TK_LCID 98 | TK_INTV 99 | TK_STRINGV 100 | TK_DOT 101 | TK_EQ 102 | TK_LPAREN 103 | TK_RPAREN 104 | TK_SEMI 105 | TK_SLASH 106 | TK_USCORE 107 | } 108 | impl Show for TokenKind 109 | 110 | // Type aliases 111 | pub typealias Position = Pos 112 | 113 | // Traits 114 | 115 | -------------------------------------------------------------------------------- /chap05-untyped/core.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | fn is_numerical(self : Term) -> Bool { 3 | match self { 4 | Zero(_) => true 5 | Succ(_, t1) => t1.is_numerical() 6 | _ => false 7 | } 8 | } 9 | 10 | ///| 11 | fn is_val(self : Term) -> Bool { 12 | match self { 13 | True(_) => true 14 | False(_) => true 15 | Abs(_) => true 16 | _ => self.is_numerical() 17 | } 18 | } 19 | 20 | ///| 21 | type! 
NoRuleApplies 22 | 23 | ///| 24 | fn eval1(self : Term, ctx : Context) -> Term!NoRuleApplies { 25 | match self { 26 | True(_) => raise NoRuleApplies 27 | False(_) => raise NoRuleApplies 28 | If(info, t1, t2, t3) => 29 | if t1.is_val() { 30 | match t1 { 31 | True(_) => t2 32 | False(_) => t3 33 | _ => raise NoRuleApplies 34 | } 35 | } else { 36 | let t1_prime = t1.eval1!(ctx) 37 | If(info, t1_prime, t2, t3) 38 | } 39 | Zero(_) => raise NoRuleApplies 40 | Succ(info, t1) => 41 | if t1.is_val() { 42 | raise NoRuleApplies 43 | } else { 44 | let t1_prime = t1.eval1!(ctx) 45 | Succ(info, t1_prime) 46 | } 47 | Pred(info, t1) => 48 | if t1.is_val() { 49 | match t1 { 50 | Zero(_) => Zero(UNKNOWN) 51 | Succ(_, t11) => 52 | if t11.is_numerical() { 53 | t11 54 | } else { 55 | raise NoRuleApplies 56 | } 57 | _ => raise NoRuleApplies 58 | } 59 | } else { 60 | let t1_prime = t1.eval1!(ctx) 61 | Pred(info, t1_prime) 62 | } 63 | IsZero(info, t1) => 64 | if t1.is_val() { 65 | match t1 { 66 | Zero(_) => True(UNKNOWN) 67 | Succ(_, t11) => 68 | if t11.is_numerical() { 69 | False(UNKNOWN) 70 | } else { 71 | raise NoRuleApplies 72 | } 73 | _ => raise NoRuleApplies 74 | } 75 | } else { 76 | let t1_prime = t1.eval1!(ctx) 77 | IsZero(info, t1_prime) 78 | } 79 | Var(_, x, _) => 80 | match ctx.get_binding(x) { 81 | TmAbb(t) => t 82 | _ => raise NoRuleApplies 83 | } 84 | Abs(_) => raise NoRuleApplies 85 | App(info, t1, t2) => 86 | if t1.is_val() { 87 | if t2.is_val() { 88 | match t1 { 89 | Abs(_, _, t12) => t12.subst_top(t2) 90 | _ => raise NoRuleApplies 91 | } 92 | } else { 93 | let t2_prime = t2.eval1!(ctx) 94 | App(info, t1, t2_prime) 95 | } 96 | } else { 97 | let t1_prime = t1.eval1!(ctx) 98 | App(info, t1_prime, t2) 99 | } 100 | } 101 | } 102 | 103 | ///| 104 | fn eval(self : Term, ctx : Context) -> Term { 105 | loop self { 106 | t => 107 | try { 108 | continue t.eval1!(ctx) 109 | } catch { 110 | NoRuleApplies => break t 111 | } 112 | } 113 | } 114 | 115 | ///| 116 | fn Binding::eval(self : Binding, ctx : Context) -> Binding { 117 | match self { 118 | Name => Name 119 | TmAbb(t) => { 120 | let t_prime = t.eval(ctx) 121 | TmAbb(t_prime) 122 | } 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /chap05-untyped/lexer.mbtx: -------------------------------------------------------------------------------- 1 | { 2 | pub(all) type Lexer Lexbuf 3 | 4 | let pos_fname : Ref[String] = Ref::new("") 5 | let pos_lnum : Ref[Int] = Ref::new(1) 6 | let pos_bol : Ref[Int] = Ref::new(0) 7 | 8 | let has_lex_error : Ref[Bool] = Ref::new(false) 9 | 10 | fn reset_lex(fname : String) -> Unit { 11 | pos_fname.val = fname 12 | pos_lnum.val = 1 13 | pos_bol.val = 0 14 | has_lex_error.val = false 15 | } 16 | 17 | fn newline(pos : Int) -> Unit { 18 | pos_lnum.val += 1 19 | pos_bol.val = pos 20 | } 21 | 22 | fn newpos(pos : Int) -> Pos { 23 | { fname: pos_fname.val, lnum: pos_lnum.val, bol: pos_bol.val, cnum: pos } 24 | } 25 | 26 | fn record_lex_error(msg : String, start : Pos, end : Pos) -> Unit { 27 | println("Error:\{FI(start, end)} \{msg}") 28 | has_lex_error.val = true 29 | } 30 | 31 | let comment_depth : Ref[Int] = Ref::new(0) 32 | 33 | let string_builder : StringBuilder = StringBuilder::new() 34 | let string_start : Ref[Int] = Ref::new(0) 35 | } 36 | 37 | rule token() -> (Token, Position, Position) { 38 | parse { 39 | ('\r'* '\n') as t => { newline($endpos(t)); token(lexbuf) } 40 | [' ' '\t']+ => { token(lexbuf) } 41 | "*/" as t => { record_lex_error("unmatched end of comment", 
newpos($startpos(t)), newpos($endpos(t))); token(lexbuf) } 42 | "/*" => { comment_depth.val = 1; comment(lexbuf); token(lexbuf) } 43 | ['0'-'9']+ as t => { 44 | try { 45 | (INTV(@strconv.parse_int!(t)), newpos($startpos(t)), newpos($endpos(t))) 46 | } catch { 47 | StrConvError(_) => { 48 | record_lex_error("int literal invalid", newpos($startpos(t)), newpos($endpos(t))) 49 | token(lexbuf) 50 | } 51 | } 52 | } 53 | "else" as t => { (ELSE, newpos($startpos(t)), newpos($endpos(t))) } 54 | "false" as t => { (FALSE, newpos($startpos(t)), newpos($endpos(t))) } 55 | "if" as t => { (IF, newpos($startpos(t)), newpos($endpos(t))) } 56 | "import" as t => { (IMPORT, newpos($startpos(t)), newpos($endpos(t))) } 57 | "iszero" as t => { (ISZERO, newpos($startpos(t)), newpos($endpos(t))) } 58 | "lambda" as t => { (LAMBDA, newpos($startpos(t)), newpos($endpos(t))) } 59 | "pred" as t => { (PRED, newpos($startpos(t)), newpos($endpos(t))) } 60 | "succ" as t => { (SUCC, newpos($startpos(t)), newpos($endpos(t))) } 61 | "then" as t => { (THEN, newpos($startpos(t)), newpos($endpos(t))) } 62 | "true" as t => { (TRUE, newpos($startpos(t)), newpos($endpos(t))) } 63 | (['a'-'z' '_'] ['A'-'Z' 'a'-'z' '_' '0'-'9' '\'']*) as t => { (LCID(t), newpos($startpos(t)), newpos($endpos(t))) } 64 | "." as t => { (DOT, newpos($startpos(t)), newpos($endpos(t))) } 65 | "=" as t => { (EQ, newpos($startpos(t)), newpos($endpos(t))) } 66 | "(" as t => { (LPAREN, newpos($startpos(t)), newpos($endpos(t))) } 67 | ")" as t => { (RPAREN, newpos($startpos(t)), newpos($endpos(t))) } 68 | ";" as t => { (SEMI, newpos($startpos(t)), newpos($endpos(t))) } 69 | "/" as t => { (SLASH, newpos($startpos(t)), newpos($endpos(t))) } 70 | "_" as t => { (USCORE, newpos($startpos(t)), newpos($endpos(t))) } 71 | '"' as t => { string_builder.reset(); string_start.val = $startpos(t); string(lexbuf) } 72 | eof as t => { (EOF, newpos($startpos(t)), newpos($endpos(t))) } 73 | _ as t => { 74 | record_lex_error("unrecognized token", newpos($startpos(t)), newpos($endpos(t))) 75 | token(lexbuf) 76 | } 77 | } 78 | } 79 | 80 | rule comment() -> Unit { 81 | parse { 82 | "/*" => { comment_depth.val += 1; comment(lexbuf) } 83 | "*/" => { comment_depth.val -= 1; if comment_depth.val > 0 { comment(lexbuf) } } 84 | eof as t => { record_lex_error("comment not terminated", newpos($startpos(t)), newpos($endpos(t))) } 85 | [^ '\n'] => { comment(lexbuf) } 86 | '\n' as t => { newline($endpos(t)); comment(lexbuf) } 87 | } 88 | } 89 | 90 | rule string() -> (Token, Position, Position) { 91 | parse { 92 | '"' as t => { (STRINGV(string_builder.to_string()), newpos(string_start.val), newpos($endpos(t))) } 93 | '\\' => { string_builder.write_string("\\\\"); string(lexbuf) } 94 | '\n' as t => { string_builder.write_string("\n"); newline($endpos(t)); string(lexbuf) } 95 | eof as t => { 96 | record_lex_error("string not terminated", newpos($startpos(t)), newpos($endpos(t))) 97 | (EOF, newpos($startpos(t)), newpos($endpos(t))) 98 | } 99 | _ as t => { string_builder.write_string(t); string(lexbuf) } 100 | } 101 | } 102 | 103 | { 104 | pub fn new_lexer(input : String) -> Lexer { 105 | Lexbuf::from_string(input) 106 | } 107 | 108 | pub fn next_token(self : Lexer) -> (Token, Position, Position) { 109 | token(self._) 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /chap05-untyped/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn 
process_command!(ctx : Context, cmd : Command) -> Context { 6 | match cmd { 7 | Import(fname) => process_file!(ctx, fname) 8 | Eval(_, t) => { 9 | let t_prime = t.eval(ctx) 10 | println(t_prime.to_string(ctx)) 11 | ctx 12 | } 13 | Bind(_, x, bind) => { 14 | let bind_prime = bind.eval(ctx) 15 | println("\{x} \{bind_prime.to_string(ctx)}") 16 | ctx.add_binding(x, bind_prime) 17 | } 18 | } 19 | } 20 | 21 | ///| 22 | fn process_file!(ctx : Context, fname : String) -> Context { 23 | if already_imported.contains(fname) { 24 | ctx 25 | } else { 26 | already_imported.push(fname) 27 | let cmds = parse_file!(ctx, fname) 28 | loop (ctx, cmds) { 29 | (ctx, Nil) => break ctx 30 | (ctx, Cons(c, rest)) => continue (process_command!(ctx, c), rest) 31 | } 32 | } 33 | } 34 | 35 | ///| 36 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 37 | try { 38 | let code = @fs.read_file_to_string!(fname) 39 | let lexer = new_lexer(code) 40 | reset_lex(fname) 41 | let tokens = [] 42 | while true { 43 | let elem = lexer.next_token() 44 | tokens.push(elem) 45 | match elem.0 { 46 | EOF => break 47 | _ => continue 48 | } 49 | } 50 | let result = toplevel!(tokens) 51 | if has_lex_error.val { 52 | error_info!("") 53 | } else { 54 | result!(ctx).0 55 | } 56 | } catch { 57 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 58 | ErrorWithInfo(_) as e => raise e 59 | UnexpectedToken(t, (start, end), _) => 60 | if has_lex_error.val { 61 | error_info!("") 62 | } else { 63 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 64 | } 65 | _ => panic() 66 | } 67 | } 68 | 69 | ///| 70 | fn main { 71 | let argv = @sys.get_cli_args()[2:] 72 | try { 73 | if argv.length() != 1 { 74 | error_info!("you must specify exactly one input file") 75 | } else { 76 | let fname = argv[0] 77 | ignore(process_file!(Context::empty(), fname)) 78 | } 79 | } catch { 80 | ErrorWithInfo((msg, info)) => 81 | if not(msg.is_empty()) { 82 | println("Error:\{info} \{msg}") 83 | } 84 | _ => panic() 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /chap05-untyped/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap05-untyped/parser.mbty: -------------------------------------------------------------------------------- 1 | %derive Token 2 | %derive ParseError 3 | %position 4 | %start toplevel 5 | 6 | %token EOF 7 | %token ELSE "else" 8 | %token FALSE "false" 9 | %token IF "if" 10 | %token IMPORT "import" 11 | %token ISZERO "iszero" 12 | %token LAMBDA "lambda" 13 | %token PRED "pred" 14 | %token SUCC "succ" 15 | %token THEN "then" 16 | %token TRUE "true" 17 | %token LCID 18 | %token INTV 19 | %token STRINGV 20 | %token DOT "." 
21 | %token EQ "=" 22 | %token LPAREN "(" 23 | %token RPAREN ")" 24 | %token SEMI ";" 25 | %token SLASH "/" 26 | %token USCORE "_" 27 | 28 | %type toplevel 29 | %type command 30 | %type binder 31 | %type term 32 | %type app_term 33 | %type atom_term 34 | 35 | %% 36 | 37 | toplevel 38 | : EOF { fn(ctx) { (Nil, ctx) } } 39 | | command ";" toplevel { 40 | fn(ctx) { 41 | let (cmd, ctx1) = $1!(ctx) 42 | let (cmds, ctx2) = $3!(ctx1) 43 | (Cons(cmd, cmds), ctx2) 44 | } 45 | } 46 | ; 47 | 48 | command 49 | : "import" STRINGV { fn(ctx) { (Import($2), ctx) } } 50 | | term { fn(ctx) { (Eval(FI($startpos, $endpos), $1!(ctx)), ctx) } } 51 | | LCID binder { fn(ctx) { (Bind(FI($startpos, $endpos), $1, $2!(ctx)), ctx.add_name($1)) } } 52 | ; 53 | 54 | binder 55 | : "/" { fn(_) { Name } } 56 | | "=" term { fn(ctx) { TmAbb($2!(ctx)) } } 57 | ; 58 | 59 | term 60 | : app_term { $1 } 61 | | "if" term "then" term "else" term { fn(ctx) { If(FI($startpos, $endpos), $2!(ctx), $4!(ctx), $6!(ctx)) } } 62 | | "lambda" LCID "." term { 63 | fn(ctx) { 64 | let ctx1 = ctx.add_name($2) 65 | Abs(FI($startpos, $endpos), $2, $4!(ctx1)) 66 | } 67 | } 68 | | "lambda" "_" "." term { 69 | fn(ctx) { 70 | let ctx1 = ctx.add_name("_") 71 | Abs(FI($startpos, $endpos), "_", $4!(ctx1)) 72 | } 73 | } 74 | ; 75 | 76 | app_term 77 | : atom_term { $1 } 78 | | "succ" atom_term { fn(ctx) { Succ(FI($startpos, $endpos), $2!(ctx)) } } 79 | | "pred" atom_term { fn(ctx) { Pred(FI($startpos, $endpos), $2!(ctx)) } } 80 | | "iszero" atom_term { fn(ctx) { IsZero(FI($startpos, $endpos), $2!(ctx)) } } 81 | | app_term atom_term { fn(ctx) { App(FI($startpos, $endpos), $1!(ctx), $2!(ctx)) } } 82 | ; 83 | 84 | atom_term 85 | : "(" term ")" { $2 } 86 | | "true" { fn(_) { True(FI($startpos, $endpos)) } } 87 | | "false" { fn(_) { False(FI($startpos, $endpos)) } } 88 | | INTV { 89 | fn(_) { 90 | let info = FI($startpos, $endpos) 91 | loop ($1, Zero(info)) { 92 | (0, acc) => break acc 93 | (n, acc) => continue (n - 1, Succ(info, acc)) 94 | } 95 | } 96 | } 97 | | LCID { fn(ctx) { Var(FI($startpos, $endpos), ctx.name_to_index!($1, FI($startpos, $endpos)), ctx.length()) } } 98 | ; 99 | -------------------------------------------------------------------------------- /chap05-untyped/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | -------------------------------------------------------------------------------- /chap05-untyped/syntax.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | enum Term { 3 | True(Info) 4 | False(Info) 5 | If(Info, Term, Term, Term) 6 | Zero(Info) 7 | Succ(Info, Term) 8 | Pred(Info, Term) 9 | IsZero(Info, Term) 10 | Var(Info, Int, Int) 11 | Abs(Info, String, Term) 12 | App(Info, Term, Term) 13 | } 14 | 15 | ///| 16 | enum Binding { 17 | Name 18 | TmAbb(Term) 19 | } 20 | 21 | ///| 22 | enum Command { 23 | Import(String) 24 | Eval(Info, Term) 25 | Bind(Info, String, Binding) 26 | } 27 | 28 | ///| 29 | type Context @immut/list.T[(String, Binding)] 30 | 31 | ///| 32 | fn Context::empty() -> Context { 33 | Nil 34 | } 35 | 36 | ///| 37 | fn Context::length(self : Context) -> Int { 38 | self._.length() 39 | } 40 | 41 | ///| 42 | fn Context::add_binding(self : Context, x : String, bind : Binding) -> Context { 43 | Cons((x, bind), self._) 44 | } 45 | 46 | ///| 47 | fn Context::add_name(self : Context, x : String) -> Context { 48 | self.add_binding(x, Name) 49 | } 50 | 51 | ///| 52 | fn Context::name_to_index!(self : Context, x : String, info : Info) -> Int { 53 | loop (0, self._) { 54 | (_, Nil) => error_info!("identifier \"\{x}\" is unbound", info~) 55 | (i, Cons((y, _), rest)) => if y == x { i } else { continue (i + 1, rest) } 56 | } 57 | } 58 | 59 | ///| 60 | fn Context::index_to_name(self : Context, x : Int) -> String { 61 | self._.unsafe_nth(x).0 62 | } 63 | 64 | ///| 65 | fn Context::is_name_bound(self : Context, x : String) -> Bool { 66 | self._.any(fn(b) { x == b.0 }) 67 | } 68 | 69 | ///| 70 | fn Context::pick_fresh_name(self : Context, x : String) -> (Context, String) { 71 | loop x { 72 | x => 73 | if self.is_name_bound(x) { 74 | continue x + "'" 75 | } else { 76 | break (self.add_name(x), x) 77 | } 78 | } 79 | } 80 | 81 | ///| 82 | fn Context::get_binding(self : Context, x : Int) -> Binding { 83 | self._.unsafe_nth(x).1.shift(x + 1) 84 | } 85 | 86 | ///| 87 | fn Term::map( 88 | self : Term, 89 | onvar : (Info, Int, Int, Int) -> Term, 90 | c : Int 91 | ) -> Term { 92 | match self { 93 | True(info) => True(info) 94 | False(info) => False(info) 95 | If(info, t1, t2, t3) => 96 | If(info, t1.map(onvar, c), t2.map(onvar, c), t3.map(onvar, c)) 97 | Zero(info) => Zero(info) 98 | Succ(info, t1) => Succ(info, t1.map(onvar, c)) 99 | Pred(info, t1) => Pred(info, t1.map(onvar, c)) 100 | IsZero(info, t1) => IsZero(info, t1.map(onvar, c)) 101 | Var(info, x, n) => onvar(info, c, x, n) 102 | Abs(info, x, t2) => Abs(info, x, t2.map(onvar, c + 1)) 103 | App(info, t1, t2) => App(info, t1.map(onvar, c), t2.map(onvar, c)) 104 | } 105 | } 106 | 107 | ///| 108 | fn Term::shift_above(self : Term, d : Int, c : Int) -> Term { 109 | self.map( 110 | fn(info, c, x, n) { 111 | if x >= c { 112 | Var(info, x + d, n + d) 113 | } else { 114 | Var(info, x, n + d) 115 | } 116 | }, 117 | c, 118 | ) 119 | } 120 | 121 | ///| 122 | fn Term::shift(self : Term, d : Int) -> Term { 123 | self.shift_above(d, 0) 124 | 
} 125 | 126 | ///| 127 | fn Term::subst(self : Term, j : Int, s : Term) -> Term { 128 | self.map( 129 | fn(info, c, x, n) { if x == j + c { s.shift(c) } else { Var(info, x, n) } }, 130 | 0, 131 | ) 132 | } 133 | 134 | ///| 135 | fn Term::subst_top(self : Term, s : Term) -> Term { 136 | self.subst(0, s.shift(1)).shift(-1) 137 | } 138 | 139 | ///| 140 | fn Term::to_string(self : Term, ctx : Context) -> String { 141 | let logger = StringBuilder::new() 142 | self.output(ctx, logger) 143 | logger.to_string() 144 | } 145 | 146 | ///| 147 | fn Term::output(self : Term, ctx : Context, logger : &Logger) -> Unit { 148 | match self { 149 | If(_, t1, t2, t3) => { 150 | logger.write_string("if ") 151 | t1.output(ctx, logger) 152 | logger.write_string(" then ") 153 | t2.output(ctx, logger) 154 | logger.write_string(" else") 155 | t3.output(ctx, logger) 156 | } 157 | Abs(_, x, t2) => { 158 | let (ctx1, x1) = ctx.pick_fresh_name(x) 159 | logger.write_string("lambda \{x1}. ") 160 | t2.output(ctx1, logger) 161 | } 162 | _ => self.output_app(ctx, logger) 163 | } 164 | } 165 | 166 | ///| 167 | fn Term::output_app(self : Term, ctx : Context, logger : &Logger) -> Unit { 168 | match self { 169 | Pred(_, t1) => { 170 | logger.write_string("pred ") 171 | t1.output_atom(ctx, logger) 172 | } 173 | IsZero(_, t1) => { 174 | logger.write_string("iszero ") 175 | t1.output_atom(ctx, logger) 176 | } 177 | App(_, t1, t2) => { 178 | t1.output_app(ctx, logger) 179 | logger.write_string(" ") 180 | t2.output_atom(ctx, logger) 181 | } 182 | _ => self.output_atom(ctx, logger) 183 | } 184 | } 185 | 186 | ///| 187 | fn Term::output_atom(self : Term, ctx : Context, logger : &Logger) -> Unit { 188 | match self { 189 | True(_) => logger.write_string("true") 190 | False(_) => logger.write_string("false") 191 | Zero(_) => logger.write_string("0") 192 | Succ(_, t1) => 193 | loop (t1, 1) { 194 | (Zero(_), n) => { 195 | logger.write_string("\{n}") 196 | break 197 | } 198 | (Succ(_, s), n) => continue (s, n + 1) 199 | _ => { 200 | logger.write_string("(succ ") 201 | t1.output_atom(ctx, logger) 202 | logger.write_string(")") 203 | } 204 | } 205 | Var(_, x, _) => logger.write_string(ctx.index_to_name(x)) 206 | _ => { 207 | logger.write_string("(") 208 | self.output(ctx, logger) 209 | logger.write_string(")") 210 | } 211 | } 212 | } 213 | 214 | ///| 215 | fn Binding::shift(self : Binding, d : Int) -> Binding { 216 | match self { 217 | Name => Name 218 | TmAbb(t) => TmAbb(t.shift(d)) 219 | } 220 | } 221 | 222 | ///| 223 | fn Binding::to_string(self : Binding, ctx : Context) -> String { 224 | match self { 225 | Name => "/" 226 | TmAbb(t) => "= \{t.to_string(ctx)}" 227 | } 228 | } 229 | -------------------------------------------------------------------------------- /chap05-untyped/test.f: -------------------------------------------------------------------------------- 1 | /* Examples for testing */ 2 | 3 | true; 4 | if false then true else false; 5 | 6 | 42; 7 | succ (pred 0); 8 | iszero (pred (succ (succ 0))); 9 | 10 | x/; 11 | x; 12 | 13 | lambda x. x; 14 | (lambda x. x) (lambda x. x x); 15 | 16 | id = lambda x. x; 17 | id (id (lambda z. id z)); 18 | 19 | tru = lambda t. lambda f. t; 20 | fls = lambda t. lambda f. f; 21 | 22 | test = lambda l. lambda m. lambda n. l m n; 23 | test tru 33 44; 24 | 25 | and = lambda b. lambda c. b c fls; 26 | and tru tru; 27 | and tru fls; 28 | 29 | pair = lambda f. lambda s. lambda b. b f s; 30 | fst = lambda p. p tru; 31 | snd = lambda p. p fls; 32 | fst (pair 33 44); 33 | 34 | c0 = lambda s. lambda z. 
z; 35 | c1 = lambda s. lambda z. s z; 36 | c2 = lambda s. lambda z. s (s z); 37 | c3 = lambda s. lambda z. s (s (s z)); 38 | 39 | scc = lambda n. lambda s. lambda z. s (n s z); 40 | scc c1; 41 | 42 | plus = lambda m. lambda n. lambda s. lambda z. m s (n s z); 43 | 44 | times = lambda m. lambda n. m (plus n) c0; 45 | times c2 c2; 46 | 47 | iszro = lambda m. m (lambda _. fls) tru; 48 | iszro c1; 49 | iszro (times c0 c2); 50 | 51 | zz = pair c0 c0; 52 | ss = lambda p. pair (snd p) (plus c1 (snd p)); 53 | prd = lambda m. fst (m ss zz); 54 | 55 | realbool = lambda b. b true false; 56 | realbool tru; 57 | 58 | churchbool = lambda b. if b then tru else fls; 59 | 60 | realnat = lambda m. m (lambda x. succ x) 0; 61 | realnat c3; 62 | realnat (times c2 c2); 63 | 64 | /* omega = (lambda x. x x) (lambda x. x x); */ 65 | 66 | fix = lambda f. (lambda x. f (lambda y. x x y)) (lambda x. f (lambda y. x x y)); 67 | 68 | g = lambda fct. lambda n. if realbool(iszro(n)) then c1 else (times n (fct (prd n))); 69 | factorial = fix g; 70 | realnat (factorial c3); 71 | realnat (factorial (scc (scc c3))); 72 | -------------------------------------------------------------------------------- /chap08-tyarith/chap08-tyarith.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap08-tyarith 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> @list.T[Command]!ParseError 13 | 14 | // Types and methods 15 | type Command 16 | 17 | type ErrorWithInfo 18 | 19 | type Info 20 | 21 | pub(all) struct LexEngine { 22 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 23 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 24 | start_tags : Array[Int] 25 | code_blocks_n : Int 26 | } 27 | impl LexEngine { 28 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 29 | } 30 | 31 | type Lexbuf 32 | impl Lexbuf { 33 | from_string(String) -> Self 34 | } 35 | 36 | pub(all) type Lexer Lexbuf 37 | impl Lexer { 38 | next_token(Self) -> (Token, Pos, Pos) 39 | } 40 | 41 | type NoRuleApplies 42 | 43 | pub type! 
ParseError { 44 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 45 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 46 | } 47 | impl Show for ParseError 48 | 49 | type Pos 50 | 51 | type Term 52 | 53 | pub(all) enum Token { 54 | EOF 55 | ELSE 56 | FALSE 57 | IF 58 | IMPORT 59 | ISZERO 60 | PRED 61 | SUCC 62 | THEN 63 | TRUE 64 | INTV(Int) 65 | STRINGV(String) 66 | LPAREN 67 | RPAREN 68 | SEMI 69 | } 70 | impl Token { 71 | kind(Self) -> TokenKind 72 | } 73 | impl Show for Token 74 | 75 | pub(all) enum TokenKind { 76 | TK_EOF 77 | TK_ELSE 78 | TK_FALSE 79 | TK_IF 80 | TK_IMPORT 81 | TK_ISZERO 82 | TK_PRED 83 | TK_SUCC 84 | TK_THEN 85 | TK_TRUE 86 | TK_INTV 87 | TK_STRINGV 88 | TK_LPAREN 89 | TK_RPAREN 90 | TK_SEMI 91 | } 92 | impl Show for TokenKind 93 | 94 | type Type 95 | impl Eq for Type 96 | impl Show for Type 97 | 98 | // Type aliases 99 | pub typealias Position = Pos 100 | 101 | // Traits 102 | 103 | -------------------------------------------------------------------------------- /chap08-tyarith/core.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | fn is_numerical(self : Term) -> Bool { 3 | match self { 4 | Zero(_) => true 5 | Succ(_, t1) => t1.is_numerical() 6 | _ => false 7 | } 8 | } 9 | 10 | ///| 11 | fn is_val(self : Term) -> Bool { 12 | match self { 13 | True(_) => true 14 | False(_) => true 15 | _ => self.is_numerical() 16 | } 17 | } 18 | 19 | ///| 20 | type! NoRuleApplies 21 | 22 | ///| 23 | fn eval1(self : Term) -> Term!NoRuleApplies { 24 | match self { 25 | True(_) => raise NoRuleApplies 26 | False(_) => raise NoRuleApplies 27 | If(info, t1, t2, t3) => 28 | if t1.is_val() { 29 | match t1 { 30 | True(_) => t2 31 | False(_) => t3 32 | _ => raise NoRuleApplies 33 | } 34 | } else { 35 | let t1_prime = t1.eval1!() 36 | If(info, t1_prime, t2, t3) 37 | } 38 | Zero(_) => raise NoRuleApplies 39 | Succ(info, t1) => 40 | if t1.is_val() { 41 | raise NoRuleApplies 42 | } else { 43 | let t1_prime = t1.eval1!() 44 | Succ(info, t1_prime) 45 | } 46 | Pred(info, t1) => 47 | if t1.is_val() { 48 | match t1 { 49 | Zero(_) => Zero(UNKNOWN) 50 | Succ(_, t11) => 51 | if t11.is_numerical() { 52 | t11 53 | } else { 54 | raise NoRuleApplies 55 | } 56 | _ => raise NoRuleApplies 57 | } 58 | } else { 59 | let t1_prime = t1.eval1!() 60 | Pred(info, t1_prime) 61 | } 62 | IsZero(info, t1) => 63 | if t1.is_val() { 64 | match t1 { 65 | Zero(_) => True(UNKNOWN) 66 | Succ(_, t11) => 67 | if t11.is_numerical() { 68 | False(UNKNOWN) 69 | } else { 70 | raise NoRuleApplies 71 | } 72 | _ => raise NoRuleApplies 73 | } 74 | } else { 75 | let t1_prime = t1.eval1!() 76 | IsZero(info, t1_prime) 77 | } 78 | } 79 | } 80 | 81 | ///| 82 | fn eval(self : Term) -> Term { 83 | loop self { 84 | t => 85 | try { 86 | continue t.eval1!() 87 | } catch { 88 | NoRuleApplies => break t 89 | } 90 | } 91 | } 92 | 93 | ///| 94 | fn derive_type!(self : Term) -> Type { 95 | match self { 96 | True(_) => Bool 97 | False(_) => Bool 98 | If(info, t1, t2, t3) => 99 | if t1.derive_type!() == Bool { 100 | let ty2 = t2.derive_type!() 101 | let ty3 = t3.derive_type!() 102 | if ty2 == ty3 { 103 | ty2 104 | } else { 105 | error_info!("arms of conditional have different types", info~) 106 | } 107 | } else { 108 | error_info!("guard of conditional not a boolean", info~) 109 | } 110 | Zero(_) => Nat 111 | Succ(info, t1) => 112 | if t1.derive_type!() == Nat { 113 | Nat 114 | } else { 115 | error_info!("argument of succ is not a number", info~) 116 | } 117 | Pred(info, t1) => 118 | if t1.derive_type!() == 
Nat { 119 | Nat 120 | } else { 121 | error_info!("argument of pred is not a number", info~) 122 | } 123 | IsZero(info, t1) => 124 | if t1.derive_type!() == Nat { 125 | Bool 126 | } else { 127 | error_info!("argument of iszero is not a number", info~) 128 | } 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /chap08-tyarith/lexer.mbtx: -------------------------------------------------------------------------------- 1 | { 2 | pub(all) type Lexer Lexbuf 3 | 4 | let pos_fname : Ref[String] = Ref::new("") 5 | let pos_lnum : Ref[Int] = Ref::new(1) 6 | let pos_bol : Ref[Int] = Ref::new(0) 7 | 8 | let has_lex_error : Ref[Bool] = Ref::new(false) 9 | 10 | fn reset_lex(fname : String) -> Unit { 11 | pos_fname.val = fname 12 | pos_lnum.val = 1 13 | pos_bol.val = 0 14 | has_lex_error.val = false 15 | } 16 | 17 | fn newline(pos : Int) -> Unit { 18 | pos_lnum.val += 1 19 | pos_bol.val = pos 20 | } 21 | 22 | fn newpos(pos : Int) -> Pos { 23 | { fname: pos_fname.val, lnum: pos_lnum.val, bol: pos_bol.val, cnum: pos } 24 | } 25 | 26 | fn record_lex_error(msg : String, start : Pos, end : Pos) -> Unit { 27 | println("Error:\{FI(start, end)} \{msg}") 28 | has_lex_error.val = true 29 | } 30 | 31 | let comment_depth : Ref[Int] = Ref::new(0) 32 | 33 | let string_builder : StringBuilder = StringBuilder::new() 34 | let string_start : Ref[Int] = Ref::new(0) 35 | } 36 | 37 | rule token() -> (Token, Position, Position) { 38 | parse { 39 | ('\r'* '\n') as t => { newline($endpos(t)); token(lexbuf) } 40 | [' ' '\t']+ => { token(lexbuf) } 41 | "*/" as t => { record_lex_error("unmatched end of comment", newpos($startpos(t)), newpos($endpos(t))); token(lexbuf) } 42 | "/*" => { comment_depth.val = 1; comment(lexbuf); token(lexbuf) } 43 | ['0'-'9']+ as t => { 44 | try { 45 | (INTV(@strconv.parse_int!(t)), newpos($startpos(t)), newpos($endpos(t))) 46 | } catch { 47 | StrConvError(_) => { 48 | record_lex_error("int literal invalid", newpos($startpos(t)), newpos($endpos(t))) 49 | token(lexbuf) 50 | } 51 | } 52 | } 53 | "else" as t => { (ELSE, newpos($startpos(t)), newpos($endpos(t))) } 54 | "false" as t => { (FALSE, newpos($startpos(t)), newpos($endpos(t))) } 55 | "if" as t => { (IF, newpos($startpos(t)), newpos($endpos(t))) } 56 | "import" as t => { (IMPORT, newpos($startpos(t)), newpos($endpos(t))) } 57 | "iszero" as t => { (ISZERO, newpos($startpos(t)), newpos($endpos(t))) } 58 | "pred" as t => { (PRED, newpos($startpos(t)), newpos($endpos(t))) } 59 | "succ" as t => { (SUCC, newpos($startpos(t)), newpos($endpos(t))) } 60 | "then" as t => { (THEN, newpos($startpos(t)), newpos($endpos(t))) } 61 | "true" as t => { (TRUE, newpos($startpos(t)), newpos($endpos(t))) } 62 | "(" as t => { (LPAREN, newpos($startpos(t)), newpos($endpos(t))) } 63 | ")" as t => { (RPAREN, newpos($startpos(t)), newpos($endpos(t))) } 64 | ";" as t => { (SEMI, newpos($startpos(t)), newpos($endpos(t))) } 65 | '"' as t => { string_builder.reset(); string_start.val = $startpos(t); string(lexbuf) } 66 | eof as t => { (EOF, newpos($startpos(t)), newpos($endpos(t))) } 67 | _ as t => { 68 | record_lex_error("unrecognized token", newpos($startpos(t)), newpos($endpos(t))) 69 | token(lexbuf) 70 | } 71 | } 72 | } 73 | 74 | rule comment() -> Unit { 75 | parse { 76 | "/*" => { comment_depth.val += 1; comment(lexbuf) } 77 | "*/" => { comment_depth.val -= 1; if comment_depth.val > 0 { comment(lexbuf) } } 78 | eof as t => { record_lex_error("comment not terminated", newpos($startpos(t)), newpos($endpos(t))) } 79 | [^ '\n'] 
=> { comment(lexbuf) } 80 | '\n' as t => { newline($endpos(t)); comment(lexbuf) } 81 | } 82 | } 83 | 84 | rule string() -> (Token, Position, Position) { 85 | parse { 86 | '"' as t => { (STRINGV(string_builder.to_string()), newpos(string_start.val), newpos($endpos(t))) } 87 | '\\' => { string_builder.write_string("\\\\"); string(lexbuf) } 88 | '\n' as t => { string_builder.write_string("\n"); newline($endpos(t)); string(lexbuf) } 89 | eof as t => { 90 | record_lex_error("string not terminated", newpos($startpos(t)), newpos($endpos(t))) 91 | (EOF, newpos($startpos(t)), newpos($endpos(t))) 92 | } 93 | _ as t => { string_builder.write_string(t); string(lexbuf) } 94 | } 95 | } 96 | 97 | { 98 | pub fn new_lexer(input : String) -> Lexer { 99 | Lexbuf::from_string(input) 100 | } 101 | 102 | pub fn next_token(self : Lexer) -> (Token, Position, Position) { 103 | token(self._) 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /chap08-tyarith/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(cmd : Command) -> Unit { 6 | match cmd { 7 | Import(fname) => process_file!(fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!() 10 | let t_prime = t.eval() 11 | println("\{t_prime.to_string()}\n : \{ty}") 12 | } 13 | } 14 | } 15 | 16 | ///| 17 | fn process_file!(fname : String) -> Unit { 18 | if already_imported.contains(fname) { 19 | () 20 | } else { 21 | already_imported.push(fname) 22 | let cmds = parse_file!(fname) 23 | loop cmds { 24 | Nil => break 25 | Cons(c, rest) => { 26 | process_command!(c) 27 | continue rest 28 | } 29 | } 30 | } 31 | } 32 | 33 | ///| 34 | fn parse_file!(fname : String) -> @immut/list.T[Command] { 35 | try { 36 | let code = @fs.read_file_to_string!(fname) 37 | let lexer = new_lexer(code) 38 | reset_lex(fname) 39 | let tokens = [] 40 | while true { 41 | let elem = lexer.next_token() 42 | tokens.push(elem) 43 | match elem.0 { 44 | EOF => break 45 | _ => continue 46 | } 47 | } 48 | let result = toplevel!(tokens) 49 | if has_lex_error.val { 50 | error_info!("") 51 | } else { 52 | result 53 | } 54 | } catch { 55 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 56 | ErrorWithInfo(_) as e => raise e 57 | UnexpectedToken(t, (start, end), _) => 58 | if has_lex_error.val { 59 | error_info!("") 60 | } else { 61 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 62 | } 63 | _ => panic() 64 | } 65 | } 66 | 67 | ///| 68 | fn main { 69 | let argv = @sys.get_cli_args()[2:] 70 | try { 71 | if argv.length() != 1 { 72 | error_info!("you must specify exactly one input file") 73 | } else { 74 | let fname = argv[0] 75 | process_file!(fname) 76 | } 77 | } catch { 78 | ErrorWithInfo((msg, info)) => 79 | if not(msg.is_empty()) { 80 | println("Error:\{info} \{msg}") 81 | } 82 | _ => panic() 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /chap08-tyarith/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | 
"output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap08-tyarith/parser.mbty: -------------------------------------------------------------------------------- 1 | %derive Token 2 | %derive ParseError 3 | %position 4 | %start toplevel 5 | 6 | %token EOF 7 | %token ELSE "else" 8 | %token FALSE "false" 9 | %token IF "if" 10 | %token IMPORT "import" 11 | %token ISZERO "iszero" 12 | %token PRED "pred" 13 | %token SUCC "succ" 14 | %token THEN "then" 15 | %token TRUE "true" 16 | %token INTV 17 | %token STRINGV 18 | %token LPAREN "(" 19 | %token RPAREN ")" 20 | %token SEMI ";" 21 | 22 | %type toplevel 23 | %type command 24 | %type term 25 | %type app_term 26 | %type atom_term 27 | 28 | %% 29 | 30 | toplevel 31 | : EOF { Nil } 32 | | command ";" toplevel { Cons($1, $3) } 33 | ; 34 | 35 | command 36 | : "import" STRINGV { Import($2) } 37 | | term { Eval(FI($startpos, $endpos), $1) } 38 | ; 39 | 40 | term 41 | : app_term { $1 } 42 | | "if" term "then" term "else" term { If(FI($startpos, $endpos), $2, $4, $6) } 43 | ; 44 | 45 | app_term 46 | : atom_term { $1 } 47 | | "succ" atom_term { Succ(FI($startpos, $endpos), $2) } 48 | | "pred" atom_term { Pred(FI($startpos, $endpos), $2) } 49 | | "iszero" atom_term { IsZero(FI($startpos, $endpos), $2) } 50 | ; 51 | 52 | atom_term 53 | : "(" term ")" { $2 } 54 | | "true" { True(FI($startpos, $endpos)) } 55 | | "false" { False(FI($startpos, $endpos)) } 56 | | INTV { 57 | let info = FI($startpos, $endpos) 58 | loop ($1, Zero(info)) { 59 | (0, acc) => break acc 60 | (n, acc) => continue (n - 1, Succ(info, acc)) 61 | } 62 | } 63 | ; 64 | -------------------------------------------------------------------------------- /chap08-tyarith/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = @immut/list.T[Command] 38 | 39 | ///| 40 | typealias ParseCommand = Command 41 | 42 | ///| 43 | typealias ParseTerm = Term 44 | -------------------------------------------------------------------------------- /chap08-tyarith/syntax.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | enum Type { 3 | Bool 4 | Nat 5 | } derive(Show, Eq) 6 | 7 | ///| 8 | enum Term { 9 | True(Info) 10 | False(Info) 11 | If(Info, Term, Term, Term) 12 | Zero(Info) 13 | Succ(Info, Term) 14 | Pred(Info, Term) 15 | IsZero(Info, Term) 16 | } 17 | 18 | ///| 19 | enum Command { 20 | Import(String) 21 | Eval(Info, Term) 22 | } 23 | 24 | ///| 25 | fn Term::to_string(self : Term) -> String { 26 | let logger = StringBuilder::new() 27 | self.output(logger) 28 | logger.to_string() 29 | } 30 | 31 | ///| 32 | fn Term::output(self : Term, logger : &Logger) -> Unit { 33 | match self { 34 | If(_, t1, t2, t3) => { 35 | logger.write_string("if ") 36 | t1.output(logger) 37 | logger.write_string(" then ") 38 | t2.output(logger) 39 | logger.write_string(" else") 40 | t3.output(logger) 41 | } 42 | _ => self.output_app(logger) 43 | } 44 | } 45 | 46 | ///| 47 | fn Term::output_app(self : Term, logger : &Logger) -> Unit { 48 | match self { 49 | Pred(_, t1) => { 50 | logger.write_string("pred ") 51 | t1.output_atom(logger) 52 | } 53 | IsZero(_, t1) => { 54 | logger.write_string("iszero ") 55 | t1.output_atom(logger) 56 | } 57 | _ => self.output_atom(logger) 58 | } 59 | } 60 | 61 | ///| 62 | fn Term::output_atom(self : Term, logger : &Logger) -> Unit { 63 | match self { 64 | True(_) => logger.write_string("true") 65 | False(_) => logger.write_string("false") 66 | Zero(_) => logger.write_string("0") 67 | Succ(_, t1) => 68 | loop (t1, 1) { 69 | (Zero(_), n) => { 70 | logger.write_string("\{n}") 71 | break 72 | } 73 | (Succ(_, s), n) => continue (s, n + 1) 74 | _ => { 75 | logger.write_string("(succ ") 76 | t1.output_atom(logger) 77 | logger.write_string(")") 78 | } 79 | } 80 | _ => { 81 | logger.write_string("(") 82 | self.output(logger) 83 | logger.write_string(")") 84 | } 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /chap08-tyarith/test.f: -------------------------------------------------------------------------------- 1 | /* Examples for testing */ 2 | 3 | true; 4 | if false then true else false; 5 | 6 | 42; 7 | succ (pred 0); 8 | iszero (pred (succ (succ 0))); 9 | -------------------------------------------------------------------------------- /chap09-simplebool/chap09-simplebool.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap09-simplebool 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | pub(all) struct LexEngine { 26 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 27 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 
28 | start_tags : Array[Int] 29 | code_blocks_n : Int 30 | } 31 | impl LexEngine { 32 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 33 | } 34 | 35 | type Lexbuf 36 | impl Lexbuf { 37 | from_string(String) -> Self 38 | } 39 | 40 | pub(all) type Lexer Lexbuf 41 | impl Lexer { 42 | next_token(Self) -> (Token, Pos, Pos) 43 | } 44 | 45 | type NoRuleApplies 46 | 47 | pub type! ParseError { 48 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 49 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 50 | } 51 | impl Show for ParseError 52 | 53 | type Pos 54 | 55 | type Term 56 | 57 | pub(all) enum Token { 58 | EOF 59 | ELSE 60 | FALSE 61 | IF 62 | IMPORT 63 | ISZERO 64 | LAMBDA 65 | PRED 66 | SUCC 67 | THEN 68 | TRUE 69 | UBOOL 70 | UNAT 71 | LCID(String) 72 | INTV(Int) 73 | STRINGV(String) 74 | ARROW 75 | COLON 76 | DOT 77 | EQ 78 | LPAREN 79 | RPAREN 80 | SEMI 81 | USCORE 82 | } 83 | impl Token { 84 | kind(Self) -> TokenKind 85 | } 86 | impl Show for Token 87 | 88 | pub(all) enum TokenKind { 89 | TK_EOF 90 | TK_ELSE 91 | TK_FALSE 92 | TK_IF 93 | TK_IMPORT 94 | TK_ISZERO 95 | TK_LAMBDA 96 | TK_PRED 97 | TK_SUCC 98 | TK_THEN 99 | TK_TRUE 100 | TK_UBOOL 101 | TK_UNAT 102 | TK_LCID 103 | TK_INTV 104 | TK_STRINGV 105 | TK_ARROW 106 | TK_COLON 107 | TK_DOT 108 | TK_EQ 109 | TK_LPAREN 110 | TK_RPAREN 111 | TK_SEMI 112 | TK_USCORE 113 | } 114 | impl Show for TokenKind 115 | 116 | type Type 117 | impl Eq for Type 118 | 119 | // Type aliases 120 | pub typealias Position = Pos 121 | 122 | // Traits 123 | 124 | -------------------------------------------------------------------------------- /chap09-simplebool/core.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | fn is_numerical(self : Term) -> Bool { 3 | match self { 4 | Zero(_) => true 5 | Succ(_, t1) => t1.is_numerical() 6 | _ => false 7 | } 8 | } 9 | 10 | ///| 11 | fn is_val(self : Term) -> Bool { 12 | match self { 13 | True(_) => true 14 | False(_) => true 15 | Abs(_) => true 16 | _ => self.is_numerical() 17 | } 18 | } 19 | 20 | ///| 21 | type! 
NoRuleApplies 22 | 23 | ///| 24 | fn eval1(self : Term, ctx : Context) -> Term!NoRuleApplies { 25 | match self { 26 | True(_) => raise NoRuleApplies 27 | False(_) => raise NoRuleApplies 28 | If(info, t1, t2, t3) => 29 | if t1.is_val() { 30 | match t1 { 31 | True(_) => t2 32 | False(_) => t3 33 | _ => raise NoRuleApplies 34 | } 35 | } else { 36 | let t1_prime = t1.eval1!(ctx) 37 | If(info, t1_prime, t2, t3) 38 | } 39 | Zero(_) => raise NoRuleApplies 40 | Succ(info, t1) => 41 | if t1.is_val() { 42 | raise NoRuleApplies 43 | } else { 44 | let t1_prime = t1.eval1!(ctx) 45 | Succ(info, t1_prime) 46 | } 47 | Pred(info, t1) => 48 | if t1.is_val() { 49 | match t1 { 50 | Zero(_) => Zero(UNKNOWN) 51 | Succ(_, t11) => 52 | if t11.is_numerical() { 53 | t11 54 | } else { 55 | raise NoRuleApplies 56 | } 57 | _ => raise NoRuleApplies 58 | } 59 | } else { 60 | let t1_prime = t1.eval1!(ctx) 61 | Pred(info, t1_prime) 62 | } 63 | IsZero(info, t1) => 64 | if t1.is_val() { 65 | match t1 { 66 | Zero(_) => True(UNKNOWN) 67 | Succ(_, t11) => 68 | if t11.is_numerical() { 69 | False(UNKNOWN) 70 | } else { 71 | raise NoRuleApplies 72 | } 73 | _ => raise NoRuleApplies 74 | } 75 | } else { 76 | let t1_prime = t1.eval1!(ctx) 77 | IsZero(info, t1_prime) 78 | } 79 | Var(_, x, _) => 80 | match ctx.get_binding(x) { 81 | TmAbb(t, _) => t 82 | _ => raise NoRuleApplies 83 | } 84 | Abs(_) => raise NoRuleApplies 85 | App(info, t1, t2) => 86 | if t1.is_val() { 87 | if t2.is_val() { 88 | match t1 { 89 | Abs(_, _, _, t12) => t12.subst_top(t2) 90 | _ => raise NoRuleApplies 91 | } 92 | } else { 93 | let t2_prime = t2.eval1!(ctx) 94 | App(info, t1, t2_prime) 95 | } 96 | } else { 97 | let t1_prime = t1.eval1!(ctx) 98 | App(info, t1_prime, t2) 99 | } 100 | } 101 | } 102 | 103 | ///| 104 | fn eval(self : Term, ctx : Context) -> Term { 105 | loop self { 106 | t => 107 | try { 108 | continue t.eval1!(ctx) 109 | } catch { 110 | NoRuleApplies => break t 111 | } 112 | } 113 | } 114 | 115 | ///| 116 | fn derive_type!(self : Term, ctx : Context) -> Type { 117 | match self { 118 | True(_) => Bool 119 | False(_) => Bool 120 | If(info, t1, t2, t3) => 121 | if t1.derive_type!(ctx) == Bool { 122 | let ty2 = t2.derive_type!(ctx) 123 | let ty3 = t3.derive_type!(ctx) 124 | if ty2 == ty3 { 125 | ty2 126 | } else { 127 | error_info!("arms of conditional have different types", info~) 128 | } 129 | } else { 130 | error_info!("guard of conditional not a boolean", info~) 131 | } 132 | Zero(_) => Nat 133 | Succ(info, t1) => 134 | if t1.derive_type!(ctx) == Nat { 135 | Nat 136 | } else { 137 | error_info!("argument of succ is not a number", info~) 138 | } 139 | Pred(info, t1) => 140 | if t1.derive_type!(ctx) == Nat { 141 | Nat 142 | } else { 143 | error_info!("argument of pred is not a number", info~) 144 | } 145 | IsZero(info, t1) => 146 | if t1.derive_type!(ctx) == Nat { 147 | Bool 148 | } else { 149 | error_info!("argument of iszero is not a number", info~) 150 | } 151 | Var(_, x, _) => ctx.get_type(x) 152 | Abs(_, x, ty1, t2) => { 153 | let ctx1 = ctx.add_binding(x, Var(ty1)) 154 | let ty2 = t2.derive_type!(ctx1) 155 | Arr(ty1, ty2) 156 | } 157 | App(info, t1, t2) => { 158 | let ty1 = t1.derive_type!(ctx) 159 | let ty2 = t2.derive_type!(ctx) 160 | match ty1 { 161 | Arr(ty11, ty12) => 162 | if ty2 == ty11 { 163 | ty12 164 | } else { 165 | error_info!("parameter type mismatch", info~) 166 | } 167 | _ => error_info!("arrow type expected", info~) 168 | } 169 | } 170 | } 171 | } 172 | 173 | ///| 174 | fn Binding::eval(self : Binding, ctx : Context) -> Binding 
{ 175 | match self { 176 | Name => Name 177 | TmAbb(t, ty_opt) => { 178 | let t_prime = t.eval(ctx) 179 | TmAbb(t_prime, ty_opt) 180 | } 181 | Var(ty) => Var(ty) 182 | } 183 | } 184 | 185 | ///| 186 | fn Binding::derive_type!(self : Binding, ctx : Context, info : Info) -> Binding { 187 | match self { 188 | Name => Name 189 | TmAbb(t, None) => { 190 | let ty = t.derive_type!(ctx) 191 | TmAbb(t, Some(ty)) 192 | } 193 | TmAbb(t, Some(ty)) => 194 | if t.derive_type!(ctx) == ty { 195 | TmAbb(t, Some(ty)) 196 | } else { 197 | error_info!("type of binding does not match declared type", info~) 198 | } 199 | Var(ty) => Var(ty) 200 | } 201 | } 202 | -------------------------------------------------------------------------------- /chap09-simplebool/lexer.mbtx: -------------------------------------------------------------------------------- 1 | { 2 | pub(all) type Lexer Lexbuf 3 | 4 | let pos_fname : Ref[String] = Ref::new("") 5 | let pos_lnum : Ref[Int] = Ref::new(1) 6 | let pos_bol : Ref[Int] = Ref::new(0) 7 | 8 | let has_lex_error : Ref[Bool] = Ref::new(false) 9 | 10 | fn reset_lex(fname : String) -> Unit { 11 | pos_fname.val = fname 12 | pos_lnum.val = 1 13 | pos_bol.val = 0 14 | has_lex_error.val = false 15 | } 16 | 17 | fn newline(pos : Int) -> Unit { 18 | pos_lnum.val += 1 19 | pos_bol.val = pos 20 | } 21 | 22 | fn newpos(pos : Int) -> Pos { 23 | { fname: pos_fname.val, lnum: pos_lnum.val, bol: pos_bol.val, cnum: pos } 24 | } 25 | 26 | fn record_lex_error(msg : String, start : Pos, end : Pos) -> Unit { 27 | println("Error:\{FI(start, end)} \{msg}") 28 | has_lex_error.val = true 29 | } 30 | 31 | let comment_depth : Ref[Int] = Ref::new(0) 32 | 33 | let string_builder : StringBuilder = StringBuilder::new() 34 | let string_start : Ref[Int] = Ref::new(0) 35 | } 36 | 37 | rule token() -> (Token, Position, Position) { 38 | parse { 39 | ('\r'* '\n') as t => { newline($endpos(t)); token(lexbuf) } 40 | [' ' '\t']+ => { token(lexbuf) } 41 | "*/" as t => { record_lex_error("unmatched end of comment", newpos($startpos(t)), newpos($endpos(t))); token(lexbuf) } 42 | "/*" => { comment_depth.val = 1; comment(lexbuf); token(lexbuf) } 43 | ['0'-'9']+ as t => { 44 | try { 45 | (INTV(@strconv.parse_int!(t)), newpos($startpos(t)), newpos($endpos(t))) 46 | } catch { 47 | StrConvError(_) => { 48 | record_lex_error("int literal invalid", newpos($startpos(t)), newpos($endpos(t))) 49 | token(lexbuf) 50 | } 51 | } 52 | } 53 | "else" as t => { (ELSE, newpos($startpos(t)), newpos($endpos(t))) } 54 | "false" as t => { (FALSE, newpos($startpos(t)), newpos($endpos(t))) } 55 | "if" as t => { (IF, newpos($startpos(t)), newpos($endpos(t))) } 56 | "import" as t => { (IMPORT, newpos($startpos(t)), newpos($endpos(t))) } 57 | "iszero" as t => { (ISZERO, newpos($startpos(t)), newpos($endpos(t))) } 58 | "lambda" as t => { (LAMBDA, newpos($startpos(t)), newpos($endpos(t))) } 59 | "pred" as t => { (PRED, newpos($startpos(t)), newpos($endpos(t))) } 60 | "succ" as t => { (SUCC, newpos($startpos(t)), newpos($endpos(t))) } 61 | "then" as t => { (THEN, newpos($startpos(t)), newpos($endpos(t))) } 62 | "true" as t => { (TRUE, newpos($startpos(t)), newpos($endpos(t))) } 63 | "Bool" as t => { (UBOOL, newpos($startpos(t)), newpos($endpos(t))) } 64 | "Nat" as t => { (UNAT, newpos($startpos(t)), newpos($endpos(t))) } 65 | (['a'-'z' '_'] ['A'-'Z' 'a'-'z' '_' '0'-'9' '\'']*) as t => { (LCID(t), newpos($startpos(t)), newpos($endpos(t))) } 66 | "->" as t => { (ARROW, newpos($startpos(t)), newpos($endpos(t))) } 67 | ":" as t => { (COLON, 
newpos($startpos(t)), newpos($endpos(t))) } 68 | "." as t => { (DOT, newpos($startpos(t)), newpos($endpos(t))) } 69 | "=" as t => { (EQ, newpos($startpos(t)), newpos($endpos(t))) } 70 | "(" as t => { (LPAREN, newpos($startpos(t)), newpos($endpos(t))) } 71 | ")" as t => { (RPAREN, newpos($startpos(t)), newpos($endpos(t))) } 72 | ";" as t => { (SEMI, newpos($startpos(t)), newpos($endpos(t))) } 73 | "_" as t => { (USCORE, newpos($startpos(t)), newpos($endpos(t))) } 74 | '"' as t => { string_builder.reset(); string_start.val = $startpos(t); string(lexbuf) } 75 | eof as t => { (EOF, newpos($startpos(t)), newpos($endpos(t))) } 76 | _ as t => { 77 | record_lex_error("unrecognized token", newpos($startpos(t)), newpos($endpos(t))) 78 | token(lexbuf) 79 | } 80 | } 81 | } 82 | 83 | rule comment() -> Unit { 84 | parse { 85 | "/*" => { comment_depth.val += 1; comment(lexbuf) } 86 | "*/" => { comment_depth.val -= 1; if comment_depth.val > 0 { comment(lexbuf) } } 87 | eof as t => { record_lex_error("comment not terminated", newpos($startpos(t)), newpos($endpos(t))) } 88 | [^ '\n'] => { comment(lexbuf) } 89 | '\n' as t => { newline($endpos(t)); comment(lexbuf) } 90 | } 91 | } 92 | 93 | rule string() -> (Token, Position, Position) { 94 | parse { 95 | '"' as t => { (STRINGV(string_builder.to_string()), newpos(string_start.val), newpos($endpos(t))) } 96 | '\\' => { string_builder.write_string("\\\\"); string(lexbuf) } 97 | '\n' as t => { string_builder.write_string("\n"); newline($endpos(t)); string(lexbuf) } 98 | eof as t => { 99 | record_lex_error("string not terminated", newpos($startpos(t)), newpos($endpos(t))) 100 | (EOF, newpos($startpos(t)), newpos($endpos(t))) 101 | } 102 | _ as t => { string_builder.write_string(t); string(lexbuf) } 103 | } 104 | } 105 | 106 | { 107 | pub fn new_lexer(input : String) -> Lexer { 108 | Lexbuf::from_string(input) 109 | } 110 | 111 | pub fn next_token(self : Lexer) -> (Token, Position, Position) { 112 | token(self._) 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /chap09-simplebool/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(ctx : Context, cmd : Command) -> Context { 6 | match cmd { 7 | Import(fname) => process_file!(ctx, fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!(ctx) 10 | let t_prime = t.eval(ctx) 11 | println("\{t_prime.to_string(ctx)}\n : \{ty.to_string()}") 12 | ctx 13 | } 14 | Bind(info, x, bind) => { 15 | let bind_checked = bind.derive_type!(ctx, info) 16 | let bind_prime = bind_checked.eval(ctx) 17 | println("\{x} \{bind_prime.to_string(ctx)}") 18 | ctx.add_binding(x, bind_prime) 19 | } 20 | } 21 | } 22 | 23 | ///| 24 | fn process_file!(ctx : Context, fname : String) -> Context { 25 | if already_imported.contains(fname) { 26 | ctx 27 | } else { 28 | already_imported.push(fname) 29 | let cmds = parse_file!(ctx, fname) 30 | loop (ctx, cmds) { 31 | (ctx, Nil) => break ctx 32 | (ctx, Cons(c, rest)) => continue (process_command!(ctx, c), rest) 33 | } 34 | } 35 | } 36 | 37 | ///| 38 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 39 | try { 40 | let code = @fs.read_file_to_string!(fname) 41 | let lexer = new_lexer(code) 42 | reset_lex(fname) 43 | let tokens = [] 44 | while true { 45 | let elem = lexer.next_token() 46 | tokens.push(elem) 47 | match elem.0 { 48 | EOF => break 49 | _ => continue 50 | } 51 | } 52 | let result = 
toplevel!(tokens) 53 | if has_lex_error.val { 54 | error_info!("") 55 | } else { 56 | result!(ctx).0 57 | } 58 | } catch { 59 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 60 | ErrorWithInfo(_) as e => raise e 61 | UnexpectedToken(t, (start, end), _) => 62 | if has_lex_error.val { 63 | error_info!("") 64 | } else { 65 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 66 | } 67 | _ => panic() 68 | } 69 | } 70 | 71 | ///| 72 | fn main { 73 | let argv = @sys.get_cli_args()[2:] 74 | try { 75 | if argv.length() != 1 { 76 | error_info!("you must specify exactly one input file") 77 | } else { 78 | let fname = argv[0] 79 | ignore(process_file!(Context::empty(), fname)) 80 | } 81 | } catch { 82 | ErrorWithInfo((msg, info)) => 83 | if not(msg.is_empty()) { 84 | println("Error:\{info} \{msg}") 85 | } 86 | _ => panic() 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /chap09-simplebool/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap09-simplebool/parser.mbty: -------------------------------------------------------------------------------- 1 | %derive Token 2 | %derive ParseError 3 | %position 4 | %start toplevel 5 | 6 | %token EOF 7 | %token ELSE "else" 8 | %token FALSE "false" 9 | %token IF "if" 10 | %token IMPORT "import" 11 | %token ISZERO "iszero" 12 | %token LAMBDA "lambda" 13 | %token PRED "pred" 14 | %token SUCC "succ" 15 | %token THEN "then" 16 | %token TRUE "true" 17 | %token UBOOL "Bool" 18 | %token UNAT "Nat" 19 | %token LCID 20 | %token INTV 21 | %token STRINGV 22 | %token ARROW "->" 23 | %token COLON ":" 24 | %token DOT "." 25 | %token EQ "=" 26 | %token LPAREN "(" 27 | %token RPAREN ")" 28 | %token SEMI ";" 29 | %token USCORE "_" 30 | 31 | %type toplevel 32 | %type command 33 | %type binder 34 | %type term 35 | %type app_term 36 | %type atom_term 37 | %type type 38 | %type atom_type 39 | 40 | %% 41 | 42 | toplevel 43 | : EOF { fn(ctx) { (Nil, ctx) } } 44 | | command ";" toplevel { 45 | fn(ctx) { 46 | let (cmd, ctx1) = $1!(ctx) 47 | let (cmds, ctx2) = $3!(ctx1) 48 | (Cons(cmd, cmds), ctx2) 49 | } 50 | } 51 | ; 52 | 53 | command 54 | : "import" STRINGV { fn(ctx) { (Import($2), ctx) } } 55 | | term { fn(ctx) { (Eval(FI($startpos, $endpos), $1!(ctx)), ctx) } } 56 | | LCID binder { fn(ctx) { (Bind(FI($startpos, $endpos), $1, $2!(ctx)), ctx.add_name($1)) } } 57 | ; 58 | 59 | binder 60 | : "=" term { fn(ctx) { TmAbb($2!(ctx), None) } } 61 | | ":" type { fn(ctx) { Var($2(ctx)) } } 62 | ; 63 | 64 | term 65 | : app_term { $1 } 66 | | "if" term "then" term "else" term { fn(ctx) { If(FI($startpos, $endpos), $2!(ctx), $4!(ctx), $6!(ctx)) } } 67 | | "lambda" LCID ":" type "." term { 68 | fn(ctx) { 69 | let ctx1 = ctx.add_name($2) 70 | Abs(FI($startpos, $endpos), $2, $4(ctx), $6!(ctx1)) 71 | } 72 | } 73 | | "lambda" "_" ":" type "." 
term { 74 | fn(ctx) { 75 | let ctx1 = ctx.add_name("_") 76 | Abs(FI($startpos, $endpos), "_", $4(ctx), $6!(ctx1)) 77 | } 78 | } 79 | ; 80 | 81 | app_term 82 | : atom_term { $1 } 83 | | "succ" atom_term { fn(ctx) { Succ(FI($startpos, $endpos), $2!(ctx)) } } 84 | | "pred" atom_term { fn(ctx) { Pred(FI($startpos, $endpos), $2!(ctx)) } } 85 | | "iszero" atom_term { fn(ctx) { IsZero(FI($startpos, $endpos), $2!(ctx)) } } 86 | | app_term atom_term { fn(ctx) { App(FI($startpos, $endpos), $1!(ctx), $2!(ctx)) } } 87 | ; 88 | 89 | atom_term 90 | : "(" term ")" { $2 } 91 | | "true" { fn(_) { True(FI($startpos, $endpos)) } } 92 | | "false" { fn(_) { False(FI($startpos, $endpos)) } } 93 | | INTV { 94 | fn(_) { 95 | let info = FI($startpos, $endpos) 96 | loop ($1, Zero(info)) { 97 | (0, acc) => break acc 98 | (n, acc) => continue (n - 1, Succ(info, acc)) 99 | } 100 | } 101 | } 102 | | LCID { fn(ctx) { Var(FI($startpos, $endpos), ctx.name_to_index!($1, FI($startpos, $endpos)), ctx.length()) } } 103 | ; 104 | 105 | type 106 | : atom_type { $1 } 107 | | atom_type "->" type { fn(ctx) { Arr($1(ctx), $3(ctx)) } } 108 | ; 109 | 110 | atom_type 111 | : "(" type ")" { $2 } 112 | | "Bool" { fn(_) { Bool } } 113 | | "Nat" { fn(_) { Nat } } 114 | ; 115 | -------------------------------------------------------------------------------- /chap09-simplebool/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseType = (Context) -> Type 50 | -------------------------------------------------------------------------------- /chap09-simplebool/test.f: -------------------------------------------------------------------------------- 1 | /* Examples for testing */ 2 | 3 | true; 4 | if false then true else false; 5 | 6 | 42; 7 | succ (pred 0); 8 | iszero (pred (succ (succ 0))); 9 | 10 | x : Bool; 11 | x; 12 | 13 | lambda x:Bool. x; 14 | (lambda x:Bool->Bool. if x false then true else false) 15 | (lambda x:Bool. if x then false else true); 16 | 17 | id = lambda x:Bool. x; 18 | id (id ((lambda z:Bool. id z) true)); 19 | 20 | tru = lambda t:Nat. lambda f:Nat. t; 21 | fls = lambda t:Nat. lambda f:Nat. f; 22 | 23 | test = lambda l:Nat->Nat->Nat. lambda m:Nat. lambda n:Nat. l m n; 24 | test tru 33 44; 25 | 26 | pair = lambda f:Nat. lambda s:Nat. lambda b:Nat->Nat->Nat. b f s; 27 | fst = lambda p:(Nat->Nat->Nat)->Nat. p tru; 28 | snd = lambda p:(Nat->Nat->Nat)->Nat. p fls; 29 | fst (pair 33 44); 30 | 31 | c0 = lambda s:Nat->Nat. lambda z:Nat. z; 32 | c1 = lambda s:Nat->Nat. 
lambda z:Nat. s z; 33 | c2 = lambda s:Nat->Nat. lambda z:Nat. s (s z); 34 | c3 = lambda s:Nat->Nat. lambda z:Nat. s (s (s z)); 35 | 36 | scc = lambda n:(Nat->Nat)->Nat->Nat. lambda s:Nat->Nat. lambda z:Nat. s (n s z); 37 | scc c1; 38 | 39 | plus = lambda m:(Nat->Nat)->Nat->Nat. lambda n:(Nat->Nat)->Nat->Nat. lambda s:Nat->Nat. lambda z:Nat. m s (n s z); 40 | plus c2 c2; 41 | 42 | realnat = lambda m:(Nat->Nat)->Nat->Nat. m (lambda x:Nat. succ x) 0; 43 | realnat c3; 44 | realnat (plus c2 c2); 45 | -------------------------------------------------------------------------------- /chap11-fullsimple/chap11-fullsimple.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap11-fullsimple 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | pub(all) struct LexEngine { 26 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 27 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 28 | start_tags : Array[Int] 29 | code_blocks_n : Int 30 | } 31 | impl LexEngine { 32 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 33 | } 34 | 35 | type Lexbuf 36 | impl Lexbuf { 37 | from_string(String) -> Self 38 | } 39 | 40 | pub(all) type Lexer Lexbuf 41 | impl Lexer { 42 | next_token(Self) -> (Token, Pos, Pos) 43 | } 44 | 45 | type NoRuleApplies 46 | 47 | pub type! ParseError { 48 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 49 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 50 | } 51 | impl Show for ParseError 52 | 53 | type Pos 54 | 55 | type Term 56 | 57 | pub(all) enum Token { 58 | EOF 59 | AS 60 | CASE 61 | ELSE 62 | FALSE 63 | FIX 64 | IF 65 | IMPORT 66 | IN 67 | ISZERO 68 | LAMBDA 69 | LETREC 70 | LET 71 | OF 72 | PRED 73 | SUCC 74 | THEN 75 | TIMESDOUBLE 76 | TRUE 77 | UNIT 78 | UBOOL 79 | UDOUBLE 80 | UNAT 81 | USTRING 82 | UUNIT 83 | LCID(String) 84 | UCID(String) 85 | INTV(Int) 86 | DOUBLEV(Double) 87 | STRINGV(String) 88 | DDARROW 89 | ARROW 90 | COLON 91 | COMMA 92 | DOT 93 | EQ 94 | GT 95 | LCURLY 96 | LPAREN 97 | LT 98 | RCURLY 99 | RPAREN 100 | SEMI 101 | USCORE 102 | VBAR 103 | } 104 | impl Token { 105 | kind(Self) -> TokenKind 106 | } 107 | impl Show for Token 108 | 109 | pub(all) enum TokenKind { 110 | TK_EOF 111 | TK_AS 112 | TK_CASE 113 | TK_ELSE 114 | TK_FALSE 115 | TK_FIX 116 | TK_IF 117 | TK_IMPORT 118 | TK_IN 119 | TK_ISZERO 120 | TK_LAMBDA 121 | TK_LETREC 122 | TK_LET 123 | TK_OF 124 | TK_PRED 125 | TK_SUCC 126 | TK_THEN 127 | TK_TIMESDOUBLE 128 | TK_TRUE 129 | TK_UNIT 130 | TK_UBOOL 131 | TK_UDOUBLE 132 | TK_UNAT 133 | TK_USTRING 134 | TK_UUNIT 135 | TK_LCID 136 | TK_UCID 137 | TK_INTV 138 | TK_DOUBLEV 139 | TK_STRINGV 140 | TK_DDARROW 141 | TK_ARROW 142 | TK_COLON 143 | TK_COMMA 144 | TK_DOT 145 | TK_EQ 146 | TK_GT 147 | TK_LCURLY 148 | TK_LPAREN 149 | TK_LT 150 | TK_RCURLY 151 | TK_RPAREN 152 | TK_SEMI 153 | TK_USCORE 154 | TK_VBAR 155 | } 156 | impl Show for TokenKind 157 | 158 | type Type 159 | 160 | // Type aliases 161 | pub typealias Position = Pos 162 | 163 | // Traits 164 | 165 | -------------------------------------------------------------------------------- 
/chap11-fullsimple/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(ctx : Context, cmd : Command) -> Context { 6 | match cmd { 7 | Import(fname) => process_file!(ctx, fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!(ctx) 10 | let t_prime = t.eval(ctx) 11 | println("\{t_prime.to_string(ctx)}\n : \{ty.to_string(ctx)}") 12 | ctx 13 | } 14 | Bind(info, x, bind) => { 15 | let bind_checked = bind.derive_type!(ctx, info) 16 | let bind_prime = bind_checked.eval(ctx) 17 | println("\{x} \{bind_prime.to_string(ctx)}") 18 | ctx.add_binding(x, bind_prime) 19 | } 20 | } 21 | } 22 | 23 | ///| 24 | fn process_file!(ctx : Context, fname : String) -> Context { 25 | if already_imported.contains(fname) { 26 | ctx 27 | } else { 28 | already_imported.push(fname) 29 | let cmds = parse_file!(ctx, fname) 30 | loop (ctx, cmds) { 31 | (ctx, Nil) => break ctx 32 | (ctx, Cons(c, rest)) => continue (process_command!(ctx, c), rest) 33 | } 34 | } 35 | } 36 | 37 | ///| 38 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 39 | try { 40 | let code = @fs.read_file_to_string!(fname) 41 | let lexer = new_lexer(code) 42 | reset_lex(fname) 43 | let tokens = [] 44 | while true { 45 | let elem = lexer.next_token() 46 | tokens.push(elem) 47 | match elem.0 { 48 | EOF => break 49 | _ => continue 50 | } 51 | } 52 | let result = toplevel!(tokens) 53 | if has_lex_error.val { 54 | error_info!("") 55 | } else { 56 | result!(ctx).0 57 | } 58 | } catch { 59 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 60 | ErrorWithInfo(_) as e => raise e 61 | UnexpectedToken(t, (start, end), _) => 62 | if has_lex_error.val { 63 | error_info!("") 64 | } else { 65 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 66 | } 67 | _ => panic() 68 | } 69 | } 70 | 71 | ///| 72 | fn main { 73 | let argv = @sys.get_cli_args()[2:] 74 | try { 75 | if argv.length() != 1 { 76 | error_info!("you must specify exactly one input file") 77 | } else { 78 | let fname = argv[0] 79 | ignore(process_file!(Context::empty(), fname)) 80 | } 81 | } catch { 82 | ErrorWithInfo((msg, info)) => 83 | if not(msg.is_empty()) { 84 | println("Error:\{info} \{msg}") 85 | } 86 | _ => panic() 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /chap11-fullsimple/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap11-fullsimple/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with 
output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseCases = (Context) -> @immut/list.T[(String, (String, Term))]!Error 50 | 51 | ///| 52 | typealias ParseCase = (Context) -> (String, (String, Term))!Error 53 | 54 | ///| 55 | typealias ParseLabelTerms = (Context, Int) -> @immut/list.T[(String, Term)]!Error 56 | 57 | ///| 58 | typealias ParseLabelTerm = (Context, Int) -> (String, Term)!Error 59 | 60 | ///| 61 | typealias ParseType = (Context) -> Type!Error 62 | 63 | ///| 64 | typealias ParseLabelTypes = (Context, Int) -> @immut/list.T[(String, Type)]!Error 65 | 66 | ///| 67 | typealias ParseLabelType = (Context, Int) -> (String, Type)!Error 68 | -------------------------------------------------------------------------------- /chap11-fullsimple/test.f: -------------------------------------------------------------------------------- 1 | /* Examples for testing */ 2 | 3 | true; 4 | if false then true else false; 5 | 6 | 42; 7 | succ (pred 0); 8 | iszero (pred (succ (succ 0))); 9 | 10 | x : Bool; 11 | x; 12 | 13 | lambda x:Bool. x; 14 | (lambda x:Bool->Bool. if x false then true else false) 15 | (lambda x:Bool. if x then false else true); 16 | 17 | id = lambda x:Bool. x; 18 | id (id ((lambda z:Bool. id z) true)); 19 | 20 | CBool = Nat->Nat->Nat; 21 | 22 | tru = (lambda t:Nat. lambda f:Nat. t) as CBool; 23 | fls = (lambda t:Nat. lambda f:Nat. f) as CBool; 24 | 25 | test = lambda l:CBool. lambda m:Nat. lambda n:Nat. l m n; 26 | test tru 33 44; 27 | 28 | CPair = CBool->Nat; 29 | 30 | pair = lambda f:Nat. lambda s:Nat. (lambda b:CBool. b f s) as CPair; 31 | fst = lambda p:CPair. p tru; 32 | snd = lambda p:CPair. p fls; 33 | fst (pair 33 44); 34 | 35 | CNat = (Nat->Nat)->Nat->Nat; 36 | 37 | c0 = (lambda s:Nat->Nat. lambda z:Nat. z) as CNat; 38 | c1 = (lambda s:Nat->Nat. lambda z:Nat. s z) as CNat; 39 | c2 = (lambda s:Nat->Nat. lambda z:Nat. s (s z)) as CNat; 40 | c3 = (lambda s:Nat->Nat. lambda z:Nat. s (s (s z))) as CNat; 41 | 42 | scc = lambda n:CNat. (lambda s:Nat->Nat. lambda z:Nat. s (n s z)) as CNat; 43 | scc c1; 44 | 45 | plus = lambda m:CNat. lambda n:CNat. (lambda s:Nat->Nat. lambda z:Nat. m s (n s z)) as CNat; 46 | plus c2 c2; 47 | 48 | realnat = lambda m:CNat. m (lambda x:Nat. succ x) 0; 49 | realnat c3; 50 | realnat (plus c2 c2); 51 | 52 | lambda x:A. x; 53 | lambda x:B. x; 54 | lambda f:A->A. lambda x:A. f(f(x)); 55 | 56 | unit; 57 | (unit; 42); 58 | 59 | UU = Unit->Unit; 60 | (lambda f:UU. f unit) (lambda x:Unit. x); 61 | (lambda f:Unit->Unit. f) as UU->UU; 62 | 63 | {pred 4, if true then false else false}; 64 | {pred 4, if true then false else false}.1; 65 | 66 | (lambda x:{Nat, Nat}. 
x.2) {pred 4, pred 5}; 67 | 68 | {partno=55, cost=30}; 69 | {cost=30, partno=55}; 70 | 71 | PhysicalAddr = {firstlast:String, addr:String}; 72 | VirtualAddr = {name:String, email:String}; 73 | 74 | pa = {firstlast="AC", addr="EG"}; 75 | va = {name="AC", email="AC@edu"}; 76 | 77 | Addr = <physical:PhysicalAddr, virtual:VirtualAddr>; 78 | a = <physical=pa> as Addr; 79 | 80 | getName = lambda a:Addr. 81 | case a of 82 | <physical=x> ==> x.firstlast 83 | | <virtual=y> ==> y.name; 84 | 85 | equal = fix (lambda eq:Nat->Nat->Bool. lambda x:Nat. lambda y:Nat. 86 | if iszero(x) then 87 | if iszero(y) then true 88 | else false 89 | else 90 | if iszero(y) then false 91 | else eq (pred x) (pred y) 92 | ); 93 | 94 | OptionNat = <none:Unit, some:Nat>; 95 | Table = Nat->OptionNat; 96 | emptyTable = lambda n:Nat. <none=unit> as OptionNat; 97 | extendTable = 98 | lambda t:Table. lambda m:Nat. lambda v:Nat. 99 | (lambda n:Nat. 100 | if equal n m then <some=v> as OptionNat 101 | else t n) as Table; 102 | 103 | Weekday = <monday:Unit, tuesday:Unit, wednesday:Unit, thursday:Unit, friday:Unit>; 104 | nextBusinessDay = lambda w:Weekday. 105 | case w of <monday=x> ==> <tuesday=unit> as Weekday 106 | | <tuesday=x> ==> <wednesday=unit> as Weekday 107 | | <wednesday=x> ==> <thursday=unit> as Weekday 108 | | <thursday=x> ==> <friday=unit> as Weekday 109 | | <friday=x> ==> <monday=unit> as Weekday; 110 | 111 | nextBusinessDay (<monday=unit> as Weekday); 112 | nextBusinessDay (<friday=unit> as Weekday); 113 | 114 | DollarAmount = <dollars:Double>; 115 | EuroAmount = <euros:Double>; 116 | 117 | dollars2euros = 118 | lambda d:DollarAmount. 119 | case d of <dollars=x> ==> <euros=timesdouble x 1.1325> as EuroAmount; 120 | euros2dollars = 121 | lambda e:EuroAmount. 122 | case e of <euros=x> ==> <dollars=timesdouble x 0.883> as DollarAmount; 123 | 124 | mybankbalance = <dollars=39.50> as DollarAmount; 125 | euros2dollars (dollars2euros mybankbalance); 126 | 127 | ff = 128 | lambda ie:Nat->Bool. 129 | lambda x:Nat. 130 | if iszero x then true 131 | else if iszero (pred x) then false 132 | else ie (pred (pred x)); 133 | iseven = fix ff; 134 | iseven 7; 135 | 136 | ff = 137 | lambda ieio:{iseven:Nat->Bool, isodd:Nat->Bool}. 138 | {iseven = lambda x:Nat. if iszero x then true else ieio.isodd (pred x), 139 | isodd = lambda x:Nat. if iszero x then false else ieio.iseven (pred x)}; 140 | r = fix ff; 141 | iseven = r.iseven; 142 | iseven 7; 143 | 144 | diverge = lambda _:Unit. fix (lambda x:T. x); 145 | -------------------------------------------------------------------------------- /chap13-simpleref/chap13-simpleref.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap13-simpleref 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | pub(all) struct LexEngine { 26 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 27 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 28 | start_tags : Array[Int] 29 | code_blocks_n : Int 30 | } 31 | impl LexEngine { 32 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 33 | } 34 | 35 | type Lexbuf 36 | impl Lexbuf { 37 | from_string(String) -> Self 38 | } 39 | 40 | pub(all) type Lexer Lexbuf 41 | impl Lexer { 42 | next_token(Self) -> (Token, Pos, Pos) 43 | } 44 | 45 | type NoRuleApplies 46 | 47 | pub type!
ParseError { 48 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 49 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 50 | } 51 | impl Show for ParseError 52 | 53 | type Pos 54 | 55 | type Store 56 | 57 | type Term 58 | 59 | pub(all) enum Token { 60 | EOF 61 | ELSE 62 | FALSE 63 | FIX 64 | IF 65 | IMPORT 66 | ISZERO 67 | LAMBDA 68 | PRED 69 | REF 70 | SUCC 71 | THEN 72 | TRUE 73 | UNIT 74 | UBOOL 75 | UNAT 76 | UREF 77 | UUNIT 78 | LCID(String) 79 | INTV(Int) 80 | STRINGV(String) 81 | ARROW 82 | COLONEQ 83 | BANG 84 | COLON 85 | DOT 86 | EQ 87 | LPAREN 88 | RPAREN 89 | SEMI 90 | USCORE 91 | } 92 | impl Token { 93 | kind(Self) -> TokenKind 94 | } 95 | impl Show for Token 96 | 97 | pub(all) enum TokenKind { 98 | TK_EOF 99 | TK_ELSE 100 | TK_FALSE 101 | TK_FIX 102 | TK_IF 103 | TK_IMPORT 104 | TK_ISZERO 105 | TK_LAMBDA 106 | TK_PRED 107 | TK_REF 108 | TK_SUCC 109 | TK_THEN 110 | TK_TRUE 111 | TK_UNIT 112 | TK_UBOOL 113 | TK_UNAT 114 | TK_UREF 115 | TK_UUNIT 116 | TK_LCID 117 | TK_INTV 118 | TK_STRINGV 119 | TK_ARROW 120 | TK_COLONEQ 121 | TK_BANG 122 | TK_COLON 123 | TK_DOT 124 | TK_EQ 125 | TK_LPAREN 126 | TK_RPAREN 127 | TK_SEMI 128 | TK_USCORE 129 | } 130 | impl Show for TokenKind 131 | 132 | type Type 133 | impl Eq for Type 134 | 135 | // Type aliases 136 | pub typealias Position = Pos 137 | 138 | // Traits 139 | 140 | -------------------------------------------------------------------------------- /chap13-simpleref/lexer.mbtx: -------------------------------------------------------------------------------- 1 | { 2 | pub(all) type Lexer Lexbuf 3 | 4 | let pos_fname : Ref[String] = Ref::new("") 5 | let pos_lnum : Ref[Int] = Ref::new(1) 6 | let pos_bol : Ref[Int] = Ref::new(0) 7 | 8 | let has_lex_error : Ref[Bool] = Ref::new(false) 9 | 10 | fn reset_lex(fname : String) -> Unit { 11 | pos_fname.val = fname 12 | pos_lnum.val = 1 13 | pos_bol.val = 0 14 | has_lex_error.val = false 15 | } 16 | 17 | fn newline(pos : Int) -> Unit { 18 | pos_lnum.val += 1 19 | pos_bol.val = pos 20 | } 21 | 22 | fn newpos(pos : Int) -> Pos { 23 | { fname: pos_fname.val, lnum: pos_lnum.val, bol: pos_bol.val, cnum: pos } 24 | } 25 | 26 | fn record_lex_error(msg : String, start : Pos, end : Pos) -> Unit { 27 | println("Error:\{FI(start, end)} \{msg}") 28 | has_lex_error.val = true 29 | } 30 | 31 | let comment_depth : Ref[Int] = Ref::new(0) 32 | 33 | let string_builder : StringBuilder = StringBuilder::new() 34 | let string_start : Ref[Int] = Ref::new(0) 35 | } 36 | 37 | rule token() -> (Token, Position, Position) { 38 | parse { 39 | ('\r'* '\n') as t => { newline($endpos(t)); token(lexbuf) } 40 | [' ' '\t']+ => { token(lexbuf) } 41 | "*/" as t => { record_lex_error("unmatched end of comment", newpos($startpos(t)), newpos($endpos(t))); token(lexbuf) } 42 | "/*" => { comment_depth.val = 1; comment(lexbuf); token(lexbuf) } 43 | ['0'-'9']+ as t => { 44 | try { 45 | (INTV(@strconv.parse_int!(t)), newpos($startpos(t)), newpos($endpos(t))) 46 | } catch { 47 | StrConvError(_) => { 48 | record_lex_error("int literal invalid", newpos($startpos(t)), newpos($endpos(t))) 49 | token(lexbuf) 50 | } 51 | } 52 | } 53 | "else" as t => { (ELSE, newpos($startpos(t)), newpos($endpos(t))) } 54 | "false" as t => { (FALSE, newpos($startpos(t)), newpos($endpos(t))) } 55 | "fix" as t => { (FIX, newpos($startpos(t)), newpos($endpos(t))) } 56 | "if" as t => { (IF, newpos($startpos(t)), newpos($endpos(t))) } 57 | "import" as t => { (IMPORT, newpos($startpos(t)), newpos($endpos(t))) } 58 | "iszero" as t => { (ISZERO, newpos($startpos(t)), 
newpos($endpos(t))) } 59 | "lambda" as t => { (LAMBDA, newpos($startpos(t)), newpos($endpos(t))) } 60 | "pred" as t => { (PRED, newpos($startpos(t)), newpos($endpos(t))) } 61 | "ref" as t => { (REF, newpos($startpos(t)), newpos($endpos(t))) } 62 | "succ" as t => { (SUCC, newpos($startpos(t)), newpos($endpos(t))) } 63 | "then" as t => { (THEN, newpos($startpos(t)), newpos($endpos(t))) } 64 | "true" as t => { (TRUE, newpos($startpos(t)), newpos($endpos(t))) } 65 | "unit" as t => { (UNIT, newpos($startpos(t)), newpos($endpos(t))) } 66 | "Bool" as t => { (UBOOL, newpos($startpos(t)), newpos($endpos(t))) } 67 | "Nat" as t => { (UNAT, newpos($startpos(t)), newpos($endpos(t))) } 68 | "Ref" as t => { (UREF, newpos($startpos(t)), newpos($endpos(t))) } 69 | "Unit" as t => { (UUNIT, newpos($startpos(t)), newpos($endpos(t))) } 70 | (['a'-'z' '_'] ['A'-'Z' 'a'-'z' '_' '0'-'9' '\'']*) as t => { (LCID(t), newpos($startpos(t)), newpos($endpos(t))) } 71 | "->" as t => { (ARROW, newpos($startpos(t)), newpos($endpos(t))) } 72 | ":=" as t => { (COLONEQ, newpos($startpos(t)), newpos($endpos(t))) } 73 | "!" as t => { (BANG, newpos($startpos(t)), newpos($endpos(t))) } 74 | ":" as t => { (COLON, newpos($startpos(t)), newpos($endpos(t))) } 75 | "." as t => { (DOT, newpos($startpos(t)), newpos($endpos(t))) } 76 | "=" as t => { (EQ, newpos($startpos(t)), newpos($endpos(t))) } 77 | "(" as t => { (LPAREN, newpos($startpos(t)), newpos($endpos(t))) } 78 | ")" as t => { (RPAREN, newpos($startpos(t)), newpos($endpos(t))) } 79 | ";" as t => { (SEMI, newpos($startpos(t)), newpos($endpos(t))) } 80 | "_" as t => { (USCORE, newpos($startpos(t)), newpos($endpos(t))) } 81 | '"' as t => { string_builder.reset(); string_start.val = $startpos(t); string(lexbuf) } 82 | eof as t => { (EOF, newpos($startpos(t)), newpos($endpos(t))) } 83 | _ as t => { 84 | record_lex_error("unrecognized token", newpos($startpos(t)), newpos($endpos(t))) 85 | token(lexbuf) 86 | } 87 | } 88 | } 89 | 90 | rule comment() -> Unit { 91 | parse { 92 | "/*" => { comment_depth.val += 1; comment(lexbuf) } 93 | "*/" => { comment_depth.val -= 1; if comment_depth.val > 0 { comment(lexbuf) } } 94 | eof as t => { record_lex_error("comment not terminated", newpos($startpos(t)), newpos($endpos(t))) } 95 | [^ '\n'] => { comment(lexbuf) } 96 | '\n' as t => { newline($endpos(t)); comment(lexbuf) } 97 | } 98 | } 99 | 100 | rule string() -> (Token, Position, Position) { 101 | parse { 102 | '"' as t => { (STRINGV(string_builder.to_string()), newpos(string_start.val), newpos($endpos(t))) } 103 | '\\' => { string_builder.write_string("\\\\"); string(lexbuf) } 104 | '\n' as t => { string_builder.write_string("\n"); newline($endpos(t)); string(lexbuf) } 105 | eof as t => { 106 | record_lex_error("string not terminated", newpos($startpos(t)), newpos($endpos(t))) 107 | (EOF, newpos($startpos(t)), newpos($endpos(t))) 108 | } 109 | _ as t => { string_builder.write_string(t); string(lexbuf) } 110 | } 111 | } 112 | 113 | { 114 | pub fn new_lexer(input : String) -> Lexer { 115 | Lexbuf::from_string(input) 116 | } 117 | 118 | pub fn next_token(self : Lexer) -> (Token, Position, Position) { 119 | token(self._) 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /chap13-simpleref/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(ctx : Context, store : Store, cmd : Command) -> Context { 6 | match cmd { 7 | 
Import(fname) => process_file!(ctx, store, fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!(ctx) 10 | let t_prime = t.eval(ctx, store) 11 | println("\{t_prime.to_string(ctx)}\n : \{ty.to_string()}") 12 | ctx 13 | } 14 | Bind(info, x, bind) => { 15 | let bind_checked = bind.derive_type!(ctx, info) 16 | let bind_prime = bind_checked.eval(ctx, store) 17 | println("\{x} \{bind_prime.to_string(ctx)}") 18 | ctx.add_binding(x, bind_prime) 19 | } 20 | } 21 | } 22 | 23 | ///| 24 | fn process_file!(ctx : Context, store : Store, fname : String) -> Context { 25 | if already_imported.contains(fname) { 26 | ctx 27 | } else { 28 | already_imported.push(fname) 29 | let cmds = parse_file!(ctx, fname) 30 | loop (ctx, cmds) { 31 | (ctx, Nil) => break ctx 32 | (ctx, Cons(c, rest)) => continue (process_command!(ctx, store, c), rest) 33 | } 34 | } 35 | } 36 | 37 | ///| 38 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 39 | try { 40 | let code = @fs.read_file_to_string!(fname) 41 | let lexer = new_lexer(code) 42 | reset_lex(fname) 43 | let tokens = [] 44 | while true { 45 | let elem = lexer.next_token() 46 | tokens.push(elem) 47 | match elem.0 { 48 | EOF => break 49 | _ => continue 50 | } 51 | } 52 | let result = toplevel!(tokens) 53 | if has_lex_error.val { 54 | error_info!("") 55 | } else { 56 | result!(ctx).0 57 | } 58 | } catch { 59 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 60 | ErrorWithInfo(_) as e => raise e 61 | UnexpectedToken(t, (start, end), _) => 62 | if has_lex_error.val { 63 | error_info!("") 64 | } else { 65 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 66 | } 67 | _ => panic() 68 | } 69 | } 70 | 71 | ///| 72 | fn main { 73 | let argv = @sys.get_cli_args()[2:] 74 | try { 75 | if argv.length() != 1 { 76 | error_info!("you must specify exactly one input file") 77 | } else { 78 | let fname = argv[0] 79 | ignore(process_file!(Context::empty(), Store::new(), fname)) 80 | } 81 | } catch { 82 | ErrorWithInfo((msg, info)) => 83 | if not(msg.is_empty()) { 84 | println("Error:\{info} \{msg}") 85 | } 86 | _ => panic() 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /chap13-simpleref/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap13-simpleref/parser.mbty: -------------------------------------------------------------------------------- 1 | %derive Token 2 | %derive ParseError 3 | %position 4 | %start toplevel 5 | 6 | %token EOF 7 | %token ELSE "else" 8 | %token FALSE "false" 9 | %token FIX "fix" 10 | %token IF "if" 11 | %token IMPORT "import" 12 | %token ISZERO "iszero" 13 | %token LAMBDA "lambda" 14 | %token PRED "pred" 15 | %token REF "ref" 16 | %token SUCC "succ" 17 | %token THEN "then" 18 | %token TRUE "true" 19 | %token UNIT "unit" 20 | %token UBOOL "Bool" 21 | %token UNAT "Nat" 22 | %token UREF "Ref" 23 | %token UUNIT "Unit" 24 | %token LCID 25 | %token INTV 26 | %token STRINGV 27 | %token ARROW "->" 
28 | %token COLONEQ ":=" 29 | %token BANG "!" 30 | %token COLON ":" 31 | %token DOT "." 32 | %token EQ "=" 33 | %token LPAREN "(" 34 | %token RPAREN ")" 35 | %token SEMI ";" 36 | %token USCORE "_" 37 | 38 | %type toplevel 39 | %type command 40 | %type binder 41 | %type term 42 | %type app_term 43 | %type term_seq 44 | %type atom_term 45 | %type type 46 | %type arr_type 47 | %type atom_type 48 | 49 | %% 50 | 51 | toplevel 52 | : EOF { fn(ctx) { (Nil, ctx) } } 53 | | command ";" toplevel { 54 | fn(ctx) { 55 | let (cmd, ctx1) = $1!(ctx) 56 | let (cmds, ctx2) = $3!(ctx1) 57 | (Cons(cmd, cmds), ctx2) 58 | } 59 | } 60 | ; 61 | 62 | command 63 | : "import" STRINGV { fn(ctx) { (Import($2), ctx) } } 64 | | term { fn(ctx) { (Eval(FI($startpos, $endpos), $1!(ctx)), ctx) } } 65 | | LCID binder { fn(ctx) { (Bind(FI($startpos, $endpos), $1, $2!(ctx)), ctx.add_name($1)) } } 66 | ; 67 | 68 | binder 69 | : "=" term { fn(ctx) { TmAbb($2!(ctx), None) } } 70 | | ":" type { fn(ctx) { Var($2(ctx)) } } 71 | ; 72 | 73 | term 74 | : app_term { $1 } 75 | | "if" term "then" term "else" term { fn(ctx) { If(FI($startpos, $endpos), $2!(ctx), $4!(ctx), $6!(ctx)) } } 76 | | "lambda" LCID ":" type "." term { 77 | fn(ctx) { 78 | let ctx1 = ctx.add_name($2) 79 | Abs(FI($startpos, $endpos), $2, $4(ctx), $6!(ctx1)) 80 | } 81 | } 82 | | "lambda" "_" ":" type "." term { 83 | fn(ctx) { 84 | let ctx1 = ctx.add_name("_") 85 | Abs(FI($startpos, $endpos), "_", $4(ctx), $6!(ctx1)) 86 | } 87 | } 88 | | app_term ":=" app_term { fn(ctx) { Assign(FI($startpos, $endpos), $1!(ctx), $3!(ctx)) } } 89 | ; 90 | 91 | app_term 92 | : atom_term { $1 } 93 | | "succ" atom_term { fn(ctx) { Succ(FI($startpos, $endpos), $2!(ctx)) } } 94 | | "pred" atom_term { fn(ctx) { Pred(FI($startpos, $endpos), $2!(ctx)) } } 95 | | "iszero" atom_term { fn(ctx) { IsZero(FI($startpos, $endpos), $2!(ctx)) } } 96 | | app_term atom_term { fn(ctx) { App(FI($startpos, $endpos), $1!(ctx), $2!(ctx)) } } 97 | | "fix" atom_term { fn(ctx) { Fix(FI($startpos, $endpos), $2!(ctx)) } } 98 | | "ref" atom_term { fn(ctx) { Ref(FI($startpos, $endpos), $2!(ctx)) } } 99 | | "!" 
atom_term { fn(ctx) { Deref(FI($startpos, $endpos), $2!(ctx)) } } 100 | ; 101 | 102 | term_seq 103 | : term { $1 } 104 | | term ";" term_seq { 105 | fn(ctx) { 106 | let info = FI($startpos, $endpos) 107 | App(info, Abs(info, "_", Unit, $3!(ctx.add_name("_"))), $1!(ctx)) 108 | } 109 | } 110 | ; 111 | 112 | atom_term 113 | : "(" term_seq ")" { $2 } 114 | | "true" { fn(_) { True(FI($startpos, $endpos)) } } 115 | | "false" { fn(_) { False(FI($startpos, $endpos)) } } 116 | | INTV { 117 | fn(_) { 118 | let info = FI($startpos, $endpos) 119 | loop ($1, Zero(info)) { 120 | (0, acc) => break acc 121 | (n, acc) => continue (n - 1, Succ(info, acc)) 122 | } 123 | } 124 | } 125 | | LCID { fn(ctx) { Var(FI($startpos, $endpos), ctx.name_to_index!($1, FI($startpos, $endpos)), ctx.length()) } } 126 | | "unit" { fn(_) { Unit(FI($startpos, $endpos)) } } 127 | ; 128 | 129 | type 130 | : arr_type { $1 } 131 | | "Ref" atom_type { fn(ctx) { Ref($2(ctx)) } } 132 | ; 133 | 134 | arr_type 135 | : atom_type { $1 } 136 | | atom_type "->" arr_type { fn(ctx) { Arr($1(ctx), $3(ctx)) } } 137 | ; 138 | 139 | atom_type 140 | : "(" type ")" { $2 } 141 | | "Bool" { fn(_) { Bool } } 142 | | "Nat" { fn(_) { Nat } } 143 | | "Unit" { fn(_) { Unit } } 144 | ; 145 | -------------------------------------------------------------------------------- /chap13-simpleref/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseType = (Context) -> Type 50 | -------------------------------------------------------------------------------- /chap13-simpleref/test.f: -------------------------------------------------------------------------------- 1 | /* Examples for testing */ 2 | 3 | true; 4 | if false then true else false; 5 | 6 | 42; 7 | succ (pred 0); 8 | iszero (pred (succ (succ 0))); 9 | 10 | x : Bool; 11 | x; 12 | 13 | lambda x:Bool. x; 14 | (lambda x:Bool->Bool. if x false then true else false) 15 | (lambda x:Bool. if x then false else true); 16 | 17 | id = lambda x:Bool. x; 18 | id (id ((lambda z:Bool. id z) true)); 19 | 20 | tru = lambda t:Nat. lambda f:Nat. t; 21 | fls = lambda t:Nat. lambda f:Nat. f; 22 | 23 | test = lambda l:Nat->Nat->Nat. lambda m:Nat. lambda n:Nat. l m n; 24 | test tru 33 44; 25 | 26 | pair = lambda f:Nat. lambda s:Nat. lambda b:Nat->Nat->Nat. b f s; 27 | fst = lambda p:(Nat->Nat->Nat)->Nat. p tru; 28 | snd = lambda p:(Nat->Nat->Nat)->Nat. p fls; 29 | fst (pair 33 44); 30 | 31 | c0 = lambda s:Nat->Nat. lambda z:Nat. z; 32 | c1 = lambda s:Nat->Nat. 
lambda z:Nat. s z; 33 | c2 = lambda s:Nat->Nat. lambda z:Nat. s (s z); 34 | c3 = lambda s:Nat->Nat. lambda z:Nat. s (s (s z)); 35 | 36 | scc = lambda n:(Nat->Nat)->Nat->Nat. lambda s:Nat->Nat. lambda z:Nat. s (n s z); 37 | scc c1; 38 | 39 | plus = lambda m:(Nat->Nat)->Nat->Nat. lambda n:(Nat->Nat)->Nat->Nat. lambda s:Nat->Nat. lambda z:Nat. m s (n s z); 40 | plus c2 c2; 41 | 42 | realnat = lambda m:(Nat->Nat)->Nat->Nat. m (lambda x:Nat. succ x) 0; 43 | realnat c3; 44 | realnat (plus c2 c2); 45 | 46 | r = ref 5; 47 | !r; 48 | r := 7; 49 | !r; 50 | 51 | (r := succ(!r); !r); 52 | (lambda _:Unit. !r) (r := succ(!r)); 53 | 54 | (r := succ(!r); r := succ(!r); r := succ(!r); r := succ(!r); !r); 55 | 56 | s = r; 57 | s := 82; 58 | !r; 59 | 60 | c = ref 0; 61 | incc = lambda x:Unit. (c := succ(!c); !c); 62 | decc = lambda x:Unit. (c := pred(!c); !c); 63 | 64 | incc unit; 65 | decc unit; 66 | 67 | equal = fix (lambda eq:Nat->Nat->Bool. lambda x:Nat. lambda y:Nat. 68 | if iszero(x) then 69 | if iszero(y) then true 70 | else false 71 | else 72 | if iszero(y) then false 73 | else eq (pred x) (pred y) 74 | ); 75 | 76 | newarray = lambda _:Unit. ref (lambda n:Nat. 0); 77 | lookup = lambda a:Ref(Nat->Nat). lambda n:Nat. (!a) n; 78 | update = 79 | lambda a:Ref(Nat->Nat). lambda m:Nat. lambda v:Nat. 80 | (lambda oldf:Nat->Nat. a := (lambda n:Nat. if equal m n then v else oldf n)) (!a); 81 | 82 | a = newarray unit; 83 | lookup a 0; 84 | lookup a 2; 85 | update a 0 5; 86 | update a 1 4; 87 | update a 2 3; 88 | lookup a 0; 89 | lookup a 1; 90 | lookup a 2; 91 | -------------------------------------------------------------------------------- /chap14-simpleerror/chap14-simpleerror.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap14-simpleerror 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | pub(all) struct LexEngine { 26 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 27 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 28 | start_tags : Array[Int] 29 | code_blocks_n : Int 30 | } 31 | impl LexEngine { 32 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 33 | } 34 | 35 | type Lexbuf 36 | impl Lexbuf { 37 | from_string(String) -> Self 38 | } 39 | 40 | pub(all) type Lexer Lexbuf 41 | impl Lexer { 42 | next_token(Self) -> (Token, Pos, Pos) 43 | } 44 | 45 | type NoRuleApplies 46 | 47 | pub type! 
ParseError { 48 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 49 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 50 | } 51 | impl Show for ParseError 52 | 53 | type Pos 54 | 55 | type Term 56 | 57 | pub(all) enum Token { 58 | EOF 59 | ELSE 60 | ERROR 61 | FALSE 62 | IF 63 | IMPORT 64 | ISZERO 65 | LAMBDA 66 | PRED 67 | SUCC 68 | THEN 69 | TRUE 70 | TRY 71 | WITH 72 | UBOOL 73 | UBOT 74 | UNAT 75 | UTOP 76 | LCID(String) 77 | INTV(Int) 78 | STRINGV(String) 79 | ARROW 80 | COLON 81 | DOT 82 | EQ 83 | LPAREN 84 | RPAREN 85 | SEMI 86 | USCORE 87 | } 88 | impl Token { 89 | kind(Self) -> TokenKind 90 | } 91 | impl Show for Token 92 | 93 | pub(all) enum TokenKind { 94 | TK_EOF 95 | TK_ELSE 96 | TK_ERROR 97 | TK_FALSE 98 | TK_IF 99 | TK_IMPORT 100 | TK_ISZERO 101 | TK_LAMBDA 102 | TK_PRED 103 | TK_SUCC 104 | TK_THEN 105 | TK_TRUE 106 | TK_TRY 107 | TK_WITH 108 | TK_UBOOL 109 | TK_UBOT 110 | TK_UNAT 111 | TK_UTOP 112 | TK_LCID 113 | TK_INTV 114 | TK_STRINGV 115 | TK_ARROW 116 | TK_COLON 117 | TK_DOT 118 | TK_EQ 119 | TK_LPAREN 120 | TK_RPAREN 121 | TK_SEMI 122 | TK_USCORE 123 | } 124 | impl Show for TokenKind 125 | 126 | type Type 127 | impl Eq for Type 128 | 129 | // Type aliases 130 | pub typealias Position = Pos 131 | 132 | // Traits 133 | 134 | -------------------------------------------------------------------------------- /chap14-simpleerror/lexer.mbtx: -------------------------------------------------------------------------------- 1 | { 2 | pub(all) type Lexer Lexbuf 3 | 4 | let pos_fname : Ref[String] = Ref::new("") 5 | let pos_lnum : Ref[Int] = Ref::new(1) 6 | let pos_bol : Ref[Int] = Ref::new(0) 7 | 8 | let has_lex_error : Ref[Bool] = Ref::new(false) 9 | 10 | fn reset_lex(fname : String) -> Unit { 11 | pos_fname.val = fname 12 | pos_lnum.val = 1 13 | pos_bol.val = 0 14 | has_lex_error.val = false 15 | } 16 | 17 | fn newline(pos : Int) -> Unit { 18 | pos_lnum.val += 1 19 | pos_bol.val = pos 20 | } 21 | 22 | fn newpos(pos : Int) -> Pos { 23 | { fname: pos_fname.val, lnum: pos_lnum.val, bol: pos_bol.val, cnum: pos } 24 | } 25 | 26 | fn record_lex_error(msg : String, start : Pos, end : Pos) -> Unit { 27 | println("Error:\{FI(start, end)} \{msg}") 28 | has_lex_error.val = true 29 | } 30 | 31 | let comment_depth : Ref[Int] = Ref::new(0) 32 | 33 | let string_builder : StringBuilder = StringBuilder::new() 34 | let string_start : Ref[Int] = Ref::new(0) 35 | } 36 | 37 | rule token() -> (Token, Position, Position) { 38 | parse { 39 | ('\r'* '\n') as t => { newline($endpos(t)); token(lexbuf) } 40 | [' ' '\t']+ => { token(lexbuf) } 41 | "*/" as t => { record_lex_error("unmatched end of comment", newpos($startpos(t)), newpos($endpos(t))); token(lexbuf) } 42 | "/*" => { comment_depth.val = 1; comment(lexbuf); token(lexbuf) } 43 | ['0'-'9']+ as t => { 44 | try { 45 | (INTV(@strconv.parse_int!(t)), newpos($startpos(t)), newpos($endpos(t))) 46 | } catch { 47 | StrConvError(_) => { 48 | record_lex_error("int literal invalid", newpos($startpos(t)), newpos($endpos(t))) 49 | token(lexbuf) 50 | } 51 | } 52 | } 53 | "else" as t => { (ELSE, newpos($startpos(t)), newpos($endpos(t))) } 54 | "error" as t => { (ERROR, newpos($startpos(t)), newpos($endpos(t))) } 55 | "false" as t => { (FALSE, newpos($startpos(t)), newpos($endpos(t))) } 56 | "if" as t => { (IF, newpos($startpos(t)), newpos($endpos(t))) } 57 | "import" as t => { (IMPORT, newpos($startpos(t)), newpos($endpos(t))) } 58 | "iszero" as t => { (ISZERO, newpos($startpos(t)), newpos($endpos(t))) } 59 | "lambda" as t => { (LAMBDA, newpos($startpos(t)), 
newpos($endpos(t))) } 60 | "pred" as t => { (PRED, newpos($startpos(t)), newpos($endpos(t))) } 61 | "succ" as t => { (SUCC, newpos($startpos(t)), newpos($endpos(t))) } 62 | "then" as t => { (THEN, newpos($startpos(t)), newpos($endpos(t))) } 63 | "true" as t => { (TRUE, newpos($startpos(t)), newpos($endpos(t))) } 64 | "try" as t => { (TRY, newpos($startpos(t)), newpos($endpos(t))) } 65 | "with" as t => { (WITH, newpos($startpos(t)), newpos($endpos(t))) } 66 | "Bool" as t => { (UBOOL, newpos($startpos(t)), newpos($endpos(t))) } 67 | "Bot" as t => { (UBOT, newpos($startpos(t)), newpos($endpos(t))) } 68 | "Nat" as t => { (UNAT, newpos($startpos(t)), newpos($endpos(t))) } 69 | "Top" as t => { (UTOP, newpos($startpos(t)), newpos($endpos(t))) } 70 | (['a'-'z' '_'] ['A'-'Z' 'a'-'z' '_' '0'-'9' '\'']*) as t => { (LCID(t), newpos($startpos(t)), newpos($endpos(t))) } 71 | "->" as t => { (ARROW, newpos($startpos(t)), newpos($endpos(t))) } 72 | ":" as t => { (COLON, newpos($startpos(t)), newpos($endpos(t))) } 73 | "." as t => { (DOT, newpos($startpos(t)), newpos($endpos(t))) } 74 | "=" as t => { (EQ, newpos($startpos(t)), newpos($endpos(t))) } 75 | "(" as t => { (LPAREN, newpos($startpos(t)), newpos($endpos(t))) } 76 | ")" as t => { (RPAREN, newpos($startpos(t)), newpos($endpos(t))) } 77 | ";" as t => { (SEMI, newpos($startpos(t)), newpos($endpos(t))) } 78 | "_" as t => { (USCORE, newpos($startpos(t)), newpos($endpos(t))) } 79 | '"' as t => { string_builder.reset(); string_start.val = $startpos(t); string(lexbuf) } 80 | eof as t => { (EOF, newpos($startpos(t)), newpos($endpos(t))) } 81 | _ as t => { 82 | record_lex_error("unrecognized token", newpos($startpos(t)), newpos($endpos(t))) 83 | token(lexbuf) 84 | } 85 | } 86 | } 87 | 88 | rule comment() -> Unit { 89 | parse { 90 | "/*" => { comment_depth.val += 1; comment(lexbuf) } 91 | "*/" => { comment_depth.val -= 1; if comment_depth.val > 0 { comment(lexbuf) } } 92 | eof as t => { record_lex_error("comment not terminated", newpos($startpos(t)), newpos($endpos(t))) } 93 | [^ '\n'] => { comment(lexbuf) } 94 | '\n' as t => { newline($endpos(t)); comment(lexbuf) } 95 | } 96 | } 97 | 98 | rule string() -> (Token, Position, Position) { 99 | parse { 100 | '"' as t => { (STRINGV(string_builder.to_string()), newpos(string_start.val), newpos($endpos(t))) } 101 | '\\' => { string_builder.write_string("\\\\"); string(lexbuf) } 102 | '\n' as t => { string_builder.write_string("\n"); newline($endpos(t)); string(lexbuf) } 103 | eof as t => { 104 | record_lex_error("string not terminated", newpos($startpos(t)), newpos($endpos(t))) 105 | (EOF, newpos($startpos(t)), newpos($endpos(t))) 106 | } 107 | _ as t => { string_builder.write_string(t); string(lexbuf) } 108 | } 109 | } 110 | 111 | { 112 | pub fn new_lexer(input : String) -> Lexer { 113 | Lexbuf::from_string(input) 114 | } 115 | 116 | pub fn next_token(self : Lexer) -> (Token, Position, Position) { 117 | token(self._) 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /chap14-simpleerror/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(ctx : Context, cmd : Command) -> Context { 6 | match cmd { 7 | Import(fname) => process_file!(ctx, fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!(ctx) 10 | let t_prime = t.eval(ctx) 11 | println("\{t_prime.to_string(ctx)}\n : \{ty.to_string()}") 12 | ctx 13 | } 14 | Bind(info, x, bind) => { 15 | 
let bind_checked = bind.derive_type!(ctx, info) 16 | let bind_prime = bind_checked.eval(ctx) 17 | println("\{x} \{bind_prime.to_string(ctx)}") 18 | ctx.add_binding(x, bind_prime) 19 | } 20 | } 21 | } 22 | 23 | ///| 24 | fn process_file!(ctx : Context, fname : String) -> Context { 25 | if already_imported.contains(fname) { 26 | ctx 27 | } else { 28 | already_imported.push(fname) 29 | let cmds = parse_file!(ctx, fname) 30 | loop (ctx, cmds) { 31 | (ctx, Nil) => break ctx 32 | (ctx, Cons(c, rest)) => continue (process_command!(ctx, c), rest) 33 | } 34 | } 35 | } 36 | 37 | ///| 38 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 39 | try { 40 | let code = @fs.read_file_to_string!(fname) 41 | let lexer = new_lexer(code) 42 | reset_lex(fname) 43 | let tokens = [] 44 | while true { 45 | let elem = lexer.next_token() 46 | tokens.push(elem) 47 | match elem.0 { 48 | EOF => break 49 | _ => continue 50 | } 51 | } 52 | let result = toplevel!(tokens) 53 | if has_lex_error.val { 54 | error_info!("") 55 | } else { 56 | result!(ctx).0 57 | } 58 | } catch { 59 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 60 | ErrorWithInfo(_) as e => raise e 61 | UnexpectedToken(t, (start, end), _) => 62 | if has_lex_error.val { 63 | error_info!("") 64 | } else { 65 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 66 | } 67 | _ => panic() 68 | } 69 | } 70 | 71 | ///| 72 | fn main { 73 | let argv = @sys.get_cli_args()[2:] 74 | try { 75 | if argv.length() != 1 { 76 | error_info!("you must specify exactly one input file") 77 | } else { 78 | let fname = argv[0] 79 | ignore(process_file!(Context::empty(), fname)) 80 | } 81 | } catch { 82 | ErrorWithInfo((msg, info)) => 83 | if not(msg.is_empty()) { 84 | println("Error:\{info} \{msg}") 85 | } 86 | _ => panic() 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /chap14-simpleerror/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap14-simpleerror/parser.mbty: -------------------------------------------------------------------------------- 1 | %derive Token 2 | %derive ParseError 3 | %position 4 | %start toplevel 5 | 6 | %token EOF 7 | %token ELSE "else" 8 | %token ERROR "error" 9 | %token FALSE "false" 10 | %token IF "if" 11 | %token IMPORT "import" 12 | %token ISZERO "iszero" 13 | %token LAMBDA "lambda" 14 | %token PRED "pred" 15 | %token SUCC "succ" 16 | %token THEN "then" 17 | %token TRUE "true" 18 | %token TRY "try" 19 | %token WITH "with" 20 | %token UBOOL "Bool" 21 | %token UBOT "Bot" 22 | %token UNAT "Nat" 23 | %token UTOP "Top" 24 | %token LCID 25 | %token INTV 26 | %token STRINGV 27 | %token ARROW "->" 28 | %token COLON ":" 29 | %token DOT "." 
30 | %token EQ "=" 31 | %token LPAREN "(" 32 | %token RPAREN ")" 33 | %token SEMI ";" 34 | %token USCORE "_" 35 | 36 | %type toplevel 37 | %type command 38 | %type binder 39 | %type term 40 | %type app_term 41 | %type atom_term 42 | %type type 43 | %type atom_type 44 | 45 | %% 46 | 47 | toplevel 48 | : EOF { fn(ctx) { (Nil, ctx) } } 49 | | command ";" toplevel { 50 | fn(ctx) { 51 | let (cmd, ctx1) = $1!(ctx) 52 | let (cmds, ctx2) = $3!(ctx1) 53 | (Cons(cmd, cmds), ctx2) 54 | } 55 | } 56 | ; 57 | 58 | command 59 | : "import" STRINGV { fn(ctx) { (Import($2), ctx) } } 60 | | term { fn(ctx) { (Eval(FI($startpos, $endpos), $1!(ctx)), ctx) } } 61 | | LCID binder { fn(ctx) { (Bind(FI($startpos, $endpos), $1, $2!(ctx)), ctx.add_name($1)) } } 62 | ; 63 | 64 | binder 65 | : "=" term { fn(ctx) { TmAbb($2!(ctx), None) } } 66 | | ":" type { fn(ctx) { Var($2(ctx)) } } 67 | ; 68 | 69 | term 70 | : app_term { $1 } 71 | | "if" term "then" term "else" term { fn(ctx) { If(FI($startpos, $endpos), $2!(ctx), $4!(ctx), $6!(ctx)) } } 72 | | "lambda" LCID ":" type "." term { 73 | fn(ctx) { 74 | let ctx1 = ctx.add_name($2) 75 | Abs(FI($startpos, $endpos), $2, $4(ctx), $6!(ctx1)) 76 | } 77 | } 78 | | "lambda" "_" ":" type "." term { 79 | fn(ctx) { 80 | let ctx1 = ctx.add_name("_") 81 | Abs(FI($startpos, $endpos), "_", $4(ctx), $6!(ctx1)) 82 | } 83 | } 84 | | "try" term "with" term { fn(ctx) { Try(FI($startpos, $endpos), $2!(ctx), $4!(ctx)) } } 85 | ; 86 | 87 | app_term 88 | : atom_term { $1 } 89 | | "succ" atom_term { fn(ctx) { Succ(FI($startpos, $endpos), $2!(ctx)) } } 90 | | "pred" atom_term { fn(ctx) { Pred(FI($startpos, $endpos), $2!(ctx)) } } 91 | | "iszero" atom_term { fn(ctx) { IsZero(FI($startpos, $endpos), $2!(ctx)) } } 92 | | app_term atom_term { fn(ctx) { App(FI($startpos, $endpos), $1!(ctx), $2!(ctx)) } } 93 | ; 94 | 95 | atom_term 96 | : "(" term ")" { $2 } 97 | | "true" { fn(_) { True(FI($startpos, $endpos)) } } 98 | | "false" { fn(_) { False(FI($startpos, $endpos)) } } 99 | | INTV { 100 | fn(_) { 101 | let info = FI($startpos, $endpos) 102 | loop ($1, Zero(info)) { 103 | (0, acc) => break acc 104 | (n, acc) => continue (n - 1, Succ(info, acc)) 105 | } 106 | } 107 | } 108 | | LCID { fn(ctx) { Var(FI($startpos, $endpos), ctx.name_to_index!($1, FI($startpos, $endpos)), ctx.length()) } } 109 | | "error" { fn(_) { Error(FI($startpos, $endpos)) } } 110 | ; 111 | 112 | type 113 | : atom_type { $1 } 114 | | atom_type "->" type { fn(ctx) { Arr($1(ctx), $3(ctx)) } } 115 | ; 116 | 117 | atom_type 118 | : "(" type ")" { $2 } 119 | | "Bool" { fn(_) { Bool } } 120 | | "Nat" { fn(_) { Nat } } 121 | | "Top" { fn(_) { Top } } 122 | | "Bot" { fn(_) { Bot } } 123 | ; 124 | -------------------------------------------------------------------------------- /chap14-simpleerror/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseType = (Context) -> Type 50 | -------------------------------------------------------------------------------- /chap14-simpleerror/test.f: -------------------------------------------------------------------------------- 1 | /* Examples for testing */ 2 | 3 | true; 4 | if false then true else false; 5 | 6 | 42; 7 | succ (pred 0); 8 | iszero (pred (succ (succ 0))); 9 | 10 | x : Bool; 11 | x; 12 | 13 | lambda x:Bool. x; 14 | (lambda x:Bool->Bool. if x false then true else false) 15 | (lambda x:Bool. if x then false else true); 16 | 17 | id = lambda x:Bool. x; 18 | id (id ((lambda z:Bool. id z) true)); 19 | 20 | tru = lambda t:Nat. lambda f:Nat. t; 21 | fls = lambda t:Nat. lambda f:Nat. f; 22 | 23 | test = lambda l:Nat->Nat->Nat. lambda m:Nat. lambda n:Nat. l m n; 24 | test tru 33 44; 25 | 26 | pair = lambda f:Nat. lambda s:Nat. lambda b:Nat->Nat->Nat. b f s; 27 | fst = lambda p:(Nat->Nat->Nat)->Nat. p tru; 28 | snd = lambda p:(Nat->Nat->Nat)->Nat. p fls; 29 | fst (pair 33 44); 30 | 31 | c0 = lambda s:Nat->Nat. lambda z:Nat. z; 32 | c1 = lambda s:Nat->Nat. lambda z:Nat. s z; 33 | c2 = lambda s:Nat->Nat. lambda z:Nat. s (s z); 34 | c3 = lambda s:Nat->Nat. lambda z:Nat. s (s (s z)); 35 | 36 | scc = lambda n:(Nat->Nat)->Nat->Nat. lambda s:Nat->Nat. lambda z:Nat. s (n s z); 37 | scc c1; 38 | 39 | plus = lambda m:(Nat->Nat)->Nat->Nat. lambda n:(Nat->Nat)->Nat->Nat. lambda s:Nat->Nat. lambda z:Nat. m s (n s z); 40 | plus c2 c2; 41 | 42 | realnat = lambda m:(Nat->Nat)->Nat->Nat. m (lambda x:Nat. succ x) 0; 43 | realnat c3; 44 | realnat (plus c2 c2); 45 | 46 | (lambda x:Nat. 0) error; 47 | (lambda x:Bool. x) error; 48 | (lambda x:Bool. x) (error true); 49 | 50 | try error with 5; 51 | try error with false; 52 | 53 | try if iszero(5) then true else error with false; 54 | 55 | if true then error else 5; 56 | if false then 5 else error 666; 57 | if false then error else error; 58 | -------------------------------------------------------------------------------- /chap15-simplesub/chap15-simplesub.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap15-simplesub 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | pub(all) struct LexEngine { 26 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 27 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 
28 | start_tags : Array[Int] 29 | code_blocks_n : Int 30 | } 31 | impl LexEngine { 32 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 33 | } 34 | 35 | type Lexbuf 36 | impl Lexbuf { 37 | from_string(String) -> Self 38 | } 39 | 40 | pub(all) type Lexer Lexbuf 41 | impl Lexer { 42 | next_token(Self) -> (Token, Pos, Pos) 43 | } 44 | 45 | type NoRuleApplies 46 | 47 | pub type! ParseError { 48 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 49 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 50 | } 51 | impl Show for ParseError 52 | 53 | type Pos 54 | 55 | type Term 56 | 57 | pub(all) enum Token { 58 | EOF 59 | ELSE 60 | FALSE 61 | IF 62 | IMPORT 63 | ISZERO 64 | LAMBDA 65 | PRED 66 | SUCC 67 | THEN 68 | TRUE 69 | UBOOL 70 | UNAT 71 | UTOP 72 | LCID(String) 73 | INTV(Int) 74 | STRINGV(String) 75 | ARROW 76 | COLON 77 | COMMA 78 | DOT 79 | EQ 80 | LCURLY 81 | LPAREN 82 | RCURLY 83 | RPAREN 84 | SEMI 85 | USCORE 86 | } 87 | impl Token { 88 | kind(Self) -> TokenKind 89 | } 90 | impl Show for Token 91 | 92 | pub(all) enum TokenKind { 93 | TK_EOF 94 | TK_ELSE 95 | TK_FALSE 96 | TK_IF 97 | TK_IMPORT 98 | TK_ISZERO 99 | TK_LAMBDA 100 | TK_PRED 101 | TK_SUCC 102 | TK_THEN 103 | TK_TRUE 104 | TK_UBOOL 105 | TK_UNAT 106 | TK_UTOP 107 | TK_LCID 108 | TK_INTV 109 | TK_STRINGV 110 | TK_ARROW 111 | TK_COLON 112 | TK_COMMA 113 | TK_DOT 114 | TK_EQ 115 | TK_LCURLY 116 | TK_LPAREN 117 | TK_RCURLY 118 | TK_RPAREN 119 | TK_SEMI 120 | TK_USCORE 121 | } 122 | impl Show for TokenKind 123 | 124 | type Type 125 | impl Eq for Type 126 | 127 | // Type aliases 128 | pub typealias Position = Pos 129 | 130 | // Traits 131 | 132 | -------------------------------------------------------------------------------- /chap15-simplesub/lexer.mbtx: -------------------------------------------------------------------------------- 1 | { 2 | pub(all) type Lexer Lexbuf 3 | 4 | let pos_fname : Ref[String] = Ref::new("") 5 | let pos_lnum : Ref[Int] = Ref::new(1) 6 | let pos_bol : Ref[Int] = Ref::new(0) 7 | 8 | let has_lex_error : Ref[Bool] = Ref::new(false) 9 | 10 | fn reset_lex(fname : String) -> Unit { 11 | pos_fname.val = fname 12 | pos_lnum.val = 1 13 | pos_bol.val = 0 14 | has_lex_error.val = false 15 | } 16 | 17 | fn newline(pos : Int) -> Unit { 18 | pos_lnum.val += 1 19 | pos_bol.val = pos 20 | } 21 | 22 | fn newpos(pos : Int) -> Pos { 23 | { fname: pos_fname.val, lnum: pos_lnum.val, bol: pos_bol.val, cnum: pos } 24 | } 25 | 26 | fn record_lex_error(msg : String, start : Pos, end : Pos) -> Unit { 27 | println("Error:\{FI(start, end)} \{msg}") 28 | has_lex_error.val = true 29 | } 30 | 31 | let comment_depth : Ref[Int] = Ref::new(0) 32 | 33 | let string_builder : StringBuilder = StringBuilder::new() 34 | let string_start : Ref[Int] = Ref::new(0) 35 | } 36 | 37 | rule token() -> (Token, Position, Position) { 38 | parse { 39 | ('\r'* '\n') as t => { newline($endpos(t)); token(lexbuf) } 40 | [' ' '\t']+ => { token(lexbuf) } 41 | "*/" as t => { record_lex_error("unmatched end of comment", newpos($startpos(t)), newpos($endpos(t))); token(lexbuf) } 42 | "/*" => { comment_depth.val = 1; comment(lexbuf); token(lexbuf) } 43 | ['0'-'9']+ as t => { 44 | try { 45 | (INTV(@strconv.parse_int!(t)), newpos($startpos(t)), newpos($endpos(t))) 46 | } catch { 47 | StrConvError(_) => { 48 | record_lex_error("int literal invalid", newpos($startpos(t)), newpos($endpos(t))) 49 | token(lexbuf) 50 | } 51 | } 52 | } 53 | "else" as t => { (ELSE, newpos($startpos(t)), newpos($endpos(t))) } 54 | "false" as t => { (FALSE, newpos($startpos(t)), 
newpos($endpos(t))) } 55 | "if" as t => { (IF, newpos($startpos(t)), newpos($endpos(t))) } 56 | "import" as t => { (IMPORT, newpos($startpos(t)), newpos($endpos(t))) } 57 | "iszero" as t => { (ISZERO, newpos($startpos(t)), newpos($endpos(t))) } 58 | "lambda" as t => { (LAMBDA, newpos($startpos(t)), newpos($endpos(t))) } 59 | "pred" as t => { (PRED, newpos($startpos(t)), newpos($endpos(t))) } 60 | "succ" as t => { (SUCC, newpos($startpos(t)), newpos($endpos(t))) } 61 | "then" as t => { (THEN, newpos($startpos(t)), newpos($endpos(t))) } 62 | "true" as t => { (TRUE, newpos($startpos(t)), newpos($endpos(t))) } 63 | "Bool" as t => { (UBOOL, newpos($startpos(t)), newpos($endpos(t))) } 64 | "Nat" as t => { (UNAT, newpos($startpos(t)), newpos($endpos(t))) } 65 | "Top" as t => { (UTOP, newpos($startpos(t)), newpos($endpos(t))) } 66 | (['a'-'z' '_'] ['A'-'Z' 'a'-'z' '_' '0'-'9' '\'']*) as t => { (LCID(t), newpos($startpos(t)), newpos($endpos(t))) } 67 | "->" as t => { (ARROW, newpos($startpos(t)), newpos($endpos(t))) } 68 | ":" as t => { (COLON, newpos($startpos(t)), newpos($endpos(t))) } 69 | "," as t => { (COMMA, newpos($startpos(t)), newpos($endpos(t))) } 70 | "." as t => { (DOT, newpos($startpos(t)), newpos($endpos(t))) } 71 | "=" as t => { (EQ, newpos($startpos(t)), newpos($endpos(t))) } 72 | "{" as t => { (LCURLY, newpos($startpos(t)), newpos($endpos(t))) } 73 | "(" as t => { (LPAREN, newpos($startpos(t)), newpos($endpos(t))) } 74 | "}" as t => { (RCURLY, newpos($startpos(t)), newpos($endpos(t))) } 75 | ")" as t => { (RPAREN, newpos($startpos(t)), newpos($endpos(t))) } 76 | ";" as t => { (SEMI, newpos($startpos(t)), newpos($endpos(t))) } 77 | "_" as t => { (USCORE, newpos($startpos(t)), newpos($endpos(t))) } 78 | '"' as t => { string_builder.reset(); string_start.val = $startpos(t); string(lexbuf) } 79 | eof as t => { (EOF, newpos($startpos(t)), newpos($endpos(t))) } 80 | _ as t => { 81 | record_lex_error("unrecognized token", newpos($startpos(t)), newpos($endpos(t))) 82 | token(lexbuf) 83 | } 84 | } 85 | } 86 | 87 | rule comment() -> Unit { 88 | parse { 89 | "/*" => { comment_depth.val += 1; comment(lexbuf) } 90 | "*/" => { comment_depth.val -= 1; if comment_depth.val > 0 { comment(lexbuf) } } 91 | eof as t => { record_lex_error("comment not terminated", newpos($startpos(t)), newpos($endpos(t))) } 92 | [^ '\n'] => { comment(lexbuf) } 93 | '\n' as t => { newline($endpos(t)); comment(lexbuf) } 94 | } 95 | } 96 | 97 | rule string() -> (Token, Position, Position) { 98 | parse { 99 | '"' as t => { (STRINGV(string_builder.to_string()), newpos(string_start.val), newpos($endpos(t))) } 100 | '\\' => { string_builder.write_string("\\\\"); string(lexbuf) } 101 | '\n' as t => { string_builder.write_string("\n"); newline($endpos(t)); string(lexbuf) } 102 | eof as t => { 103 | record_lex_error("string not terminated", newpos($startpos(t)), newpos($endpos(t))) 104 | (EOF, newpos($startpos(t)), newpos($endpos(t))) 105 | } 106 | _ as t => { string_builder.write_string(t); string(lexbuf) } 107 | } 108 | } 109 | 110 | { 111 | pub fn new_lexer(input : String) -> Lexer { 112 | Lexbuf::from_string(input) 113 | } 114 | 115 | pub fn next_token(self : Lexer) -> (Token, Position, Position) { 116 | token(self._) 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /chap15-simplesub/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(ctx : 
Context, cmd : Command) -> Context { 6 | match cmd { 7 | Import(fname) => process_file!(ctx, fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!(ctx) 10 | let t_prime = t.eval(ctx) 11 | println("\{t_prime.to_string(ctx)}\n : \{ty.to_string()}") 12 | ctx 13 | } 14 | Bind(info, x, bind) => { 15 | let bind_checked = bind.derive_type!(ctx, info) 16 | let bind_prime = bind_checked.eval(ctx) 17 | println("\{x} \{bind_prime.to_string(ctx)}") 18 | ctx.add_binding(x, bind_prime) 19 | } 20 | } 21 | } 22 | 23 | ///| 24 | fn process_file!(ctx : Context, fname : String) -> Context { 25 | if already_imported.contains(fname) { 26 | ctx 27 | } else { 28 | already_imported.push(fname) 29 | let cmds = parse_file!(ctx, fname) 30 | loop (ctx, cmds) { 31 | (ctx, Nil) => break ctx 32 | (ctx, Cons(c, rest)) => continue (process_command!(ctx, c), rest) 33 | } 34 | } 35 | } 36 | 37 | ///| 38 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 39 | try { 40 | let code = @fs.read_file_to_string!(fname) 41 | let lexer = new_lexer(code) 42 | reset_lex(fname) 43 | let tokens = [] 44 | while true { 45 | let elem = lexer.next_token() 46 | tokens.push(elem) 47 | match elem.0 { 48 | EOF => break 49 | _ => continue 50 | } 51 | } 52 | let result = toplevel!(tokens) 53 | if has_lex_error.val { 54 | error_info!("") 55 | } else { 56 | result!(ctx).0 57 | } 58 | } catch { 59 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 60 | ErrorWithInfo(_) as e => raise e 61 | UnexpectedToken(t, (start, end), _) => 62 | if has_lex_error.val { 63 | error_info!("") 64 | } else { 65 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 66 | } 67 | _ => panic() 68 | } 69 | } 70 | 71 | ///| 72 | fn main { 73 | let argv = @sys.get_cli_args()[2:] 74 | try { 75 | if argv.length() != 1 { 76 | error_info!("you must specify exactly one input file") 77 | } else { 78 | let fname = argv[0] 79 | ignore(process_file!(Context::empty(), fname)) 80 | } 81 | } catch { 82 | ErrorWithInfo((msg, info)) => 83 | if not(msg.is_empty()) { 84 | println("Error:\{info} \{msg}") 85 | } 86 | _ => panic() 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /chap15-simplesub/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap15-simplesub/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseLabelTerms = (Context, Int) -> @immut/list.T[(String, Term)]!Error 50 | 51 | ///| 52 | typealias ParseLabelTerm = (Context, Int) -> (String, Term)!Error 53 | 54 | ///| 55 | typealias ParseType = (Context) -> Type 56 | 57 | ///| 58 | typealias ParseLabelTypes = (Context, Int) -> @immut/list.T[(String, Type)] 59 | 60 | ///| 61 | typealias ParseLabelType = (Context, Int) -> (String, Type) 62 | -------------------------------------------------------------------------------- /chap15-simplesub/test.f: -------------------------------------------------------------------------------- 1 | /* Examples for testing */ 2 | 3 | true; 4 | if false then true else false; 5 | 6 | 42; 7 | succ (pred 0); 8 | iszero (pred (succ (succ 0))); 9 | 10 | x : Bool; 11 | x; 12 | 13 | lambda x:Bool. x; 14 | (lambda x:Bool->Bool. if x false then true else false) 15 | (lambda x:Bool. if x then false else true); 16 | 17 | id = lambda x:Bool. x; 18 | id (id ((lambda z:Bool. id z) true)); 19 | 20 | tru = lambda t:Nat. lambda f:Nat. t; 21 | fls = lambda t:Nat. lambda f:Nat. f; 22 | 23 | test = lambda l:Nat->Nat->Nat. lambda m:Nat. lambda n:Nat. l m n; 24 | test tru 33 44; 25 | 26 | pair = lambda f:Nat. lambda s:Nat. lambda b:Nat->Nat->Nat. b f s; 27 | fst = lambda p:(Nat->Nat->Nat)->Nat. p tru; 28 | snd = lambda p:(Nat->Nat->Nat)->Nat. p fls; 29 | fst (pair 33 44); 30 | 31 | c0 = lambda s:Nat->Nat. lambda z:Nat. z; 32 | c1 = lambda s:Nat->Nat. lambda z:Nat. s z; 33 | c2 = lambda s:Nat->Nat. lambda z:Nat. s (s z); 34 | c3 = lambda s:Nat->Nat. lambda z:Nat. s (s (s z)); 35 | 36 | scc = lambda n:(Nat->Nat)->Nat->Nat. lambda s:Nat->Nat. lambda z:Nat. s (n s z); 37 | scc c1; 38 | 39 | plus = lambda m:(Nat->Nat)->Nat->Nat. lambda n:(Nat->Nat)->Nat->Nat. lambda s:Nat->Nat. lambda z:Nat. m s (n s z); 40 | plus c2 c2; 41 | 42 | realnat = lambda m:(Nat->Nat)->Nat->Nat. m (lambda x:Nat. succ x) 0; 43 | realnat c3; 44 | realnat (plus c2 c2); 45 | 46 | {pred 4, if true then false else false}; 47 | {pred 4, if true then false else false}.1; 48 | 49 | (lambda x:{Nat, Nat}. x.2) {pred 4, pred 5}; 50 | 51 | t1 = {partno=55, cost=30}; 52 | t2 = {cost=30, partno=55}; 53 | ft = lambda x:{partno:Nat, cost:Nat}. x.cost; 54 | ft(t1); 55 | ft(t2); 56 | ft({cost=123, partno=124, abc=125}); 57 | 58 | {x=true, y=false}; 59 | {x=true, y=false}.x; 60 | {true, false}; 61 | {true, false}.1; 62 | 63 | if true then {x=true,y=false,a=false} else {y=false,x={},b=false}; 64 | 65 | lambda x:Top. x; 66 | (lambda x:Top. x) (lambda x:Top. x); 67 | (lambda x:Top->Top. x) (lambda x:Top. x); 68 | 69 | (lambda r:{x:Top->Top}. 
r.x r.x) 70 | {x=lambda z:Top.z, y=lambda z:Top.z}; 71 | -------------------------------------------------------------------------------- /chap20-fullequirec/chap20-fullequirec.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap20-fullequirec 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | pub(all) struct LexEngine { 26 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 27 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 28 | start_tags : Array[Int] 29 | code_blocks_n : Int 30 | } 31 | impl LexEngine { 32 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 33 | } 34 | 35 | type Lexbuf 36 | impl Lexbuf { 37 | from_string(String) -> Self 38 | } 39 | 40 | pub(all) type Lexer Lexbuf 41 | impl Lexer { 42 | next_token(Self) -> (Token, Pos, Pos) 43 | } 44 | 45 | type NoRuleApplies 46 | 47 | pub type! ParseError { 48 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 49 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 50 | } 51 | impl Show for ParseError 52 | 53 | type Pos 54 | 55 | type Term 56 | 57 | pub(all) enum Token { 58 | EOF 59 | AS 60 | CASE 61 | ELSE 62 | FALSE 63 | FIX 64 | IF 65 | IMPORT 66 | ISZERO 67 | LAMBDA 68 | OF 69 | PRED 70 | SUCC 71 | THEN 72 | TIMESDOUBLE 73 | TRUE 74 | UNIT 75 | UBOOL 76 | UDOUBLE 77 | UNAT 78 | UREC 79 | USTRING 80 | UUNIT 81 | LCID(String) 82 | UCID(String) 83 | INTV(Int) 84 | DOUBLEV(Double) 85 | STRINGV(String) 86 | DDARROW 87 | ARROW 88 | COLON 89 | COMMA 90 | DOT 91 | EQ 92 | GT 93 | LCURLY 94 | LPAREN 95 | LT 96 | RCURLY 97 | RPAREN 98 | SEMI 99 | USCORE 100 | VBAR 101 | } 102 | impl Token { 103 | kind(Self) -> TokenKind 104 | } 105 | impl Show for Token 106 | 107 | pub(all) enum TokenKind { 108 | TK_EOF 109 | TK_AS 110 | TK_CASE 111 | TK_ELSE 112 | TK_FALSE 113 | TK_FIX 114 | TK_IF 115 | TK_IMPORT 116 | TK_ISZERO 117 | TK_LAMBDA 118 | TK_OF 119 | TK_PRED 120 | TK_SUCC 121 | TK_THEN 122 | TK_TIMESDOUBLE 123 | TK_TRUE 124 | TK_UNIT 125 | TK_UBOOL 126 | TK_UDOUBLE 127 | TK_UNAT 128 | TK_UREC 129 | TK_USTRING 130 | TK_UUNIT 131 | TK_LCID 132 | TK_UCID 133 | TK_INTV 134 | TK_DOUBLEV 135 | TK_STRINGV 136 | TK_DDARROW 137 | TK_ARROW 138 | TK_COLON 139 | TK_COMMA 140 | TK_DOT 141 | TK_EQ 142 | TK_GT 143 | TK_LCURLY 144 | TK_LPAREN 145 | TK_LT 146 | TK_RCURLY 147 | TK_RPAREN 148 | TK_SEMI 149 | TK_USCORE 150 | TK_VBAR 151 | } 152 | impl Show for TokenKind 153 | 154 | type Type 155 | impl Eq for Type 156 | impl Hash for Type 157 | 158 | // Type aliases 159 | pub typealias Position = Pos 160 | 161 | // Traits 162 | 163 | -------------------------------------------------------------------------------- /chap20-fullequirec/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(ctx : Context, cmd : Command) -> Context { 6 | match cmd { 7 | Import(fname) => process_file!(ctx, fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!(ctx) 10 | let t_prime = t.eval(ctx) 11 | println("\{t_prime.to_string(ctx)}\n : \{ty.to_string(ctx)}") 12 | 
ctx 13 | } 14 | Bind(info, x, bind) => { 15 | let bind_checked = bind.derive_type!(ctx, info) 16 | let bind_prime = bind_checked.eval(ctx) 17 | println("\{x} \{bind_prime.to_string(ctx)}") 18 | ctx.add_binding(x, bind_prime) 19 | } 20 | } 21 | } 22 | 23 | ///| 24 | fn process_file!(ctx : Context, fname : String) -> Context { 25 | if already_imported.contains(fname) { 26 | ctx 27 | } else { 28 | already_imported.push(fname) 29 | let cmds = parse_file!(ctx, fname) 30 | loop (ctx, cmds) { 31 | (ctx, Nil) => break ctx 32 | (ctx, Cons(c, rest)) => continue (process_command!(ctx, c), rest) 33 | } 34 | } 35 | } 36 | 37 | ///| 38 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 39 | try { 40 | let code = @fs.read_file_to_string!(fname) 41 | let lexer = new_lexer(code) 42 | reset_lex(fname) 43 | let tokens = [] 44 | while true { 45 | let elem = lexer.next_token() 46 | tokens.push(elem) 47 | match elem.0 { 48 | EOF => break 49 | _ => continue 50 | } 51 | } 52 | let result = toplevel!(tokens) 53 | if has_lex_error.val { 54 | error_info!("") 55 | } else { 56 | result!(ctx).0 57 | } 58 | } catch { 59 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 60 | ErrorWithInfo(_) as e => raise e 61 | UnexpectedToken(t, (start, end), _) => 62 | if has_lex_error.val { 63 | error_info!("") 64 | } else { 65 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 66 | } 67 | _ => panic() 68 | } 69 | } 70 | 71 | ///| 72 | fn main { 73 | let argv = @sys.get_cli_args()[2:] 74 | try { 75 | if argv.length() != 1 { 76 | error_info!("you must specify exactly one input file") 77 | } else { 78 | let fname = argv[0] 79 | ignore(process_file!(Context::empty(), fname)) 80 | } 81 | } catch { 82 | ErrorWithInfo((msg, info)) => 83 | if not(msg.is_empty()) { 84 | println("Error:\{info} \{msg}") 85 | } 86 | _ => panic() 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /chap20-fullequirec/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap20-fullequirec/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseCases = (Context) -> @immut/list.T[(String, (String, Term))]!Error 50 | 51 | ///| 52 | typealias ParseCase = (Context) -> (String, (String, Term))!Error 53 | 54 | ///| 55 | typealias ParseLabelTerms = (Context, Int) -> @immut/list.T[(String, Term)]!Error 56 | 57 | ///| 58 | typealias ParseLabelTerm = (Context, Int) -> (String, Term)!Error 59 | 60 | ///| 61 | typealias ParseType = (Context) -> Type!Error 62 | 63 | ///| 64 | typealias ParseLabelTypes = (Context, Int) -> @immut/list.T[(String, Type)]!Error 65 | 66 | ///| 67 | typealias ParseLabelType = (Context, Int) -> (String, Type)!Error 68 | -------------------------------------------------------------------------------- /chap20-fullisorec/chap20-fullisorec.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap20-fullisorec 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | pub(all) struct LexEngine { 26 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 27 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 28 | start_tags : Array[Int] 29 | code_blocks_n : Int 30 | } 31 | impl LexEngine { 32 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 33 | } 34 | 35 | type Lexbuf 36 | impl Lexbuf { 37 | from_string(String) -> Self 38 | } 39 | 40 | pub(all) type Lexer Lexbuf 41 | impl Lexer { 42 | next_token(Self) -> (Token, Pos, Pos) 43 | } 44 | 45 | type NoRuleApplies 46 | 47 | pub type! 
ParseError { 48 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 49 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 50 | } 51 | impl Show for ParseError 52 | 53 | type Pos 54 | 55 | type Term 56 | 57 | pub(all) enum Token { 58 | EOF 59 | AS 60 | CASE 61 | ELSE 62 | FALSE 63 | FIX 64 | FOLD 65 | IF 66 | IMPORT 67 | ISZERO 68 | LAMBDA 69 | OF 70 | PRED 71 | SUCC 72 | THEN 73 | TIMESDOUBLE 74 | TRUE 75 | UNFOLD 76 | UNIT 77 | UBOOL 78 | UDOUBLE 79 | UNAT 80 | UREC 81 | USTRING 82 | UUNIT 83 | LCID(String) 84 | UCID(String) 85 | INTV(Int) 86 | DOUBLEV(Double) 87 | STRINGV(String) 88 | DDARROW 89 | ARROW 90 | COLON 91 | COMMA 92 | DOT 93 | EQ 94 | GT 95 | LCURLY 96 | LPAREN 97 | LSQUARE 98 | LT 99 | RCURLY 100 | RPAREN 101 | RSQUARE 102 | SEMI 103 | USCORE 104 | VBAR 105 | } 106 | impl Token { 107 | kind(Self) -> TokenKind 108 | } 109 | impl Show for Token 110 | 111 | pub(all) enum TokenKind { 112 | TK_EOF 113 | TK_AS 114 | TK_CASE 115 | TK_ELSE 116 | TK_FALSE 117 | TK_FIX 118 | TK_FOLD 119 | TK_IF 120 | TK_IMPORT 121 | TK_ISZERO 122 | TK_LAMBDA 123 | TK_OF 124 | TK_PRED 125 | TK_SUCC 126 | TK_THEN 127 | TK_TIMESDOUBLE 128 | TK_TRUE 129 | TK_UNFOLD 130 | TK_UNIT 131 | TK_UBOOL 132 | TK_UDOUBLE 133 | TK_UNAT 134 | TK_UREC 135 | TK_USTRING 136 | TK_UUNIT 137 | TK_LCID 138 | TK_UCID 139 | TK_INTV 140 | TK_DOUBLEV 141 | TK_STRINGV 142 | TK_DDARROW 143 | TK_ARROW 144 | TK_COLON 145 | TK_COMMA 146 | TK_DOT 147 | TK_EQ 148 | TK_GT 149 | TK_LCURLY 150 | TK_LPAREN 151 | TK_LSQUARE 152 | TK_LT 153 | TK_RCURLY 154 | TK_RPAREN 155 | TK_RSQUARE 156 | TK_SEMI 157 | TK_USCORE 158 | TK_VBAR 159 | } 160 | impl Show for TokenKind 161 | 162 | type Type 163 | impl Eq for Type 164 | impl Hash for Type 165 | 166 | // Type aliases 167 | pub typealias Position = Pos 168 | 169 | // Traits 170 | 171 | -------------------------------------------------------------------------------- /chap20-fullisorec/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(ctx : Context, cmd : Command) -> Context { 6 | match cmd { 7 | Import(fname) => process_file!(ctx, fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!(ctx) 10 | let t_prime = t.eval(ctx) 11 | println("\{t_prime.to_string(ctx)}\n : \{ty.to_string(ctx)}") 12 | ctx 13 | } 14 | Bind(info, x, bind) => { 15 | let bind_checked = bind.derive_type!(ctx, info) 16 | let bind_prime = bind_checked.eval(ctx) 17 | println("\{x} \{bind_prime.to_string(ctx)}") 18 | ctx.add_binding(x, bind_prime) 19 | } 20 | } 21 | } 22 | 23 | ///| 24 | fn process_file!(ctx : Context, fname : String) -> Context { 25 | if already_imported.contains(fname) { 26 | ctx 27 | } else { 28 | already_imported.push(fname) 29 | let cmds = parse_file!(ctx, fname) 30 | loop (ctx, cmds) { 31 | (ctx, Nil) => break ctx 32 | (ctx, Cons(c, rest)) => continue (process_command!(ctx, c), rest) 33 | } 34 | } 35 | } 36 | 37 | ///| 38 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 39 | try { 40 | let code = @fs.read_file_to_string!(fname) 41 | let lexer = new_lexer(code) 42 | reset_lex(fname) 43 | let tokens = [] 44 | while true { 45 | let elem = lexer.next_token() 46 | tokens.push(elem) 47 | match elem.0 { 48 | EOF => break 49 | _ => continue 50 | } 51 | } 52 | let result = toplevel!(tokens) 53 | if has_lex_error.val { 54 | error_info!("") 55 | } else { 56 | result!(ctx).0 57 | } 58 | } catch { 59 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 60 | 
ErrorWithInfo(_) as e => raise e 61 | UnexpectedToken(t, (start, end), _) => 62 | if has_lex_error.val { 63 | error_info!("") 64 | } else { 65 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 66 | } 67 | _ => panic() 68 | } 69 | } 70 | 71 | ///| 72 | fn main { 73 | let argv = @sys.get_cli_args()[2:] 74 | try { 75 | if argv.length() != 1 { 76 | error_info!("you must specify exactly one input file") 77 | } else { 78 | let fname = argv[0] 79 | ignore(process_file!(Context::empty(), fname)) 80 | } 81 | } catch { 82 | ErrorWithInfo((msg, info)) => 83 | if not(msg.is_empty()) { 84 | println("Error:\{info} \{msg}") 85 | } 86 | _ => panic() 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /chap20-fullisorec/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap20-fullisorec/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseCases = (Context) -> @immut/list.T[(String, (String, Term))]!Error 50 | 51 | ///| 52 | typealias ParseCase = (Context) -> (String, (String, Term))!Error 53 | 54 | ///| 55 | typealias ParseLabelTerms = (Context, Int) -> @immut/list.T[(String, Term)]!Error 56 | 57 | ///| 58 | typealias ParseLabelTerm = (Context, Int) -> (String, Term)!Error 59 | 60 | ///| 61 | typealias ParseType = (Context) -> Type!Error 62 | 63 | ///| 64 | typealias ParseLabelTypes = (Context, Int) -> @immut/list.T[(String, Type)]!Error 65 | 66 | ///| 67 | typealias ParseLabelType = (Context, Int) -> (String, Type)!Error 68 | -------------------------------------------------------------------------------- /chap22-simplerecon/chap22-simplerecon.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap22-simplerecon 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Constr 20 | 21 | type Context 22 | 23 | type ErrorWithInfo 24 | 25 | type Info 26 | 27 | pub(all) struct LexEngine { 28 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 29 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 30 | start_tags : Array[Int] 31 | code_blocks_n : Int 32 | } 33 | impl LexEngine { 34 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 35 | } 36 | 37 | type Lexbuf 38 | impl Lexbuf { 39 | from_string(String) -> Self 40 | } 41 | 42 | pub(all) type Lexer Lexbuf 43 | impl Lexer { 44 | next_token(Self) -> (Token, Pos, Pos) 45 | } 46 | 47 | type NoRuleApplies 48 | 49 | pub type! 
ParseError { 50 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 51 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 52 | } 53 | impl Show for ParseError 54 | 55 | type Pos 56 | 57 | type Term 58 | 59 | pub(all) enum Token { 60 | EOF 61 | ELSE 62 | FALSE 63 | IF 64 | IMPORT 65 | ISZERO 66 | LAMBDA 67 | PRED 68 | SUCC 69 | THEN 70 | TRUE 71 | UBOOL 72 | UNAT 73 | LCID(String) 74 | UCID(String) 75 | INTV(Int) 76 | STRINGV(String) 77 | ARROW 78 | COLON 79 | DOT 80 | EQ 81 | LPAREN 82 | RPAREN 83 | SEMI 84 | USCORE 85 | } 86 | impl Token { 87 | kind(Self) -> TokenKind 88 | } 89 | impl Show for Token 90 | 91 | pub(all) enum TokenKind { 92 | TK_EOF 93 | TK_ELSE 94 | TK_FALSE 95 | TK_IF 96 | TK_IMPORT 97 | TK_ISZERO 98 | TK_LAMBDA 99 | TK_PRED 100 | TK_SUCC 101 | TK_THEN 102 | TK_TRUE 103 | TK_UBOOL 104 | TK_UNAT 105 | TK_LCID 106 | TK_UCID 107 | TK_INTV 108 | TK_STRINGV 109 | TK_ARROW 110 | TK_COLON 111 | TK_DOT 112 | TK_EQ 113 | TK_LPAREN 114 | TK_RPAREN 115 | TK_SEMI 116 | TK_USCORE 117 | } 118 | impl Show for TokenKind 119 | 120 | type Type 121 | impl Eq for Type 122 | 123 | type UVarGen 124 | 125 | // Type aliases 126 | pub typealias Position = Pos 127 | 128 | // Traits 129 | 130 | -------------------------------------------------------------------------------- /chap22-simplerecon/lexer.mbtx: -------------------------------------------------------------------------------- 1 | { 2 | pub(all) type Lexer Lexbuf 3 | 4 | let pos_fname : Ref[String] = Ref::new("") 5 | let pos_lnum : Ref[Int] = Ref::new(1) 6 | let pos_bol : Ref[Int] = Ref::new(0) 7 | 8 | let has_lex_error : Ref[Bool] = Ref::new(false) 9 | 10 | fn reset_lex(fname : String) -> Unit { 11 | pos_fname.val = fname 12 | pos_lnum.val = 1 13 | pos_bol.val = 0 14 | has_lex_error.val = false 15 | } 16 | 17 | fn newline(pos : Int) -> Unit { 18 | pos_lnum.val += 1 19 | pos_bol.val = pos 20 | } 21 | 22 | fn newpos(pos : Int) -> Pos { 23 | { fname: pos_fname.val, lnum: pos_lnum.val, bol: pos_bol.val, cnum: pos } 24 | } 25 | 26 | fn record_lex_error(msg : String, start : Pos, end : Pos) -> Unit { 27 | println("Error:\{FI(start, end)} \{msg}") 28 | has_lex_error.val = true 29 | } 30 | 31 | let comment_depth : Ref[Int] = Ref::new(0) 32 | 33 | let string_builder : StringBuilder = StringBuilder::new() 34 | let string_start : Ref[Int] = Ref::new(0) 35 | } 36 | 37 | rule token() -> (Token, Position, Position) { 38 | parse { 39 | ('\r'* '\n') as t => { newline($endpos(t)); token(lexbuf) } 40 | [' ' '\t']+ => { token(lexbuf) } 41 | "*/" as t => { record_lex_error("unmatched end of comment", newpos($startpos(t)), newpos($endpos(t))); token(lexbuf) } 42 | "/*" => { comment_depth.val = 1; comment(lexbuf); token(lexbuf) } 43 | ['0'-'9']+ as t => { 44 | try { 45 | (INTV(@strconv.parse_int!(t)), newpos($startpos(t)), newpos($endpos(t))) 46 | } catch { 47 | StrConvError(_) => { 48 | record_lex_error("int literal invalid", newpos($startpos(t)), newpos($endpos(t))) 49 | token(lexbuf) 50 | } 51 | } 52 | } 53 | "else" as t => { (ELSE, newpos($startpos(t)), newpos($endpos(t))) } 54 | "false" as t => { (FALSE, newpos($startpos(t)), newpos($endpos(t))) } 55 | "if" as t => { (IF, newpos($startpos(t)), newpos($endpos(t))) } 56 | "import" as t => { (IMPORT, newpos($startpos(t)), newpos($endpos(t))) } 57 | "iszero" as t => { (ISZERO, newpos($startpos(t)), newpos($endpos(t))) } 58 | "lambda" as t => { (LAMBDA, newpos($startpos(t)), newpos($endpos(t))) } 59 | "pred" as t => { (PRED, newpos($startpos(t)), newpos($endpos(t))) } 60 | "succ" as t => { (SUCC, 
newpos($startpos(t)), newpos($endpos(t))) } 61 | "then" as t => { (THEN, newpos($startpos(t)), newpos($endpos(t))) } 62 | "true" as t => { (TRUE, newpos($startpos(t)), newpos($endpos(t))) } 63 | "Bool" as t => { (UBOOL, newpos($startpos(t)), newpos($endpos(t))) } 64 | "Nat" as t => { (UNAT, newpos($startpos(t)), newpos($endpos(t))) } 65 | (['a'-'z' '_'] ['A'-'Z' 'a'-'z' '_' '0'-'9' '\'']*) as t => { (LCID(t), newpos($startpos(t)), newpos($endpos(t))) } 66 | (['A'-'Z'] ['A'-'Z' 'a'-'z' '_' '0'-'9' '\'']*) as t => { (UCID(t), newpos($startpos(t)), newpos($endpos(t))) } 67 | "->" as t => { (ARROW, newpos($startpos(t)), newpos($endpos(t))) } 68 | ":" as t => { (COLON, newpos($startpos(t)), newpos($endpos(t))) } 69 | "." as t => { (DOT, newpos($startpos(t)), newpos($endpos(t))) } 70 | "=" as t => { (EQ, newpos($startpos(t)), newpos($endpos(t))) } 71 | "(" as t => { (LPAREN, newpos($startpos(t)), newpos($endpos(t))) } 72 | ")" as t => { (RPAREN, newpos($startpos(t)), newpos($endpos(t))) } 73 | ";" as t => { (SEMI, newpos($startpos(t)), newpos($endpos(t))) } 74 | "_" as t => { (USCORE, newpos($startpos(t)), newpos($endpos(t))) } 75 | '"' as t => { string_builder.reset(); string_start.val = $startpos(t); string(lexbuf) } 76 | eof as t => { (EOF, newpos($startpos(t)), newpos($endpos(t))) } 77 | _ as t => { 78 | record_lex_error("unrecognized token", newpos($startpos(t)), newpos($endpos(t))) 79 | token(lexbuf) 80 | } 81 | } 82 | } 83 | 84 | rule comment() -> Unit { 85 | parse { 86 | "/*" => { comment_depth.val += 1; comment(lexbuf) } 87 | "*/" => { comment_depth.val -= 1; if comment_depth.val > 0 { comment(lexbuf) } } 88 | eof as t => { record_lex_error("comment not terminated", newpos($startpos(t)), newpos($endpos(t))) } 89 | [^ '\n'] => { comment(lexbuf) } 90 | '\n' as t => { newline($endpos(t)); comment(lexbuf) } 91 | } 92 | } 93 | 94 | rule string() -> (Token, Position, Position) { 95 | parse { 96 | '"' as t => { (STRINGV(string_builder.to_string()), newpos(string_start.val), newpos($endpos(t))) } 97 | '\\' => { string_builder.write_string("\\\\"); string(lexbuf) } 98 | '\n' as t => { string_builder.write_string("\n"); newline($endpos(t)); string(lexbuf) } 99 | eof as t => { 100 | record_lex_error("string not terminated", newpos($startpos(t)), newpos($endpos(t))) 101 | (EOF, newpos($startpos(t)), newpos($endpos(t))) 102 | } 103 | _ as t => { string_builder.write_string(t); string(lexbuf) } 104 | } 105 | } 106 | 107 | { 108 | pub fn new_lexer(input : String) -> Lexer { 109 | Lexbuf::from_string(input) 110 | } 111 | 112 | pub fn next_token(self : Lexer) -> (Token, Position, Position) { 113 | token(self._) 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /chap22-simplerecon/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!( 6 | ctx : Context, 7 | uvar_gen : UVarGen, 8 | constr : Constr, 9 | cmd : Command 10 | ) -> Context { 11 | match cmd { 12 | Import(fname) => process_file!(ctx, uvar_gen, constr, fname) 13 | Eval(info, t) => { 14 | let ty = t.recon_type!(ctx, uvar_gen, constr) 15 | constr.unify!("could not simplify constraints", info) 16 | let t_prime = t.eval(ctx) 17 | println( 18 | "\{t_prime.to_string(ctx)}\n : \{ty.subst_constr(constr).to_string()}", 19 | ) 20 | ctx 21 | } 22 | Bind(info, x, bind) => { 23 | let bind_checked = bind.recon_type!(ctx, uvar_gen, constr, info) 24 | let bind_prime = 
bind_checked.eval(ctx) 25 | println("\{x} \{bind_prime.to_string(ctx)}") 26 | ctx.add_binding(x, bind_prime) 27 | } 28 | } 29 | } 30 | 31 | ///| 32 | fn process_file!( 33 | ctx : Context, 34 | uvar_gen : UVarGen, 35 | constr : Constr, 36 | fname : String 37 | ) -> Context { 38 | if already_imported.contains(fname) { 39 | ctx 40 | } else { 41 | already_imported.push(fname) 42 | let cmds = parse_file!(ctx, fname) 43 | loop (ctx, cmds) { 44 | (ctx, Nil) => break ctx 45 | (ctx, Cons(c, rest)) => 46 | continue (process_command!(ctx, uvar_gen, constr, c), rest) 47 | } 48 | } 49 | } 50 | 51 | ///| 52 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 53 | try { 54 | let code = @fs.read_file_to_string!(fname) 55 | let lexer = new_lexer(code) 56 | reset_lex(fname) 57 | let tokens = [] 58 | while true { 59 | let elem = lexer.next_token() 60 | tokens.push(elem) 61 | match elem.0 { 62 | EOF => break 63 | _ => continue 64 | } 65 | } 66 | let result = toplevel!(tokens) 67 | if has_lex_error.val { 68 | error_info!("") 69 | } else { 70 | result!(ctx).0 71 | } 72 | } catch { 73 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 74 | ErrorWithInfo(_) as e => raise e 75 | UnexpectedToken(t, (start, end), _) => 76 | if has_lex_error.val { 77 | error_info!("") 78 | } else { 79 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 80 | } 81 | _ => panic() 82 | } 83 | } 84 | 85 | ///| 86 | fn main { 87 | let argv = @sys.get_cli_args()[2:] 88 | try { 89 | if argv.length() != 1 { 90 | error_info!("you must specify exactly one input file") 91 | } else { 92 | let fname = argv[0] 93 | ignore( 94 | process_file!(Context::empty(), UVarGen::new(), Constr::new(), fname), 95 | ) 96 | } 97 | } catch { 98 | ErrorWithInfo((msg, info)) => 99 | if not(msg.is_empty()) { 100 | println("Error:\{info} \{msg}") 101 | } 102 | _ => panic() 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /chap22-simplerecon/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap22-simplerecon/parser.mbty: -------------------------------------------------------------------------------- 1 | %derive Token 2 | %derive ParseError 3 | %position 4 | %start toplevel 5 | 6 | %token EOF 7 | %token ELSE "else" 8 | %token FALSE "false" 9 | %token IF "if" 10 | %token IMPORT "import" 11 | %token ISZERO "iszero" 12 | %token LAMBDA "lambda" 13 | %token PRED "pred" 14 | %token SUCC "succ" 15 | %token THEN "then" 16 | %token TRUE "true" 17 | %token UBOOL "Bool" 18 | %token UNAT "Nat" 19 | %token LCID 20 | %token UCID 21 | %token INTV 22 | %token STRINGV 23 | %token ARROW "->" 24 | %token COLON ":" 25 | %token DOT "." 
26 | %token EQ "=" 27 | %token LPAREN "(" 28 | %token RPAREN ")" 29 | %token SEMI ";" 30 | %token USCORE "_" 31 | 32 | %type toplevel 33 | %type command 34 | %type binder 35 | %type term 36 | %type app_term 37 | %type atom_term 38 | %type type 39 | %type atom_type 40 | 41 | %% 42 | 43 | toplevel 44 | : EOF { fn(ctx) { (Nil, ctx) } } 45 | | command ";" toplevel { 46 | fn(ctx) { 47 | let (cmd, ctx1) = $1!(ctx) 48 | let (cmds, ctx2) = $3!(ctx1) 49 | (Cons(cmd, cmds), ctx2) 50 | } 51 | } 52 | ; 53 | 54 | command 55 | : "import" STRINGV { fn(ctx) { (Import($2), ctx) } } 56 | | term { fn(ctx) { (Eval(FI($startpos, $endpos), $1!(ctx)), ctx) } } 57 | | LCID binder { fn(ctx) { (Bind(FI($startpos, $endpos), $1, $2!(ctx)), ctx.add_name($1)) } } 58 | ; 59 | 60 | binder 61 | : "=" term { fn(ctx) { TmAbb($2!(ctx), None) } } 62 | | ":" type { fn(ctx) { Var($2(ctx)) } } 63 | ; 64 | 65 | term 66 | : app_term { $1 } 67 | | "if" term "then" term "else" term { fn(ctx) { If(FI($startpos, $endpos), $2!(ctx), $4!(ctx), $6!(ctx)) } } 68 | | "lambda" LCID ":" type "." term { 69 | fn(ctx) { 70 | let ctx1 = ctx.add_name($2) 71 | Abs(FI($startpos, $endpos), $2, Some($4(ctx)), $6!(ctx1)) 72 | } 73 | } 74 | | "lambda" "_" ":" type "." term { 75 | fn(ctx) { 76 | let ctx1 = ctx.add_name("_") 77 | Abs(FI($startpos, $endpos), "_", Some($4(ctx)), $6!(ctx1)) 78 | } 79 | } 80 | | "lambda" LCID "." term { 81 | fn(ctx) { 82 | let ctx1 = ctx.add_name($2) 83 | Abs(FI($startpos, $endpos), $2, None, $4!(ctx1)) 84 | } 85 | } 86 | | "lambda" "_" "." term { 87 | fn(ctx) { 88 | let ctx1 = ctx.add_name("_") 89 | Abs(FI($startpos, $endpos), "_", None, $4!(ctx1)) 90 | } 91 | } 92 | ; 93 | 94 | app_term 95 | : atom_term { $1 } 96 | | "succ" atom_term { fn(ctx) { Succ(FI($startpos, $endpos), $2!(ctx)) } } 97 | | "pred" atom_term { fn(ctx) { Pred(FI($startpos, $endpos), $2!(ctx)) } } 98 | | "iszero" atom_term { fn(ctx) { IsZero(FI($startpos, $endpos), $2!(ctx)) } } 99 | | app_term atom_term { fn(ctx) { App(FI($startpos, $endpos), $1!(ctx), $2!(ctx)) } } 100 | ; 101 | 102 | atom_term 103 | : "(" term ")" { $2 } 104 | | "true" { fn(_) { True(FI($startpos, $endpos)) } } 105 | | "false" { fn(_) { False(FI($startpos, $endpos)) } } 106 | | INTV { 107 | fn(_) { 108 | let info = FI($startpos, $endpos) 109 | loop ($1, Zero(info)) { 110 | (0, acc) => break acc 111 | (n, acc) => continue (n - 1, Succ(info, acc)) 112 | } 113 | } 114 | } 115 | | LCID { fn(ctx) { Var(FI($startpos, $endpos), ctx.name_to_index!($1, FI($startpos, $endpos)), ctx.length()) } } 116 | ; 117 | 118 | type 119 | : atom_type { $1 } 120 | | atom_type "->" type { fn(ctx) { Arr($1(ctx), $3(ctx)) } } 121 | ; 122 | 123 | atom_type 124 | : "(" type ")" { $2 } 125 | | "Bool" { fn(_) { Bool } } 126 | | "Nat" { fn(_) { Nat } } 127 | | UCID { fn(_) { Id($1) } } 128 | ; 129 | -------------------------------------------------------------------------------- /chap22-simplerecon/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseType = (Context) -> Type 50 | -------------------------------------------------------------------------------- /chap22-simplerecon/test.f: -------------------------------------------------------------------------------- 1 | /* Examples for testing */ 2 | 3 | true; 4 | if false then true else false; 5 | 6 | 42; 7 | succ (pred 0); 8 | iszero (pred (succ (succ 0))); 9 | 10 | x : Bool; 11 | x; 12 | 13 | lambda x:Bool. x; 14 | (lambda x:Bool->Bool. if x false then true else false) 15 | (lambda x:Bool. if x then false else true); 16 | 17 | id = lambda x:Bool. x; 18 | id (id ((lambda z:Bool. id z) true)); 19 | 20 | tru = lambda t:Nat. lambda f:Nat. t; 21 | fls = lambda t:Nat. lambda f:Nat. f; 22 | 23 | test = lambda l:Nat->Nat->Nat. lambda m:Nat. lambda n:Nat. l m n; 24 | test tru 33 44; 25 | 26 | pair = lambda f:Nat. lambda s:Nat. lambda b:Nat->Nat->Nat. b f s; 27 | fst = lambda p:(Nat->Nat->Nat)->Nat. p tru; 28 | snd = lambda p:(Nat->Nat->Nat)->Nat. p fls; 29 | fst (pair 33 44); 30 | 31 | c0 = lambda s:Nat->Nat. lambda z:Nat. z; 32 | c1 = lambda s:Nat->Nat. lambda z:Nat. s z; 33 | c2 = lambda s:Nat->Nat. lambda z:Nat. s (s z); 34 | c3 = lambda s:Nat->Nat. lambda z:Nat. s (s (s z)); 35 | 36 | scc = lambda n:(Nat->Nat)->Nat->Nat. lambda s:Nat->Nat. lambda z:Nat. s (n s z); 37 | scc c1; 38 | 39 | plus = lambda m:(Nat->Nat)->Nat->Nat. lambda n:(Nat->Nat)->Nat->Nat. lambda s:Nat->Nat. lambda z:Nat. m s (n s z); 40 | plus c2 c2; 41 | 42 | realnat = lambda m:(Nat->Nat)->Nat->Nat. m (lambda x:Nat. succ x) 0; 43 | realnat c3; 44 | realnat (plus c2 c2); 45 | 46 | lambda x:A. x; 47 | 48 | (lambda x:X. lambda y:X->X. y x); 49 | (lambda x:X->X. x 0) (lambda y:Nat. y); 50 | 51 | (lambda x. x 0); 52 | (lambda g. g (g (g 42))) (lambda x. 1); 53 | -------------------------------------------------------------------------------- /chap23-fullpoly/chap23-fullpoly.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap23-fullpoly 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | pub(all) struct LexEngine { 26 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 27 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 28 | start_tags : Array[Int] 29 | code_blocks_n : Int 30 | } 31 | impl LexEngine { 32 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 33 | } 34 | 35 | type Lexbuf 36 | impl Lexbuf { 37 | from_string(String) -> Self 38 | } 39 | 40 | pub(all) type Lexer Lexbuf 41 | impl Lexer { 42 | next_token(Self) -> (Token, Pos, Pos) 43 | } 44 | 45 | type NoRuleApplies 46 | 47 | pub type! 
ParseError { 48 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 49 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 50 | } 51 | impl Show for ParseError 52 | 53 | type Pos 54 | 55 | type Term 56 | 57 | pub(all) enum Token { 58 | EOF 59 | AS 60 | CASE 61 | ELSE 62 | FALSE 63 | FIX 64 | IF 65 | IMPORT 66 | IN 67 | ISZERO 68 | LAMBDA 69 | LET 70 | OF 71 | PRED 72 | SUCC 73 | THEN 74 | TIMESDOUBLE 75 | TRUE 76 | UNIT 77 | UALL 78 | UBOOL 79 | UDOUBLE 80 | UNAT 81 | USOME 82 | USTRING 83 | UUNIT 84 | LCID(String) 85 | UCID(String) 86 | INTV(Int) 87 | DOUBLEV(Double) 88 | STRINGV(String) 89 | DDARROW 90 | ARROW 91 | COLON 92 | COMMA 93 | DOT 94 | EQ 95 | GT 96 | LCURLY 97 | LPAREN 98 | LSQUARE 99 | LT 100 | RCURLY 101 | RPAREN 102 | RSQUARE 103 | SEMI 104 | STAR 105 | USCORE 106 | VBAR 107 | } 108 | impl Token { 109 | kind(Self) -> TokenKind 110 | } 111 | impl Show for Token 112 | 113 | pub(all) enum TokenKind { 114 | TK_EOF 115 | TK_AS 116 | TK_CASE 117 | TK_ELSE 118 | TK_FALSE 119 | TK_FIX 120 | TK_IF 121 | TK_IMPORT 122 | TK_IN 123 | TK_ISZERO 124 | TK_LAMBDA 125 | TK_LET 126 | TK_OF 127 | TK_PRED 128 | TK_SUCC 129 | TK_THEN 130 | TK_TIMESDOUBLE 131 | TK_TRUE 132 | TK_UNIT 133 | TK_UALL 134 | TK_UBOOL 135 | TK_UDOUBLE 136 | TK_UNAT 137 | TK_USOME 138 | TK_USTRING 139 | TK_UUNIT 140 | TK_LCID 141 | TK_UCID 142 | TK_INTV 143 | TK_DOUBLEV 144 | TK_STRINGV 145 | TK_DDARROW 146 | TK_ARROW 147 | TK_COLON 148 | TK_COMMA 149 | TK_DOT 150 | TK_EQ 151 | TK_GT 152 | TK_LCURLY 153 | TK_LPAREN 154 | TK_LSQUARE 155 | TK_LT 156 | TK_RCURLY 157 | TK_RPAREN 158 | TK_RSQUARE 159 | TK_SEMI 160 | TK_STAR 161 | TK_USCORE 162 | TK_VBAR 163 | } 164 | impl Show for TokenKind 165 | 166 | type Type 167 | 168 | // Type aliases 169 | pub typealias Position = Pos 170 | 171 | // Traits 172 | 173 | -------------------------------------------------------------------------------- /chap23-fullpoly/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(ctx : Context, cmd : Command) -> Context { 6 | match cmd { 7 | Import(fname) => process_file!(ctx, fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!(ctx) 10 | let t_prime = t.eval(ctx) 11 | println("\{t_prime.to_string(ctx)}\n : \{ty.to_string(ctx)}") 12 | ctx 13 | } 14 | Bind(info, x, bind) => { 15 | let bind_checked = bind.derive_type!(ctx, info) 16 | let bind_prime = bind_checked.eval(ctx) 17 | println("\{x} \{bind_prime.to_string(ctx)}") 18 | ctx.add_binding(x, bind_prime) 19 | } 20 | SomeBind(info, x1, x2, t) => { 21 | let ty = t.derive_type!(ctx) 22 | match ty.simplify(ctx) { 23 | Some(_, ty1) => { 24 | let t_prime = t.eval(ctx) 25 | let b = match t_prime { 26 | Pack(_, _, t2, _) => TmAbb(t2.shift(1), Some(ty1)) 27 | _ => Var(ty1) 28 | } 29 | println("\{x1} \{TyVar.to_string(ctx)}") 30 | let ctx1 = ctx.add_binding(x1, TyVar) 31 | println("\{x2} \{b.to_string(ctx1)}") 32 | let ctx2 = ctx1.add_binding(x2, b) 33 | ctx2 34 | } 35 | _ => error_info!("existential type expected", info~) 36 | } 37 | } 38 | } 39 | } 40 | 41 | ///| 42 | fn process_file!(ctx : Context, fname : String) -> Context { 43 | if already_imported.contains(fname) { 44 | ctx 45 | } else { 46 | already_imported.push(fname) 47 | let cmds = parse_file!(ctx, fname) 48 | loop (ctx, cmds) { 49 | (ctx, Nil) => break ctx 50 | (ctx, Cons(c, rest)) => continue (process_command!(ctx, c), rest) 51 | } 52 | } 53 | } 54 | 55 | ///| 56 | fn parse_file!(ctx : Context, fname : String) -> 
@immut/list.T[Command] { 57 | try { 58 | let code = @fs.read_file_to_string!(fname) 59 | let lexer = new_lexer(code) 60 | reset_lex(fname) 61 | let tokens = [] 62 | while true { 63 | let elem = lexer.next_token() 64 | tokens.push(elem) 65 | match elem.0 { 66 | EOF => break 67 | _ => continue 68 | } 69 | } 70 | let result = toplevel!(tokens) 71 | if has_lex_error.val { 72 | error_info!("") 73 | } else { 74 | result!(ctx).0 75 | } 76 | } catch { 77 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 78 | ErrorWithInfo(_) as e => raise e 79 | UnexpectedToken(t, (start, end), _) => 80 | if has_lex_error.val { 81 | error_info!("") 82 | } else { 83 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 84 | } 85 | _ => panic() 86 | } 87 | } 88 | 89 | ///| 90 | fn main { 91 | let argv = @sys.get_cli_args()[2:] 92 | try { 93 | if argv.length() != 1 { 94 | error_info!("you must specify exactly one input file") 95 | } else { 96 | let fname = argv[0] 97 | ignore(process_file!(Context::empty(), fname)) 98 | } 99 | } catch { 100 | ErrorWithInfo((msg, info)) => 101 | if not(msg.is_empty()) { 102 | println("Error:\{info} \{msg}") 103 | } 104 | _ => panic() 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /chap23-fullpoly/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap23-fullpoly/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseCases = (Context) -> @immut/list.T[(String, (String, Term))]!Error 50 | 51 | ///| 52 | typealias ParseCase = (Context) -> (String, (String, Term))!Error 53 | 54 | ///| 55 | typealias ParseLabelTerms = (Context, Int) -> @immut/list.T[(String, Term)]!Error 56 | 57 | ///| 58 | typealias ParseLabelTerm = (Context, Int) -> (String, Term)!Error 59 | 60 | ///| 61 | typealias ParseType = (Context) -> Type!Error 62 | 63 | ///| 64 | typealias ParseLabelTypes = (Context, Int) -> @immut/list.T[(String, Type)]!Error 65 | 66 | ///| 67 | typealias ParseLabelType = (Context, Int) -> (String, Type)!Error 68 | -------------------------------------------------------------------------------- /chap26-fullfsub/chap26-fullfsub.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap26-fullfsub 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | pub(all) struct LexEngine { 26 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 27 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 28 | start_tags : Array[Int] 29 | code_blocks_n : Int 30 | } 31 | impl LexEngine { 32 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 33 | } 34 | 35 | type Lexbuf 36 | impl Lexbuf { 37 | from_string(String) -> Self 38 | } 39 | 40 | pub(all) type Lexer Lexbuf 41 | impl Lexer { 42 | next_token(Self) -> (Token, Pos, Pos) 43 | } 44 | 45 | type NoRuleApplies 46 | 47 | pub type! 
ParseError { 48 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 49 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 50 | } 51 | impl Show for ParseError 52 | 53 | type Pos 54 | 55 | type Term 56 | 57 | pub(all) enum Token { 58 | EOF 59 | AS 60 | CASE 61 | ELSE 62 | FALSE 63 | FIX 64 | IF 65 | IMPORT 66 | IN 67 | ISZERO 68 | LAMBDA 69 | LET 70 | OF 71 | PRED 72 | SUCC 73 | THEN 74 | TIMESDOUBLE 75 | TRUE 76 | UNIT 77 | UALL 78 | UBOOL 79 | UDOUBLE 80 | UNAT 81 | USOME 82 | USTRING 83 | UTOP 84 | UUNIT 85 | LCID(String) 86 | UCID(String) 87 | INTV(Int) 88 | DOUBLEV(Double) 89 | STRINGV(String) 90 | DDARROW 91 | ARROW 92 | LEQ 93 | COLON 94 | COMMA 95 | DOT 96 | EQ 97 | GT 98 | LCURLY 99 | LPAREN 100 | LSQUARE 101 | LT 102 | RCURLY 103 | RPAREN 104 | RSQUARE 105 | SEMI 106 | STAR 107 | USCORE 108 | VBAR 109 | } 110 | impl Token { 111 | kind(Self) -> TokenKind 112 | } 113 | impl Show for Token 114 | 115 | pub(all) enum TokenKind { 116 | TK_EOF 117 | TK_AS 118 | TK_CASE 119 | TK_ELSE 120 | TK_FALSE 121 | TK_FIX 122 | TK_IF 123 | TK_IMPORT 124 | TK_IN 125 | TK_ISZERO 126 | TK_LAMBDA 127 | TK_LET 128 | TK_OF 129 | TK_PRED 130 | TK_SUCC 131 | TK_THEN 132 | TK_TIMESDOUBLE 133 | TK_TRUE 134 | TK_UNIT 135 | TK_UALL 136 | TK_UBOOL 137 | TK_UDOUBLE 138 | TK_UNAT 139 | TK_USOME 140 | TK_USTRING 141 | TK_UTOP 142 | TK_UUNIT 143 | TK_LCID 144 | TK_UCID 145 | TK_INTV 146 | TK_DOUBLEV 147 | TK_STRINGV 148 | TK_DDARROW 149 | TK_ARROW 150 | TK_LEQ 151 | TK_COLON 152 | TK_COMMA 153 | TK_DOT 154 | TK_EQ 155 | TK_GT 156 | TK_LCURLY 157 | TK_LPAREN 158 | TK_LSQUARE 159 | TK_LT 160 | TK_RCURLY 161 | TK_RPAREN 162 | TK_RSQUARE 163 | TK_SEMI 164 | TK_STAR 165 | TK_USCORE 166 | TK_VBAR 167 | } 168 | impl Show for TokenKind 169 | 170 | type Type 171 | 172 | // Type aliases 173 | pub typealias Position = Pos 174 | 175 | // Traits 176 | 177 | -------------------------------------------------------------------------------- /chap26-fullfsub/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(ctx : Context, cmd : Command) -> Context { 6 | match cmd { 7 | Import(fname) => process_file!(ctx, fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!(ctx) 10 | let t_prime = t.eval(ctx) 11 | println("\{t_prime.to_string(ctx)}\n : \{ty.to_string(ctx)}") 12 | ctx 13 | } 14 | Bind(info, x, bind) => { 15 | let bind_checked = bind.derive_type!(ctx, info) 16 | let bind_prime = bind_checked.eval(ctx) 17 | println("\{x} \{bind_prime.to_string(ctx)}") 18 | ctx.add_binding(x, bind_prime) 19 | } 20 | SomeBind(info, x1, x2, t) => { 21 | let ty = t.derive_type!(ctx) 22 | match ty.simplify(ctx) { 23 | Some(_, ty1, ty2) => { 24 | let t_prime = t.eval(ctx) 25 | let b = match t_prime { 26 | Pack(_, _, t2, _) => TmAbb(t2.shift(1), Some(ty2)) 27 | _ => Var(ty2) 28 | } 29 | println("\{x1} \{TyVar(ty1).to_string(ctx)}") 30 | let ctx1 = ctx.add_binding(x1, TyVar(ty1)) 31 | println("\{x2} \{b.to_string(ctx1)}") 32 | let ctx2 = ctx1.add_binding(x2, b) 33 | ctx2 34 | } 35 | _ => error_info!("existential type expected", info~) 36 | } 37 | } 38 | } 39 | } 40 | 41 | ///| 42 | fn process_file!(ctx : Context, fname : String) -> Context { 43 | if already_imported.contains(fname) { 44 | ctx 45 | } else { 46 | already_imported.push(fname) 47 | let cmds = parse_file!(ctx, fname) 48 | loop (ctx, cmds) { 49 | (ctx, Nil) => break ctx 50 | (ctx, Cons(c, rest)) => continue (process_command!(ctx, c), rest) 51 | } 52 | } 53 | } 54 | 55 | 
///| 56 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 57 | try { 58 | let code = @fs.read_file_to_string!(fname) 59 | let lexer = new_lexer(code) 60 | reset_lex(fname) 61 | let tokens = [] 62 | while true { 63 | let elem = lexer.next_token() 64 | tokens.push(elem) 65 | match elem.0 { 66 | EOF => break 67 | _ => continue 68 | } 69 | } 70 | let result = toplevel!(tokens) 71 | if has_lex_error.val { 72 | error_info!("") 73 | } else { 74 | result!(ctx).0 75 | } 76 | } catch { 77 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 78 | ErrorWithInfo(_) as e => raise e 79 | UnexpectedToken(t, (start, end), _) => 80 | if has_lex_error.val { 81 | error_info!("") 82 | } else { 83 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 84 | } 85 | _ => panic() 86 | } 87 | } 88 | 89 | ///| 90 | fn main { 91 | let argv = @sys.get_cli_args()[2:] 92 | try { 93 | if argv.length() != 1 { 94 | error_info!("you must specify exactly one input file") 95 | } else { 96 | let fname = argv[0] 97 | ignore(process_file!(Context::empty(), fname)) 98 | } 99 | } catch { 100 | ErrorWithInfo((msg, info)) => 101 | if not(msg.is_empty()) { 102 | println("Error:\{info} \{msg}") 103 | } 104 | _ => panic() 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /chap26-fullfsub/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap26-fullfsub/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseCases = (Context) -> @immut/list.T[(String, (String, Term))]!Error 50 | 51 | ///| 52 | typealias ParseCase = (Context) -> (String, (String, Term))!Error 53 | 54 | ///| 55 | typealias ParseLabelTerms = (Context, Int) -> @immut/list.T[(String, Term)]!Error 56 | 57 | ///| 58 | typealias ParseLabelTerm = (Context, Int) -> (String, Term)!Error 59 | 60 | ///| 61 | typealias ParseType = (Context) -> Type!Error 62 | 63 | ///| 64 | typealias ParseLabelTypes = (Context, Int) -> @immut/list.T[(String, Type)]!Error 65 | 66 | ///| 67 | typealias ParseLabelType = (Context, Int) -> (String, Type)!Error 68 | -------------------------------------------------------------------------------- /chap29-fullomega/chap29-fullomega.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap29-fullomega 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | type Kind 26 | impl Eq for Kind 27 | 28 | pub(all) struct LexEngine { 29 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 30 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 31 | start_tags : Array[Int] 32 | code_blocks_n : Int 33 | } 34 | impl LexEngine { 35 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 36 | } 37 | 38 | type Lexbuf 39 | impl Lexbuf { 40 | from_string(String) -> Self 41 | } 42 | 43 | pub(all) type Lexer Lexbuf 44 | impl Lexer { 45 | next_token(Self) -> (Token, Pos, Pos) 46 | } 47 | 48 | type NoRuleApplies 49 | 50 | pub type! 
ParseError { 51 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 52 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 53 | } 54 | impl Show for ParseError 55 | 56 | type Pos 57 | 58 | type Term 59 | 60 | pub(all) enum Token { 61 | EOF 62 | AS 63 | CASE 64 | ELSE 65 | FALSE 66 | FIX 67 | IF 68 | IMPORT 69 | IN 70 | ISZERO 71 | LAMBDA 72 | LET 73 | OF 74 | PRED 75 | SUCC 76 | THEN 77 | TIMESDOUBLE 78 | TRUE 79 | UNIT 80 | UALL 81 | UBOOL 82 | UDOUBLE 83 | UNAT 84 | USOME 85 | USTRING 86 | UUNIT 87 | LCID(String) 88 | UCID(String) 89 | INTV(Int) 90 | DOUBLEV(Double) 91 | STRINGV(String) 92 | DDARROW 93 | ARROW 94 | DARROW 95 | COLONCOLON 96 | COLON 97 | COMMA 98 | DOT 99 | EQ 100 | GT 101 | LCURLY 102 | LPAREN 103 | LSQUARE 104 | LT 105 | RCURLY 106 | RPAREN 107 | RSQUARE 108 | SEMI 109 | STAR 110 | USCORE 111 | VBAR 112 | } 113 | impl Token { 114 | kind(Self) -> TokenKind 115 | } 116 | impl Show for Token 117 | 118 | pub(all) enum TokenKind { 119 | TK_EOF 120 | TK_AS 121 | TK_CASE 122 | TK_ELSE 123 | TK_FALSE 124 | TK_FIX 125 | TK_IF 126 | TK_IMPORT 127 | TK_IN 128 | TK_ISZERO 129 | TK_LAMBDA 130 | TK_LET 131 | TK_OF 132 | TK_PRED 133 | TK_SUCC 134 | TK_THEN 135 | TK_TIMESDOUBLE 136 | TK_TRUE 137 | TK_UNIT 138 | TK_UALL 139 | TK_UBOOL 140 | TK_UDOUBLE 141 | TK_UNAT 142 | TK_USOME 143 | TK_USTRING 144 | TK_UUNIT 145 | TK_LCID 146 | TK_UCID 147 | TK_INTV 148 | TK_DOUBLEV 149 | TK_STRINGV 150 | TK_DDARROW 151 | TK_ARROW 152 | TK_DARROW 153 | TK_COLONCOLON 154 | TK_COLON 155 | TK_COMMA 156 | TK_DOT 157 | TK_EQ 158 | TK_GT 159 | TK_LCURLY 160 | TK_LPAREN 161 | TK_LSQUARE 162 | TK_LT 163 | TK_RCURLY 164 | TK_RPAREN 165 | TK_RSQUARE 166 | TK_SEMI 167 | TK_STAR 168 | TK_USCORE 169 | TK_VBAR 170 | } 171 | impl Show for TokenKind 172 | 173 | type Type 174 | 175 | // Type aliases 176 | pub typealias Position = Pos 177 | 178 | // Traits 179 | 180 | -------------------------------------------------------------------------------- /chap29-fullomega/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(ctx : Context, cmd : Command) -> Context { 6 | match cmd { 7 | Import(fname) => process_file!(ctx, fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!(ctx) 10 | let t_prime = t.eval(ctx) 11 | println("\{t_prime.to_string(ctx)}\n : \{ty.to_string(ctx)}") 12 | ctx 13 | } 14 | Bind(info, x, bind) => { 15 | let bind_checked = bind.derive_type!(ctx, info) 16 | let bind_prime = bind_checked.eval(ctx) 17 | println("\{x} \{bind_prime.to_string(ctx)}") 18 | ctx.add_binding(x, bind_prime) 19 | } 20 | SomeBind(info, x1, x2, t) => { 21 | let ty = t.derive_type!(ctx) 22 | match ty.simplify(ctx) { 23 | Some(_, kd1, ty2) => { 24 | let t_prime = t.eval(ctx) 25 | let b = match t_prime { 26 | Pack(_, _, t2, _) => TmAbb(t2.shift(1), Some(ty2)) 27 | _ => Var(ty2) 28 | } 29 | println("\{x1} \{TyVar(kd1).to_string(ctx)}") 30 | let ctx1 = ctx.add_binding(x1, TyVar(kd1)) 31 | println("\{x2} \{b.to_string(ctx1)}") 32 | let ctx2 = ctx1.add_binding(x2, b) 33 | ctx2 34 | } 35 | _ => error_info!("existential type expected", info~) 36 | } 37 | } 38 | } 39 | } 40 | 41 | ///| 42 | fn process_file!(ctx : Context, fname : String) -> Context { 43 | if already_imported.contains(fname) { 44 | ctx 45 | } else { 46 | already_imported.push(fname) 47 | let cmds = parse_file!(ctx, fname) 48 | loop (ctx, cmds) { 49 | (ctx, Nil) => break ctx 50 | (ctx, Cons(c, rest)) => continue (process_command!(ctx, c), rest) 51 | } 52 
| } 53 | } 54 | 55 | ///| 56 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 57 | try { 58 | let code = @fs.read_file_to_string!(fname) 59 | let lexer = new_lexer(code) 60 | reset_lex(fname) 61 | let tokens = [] 62 | while true { 63 | let elem = lexer.next_token() 64 | tokens.push(elem) 65 | match elem.0 { 66 | EOF => break 67 | _ => continue 68 | } 69 | } 70 | let result = toplevel!(tokens) 71 | if has_lex_error.val { 72 | error_info!("") 73 | } else { 74 | result!(ctx).0 75 | } 76 | } catch { 77 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 78 | ErrorWithInfo(_) as e => raise e 79 | UnexpectedToken(t, (start, end), _) => 80 | if has_lex_error.val { 81 | error_info!("") 82 | } else { 83 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 84 | } 85 | _ => panic() 86 | } 87 | } 88 | 89 | ///| 90 | fn main { 91 | let argv = @sys.get_cli_args()[2:] 92 | try { 93 | if argv.length() != 1 { 94 | error_info!("you must specify exactly one input file") 95 | } else { 96 | let fname = argv[0] 97 | ignore(process_file!(Context::empty(), fname)) 98 | } 99 | } catch { 100 | ErrorWithInfo((msg, info)) => 101 | if not(msg.is_empty()) { 102 | println("Error:\{info} \{msg}") 103 | } 104 | _ => panic() 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /chap29-fullomega/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap29-fullomega/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseCases = (Context) -> @immut/list.T[(String, (String, Term))]!Error 50 | 51 | ///| 52 | typealias ParseCase = (Context) -> (String, (String, Term))!Error 53 | 54 | ///| 55 | typealias ParseLabelTerms = (Context, Int) -> @immut/list.T[(String, Term)]!Error 56 | 57 | ///| 58 | typealias ParseLabelTerm = (Context, Int) -> (String, Term)!Error 59 | 60 | ///| 61 | typealias ParseType = (Context) -> Type!Error 62 | 63 | ///| 64 | typealias ParseLabelTypes = (Context, Int) -> @immut/list.T[(String, Type)]!Error 65 | 66 | ///| 67 | typealias ParseLabelType = (Context, Int) -> (String, Type)!Error 68 | 69 | ///| 70 | typealias ParseKind = (Context) -> Kind 71 | -------------------------------------------------------------------------------- /chap31-fullfomsub/chap31-fullfomsub.mbti: -------------------------------------------------------------------------------- 1 | package TAPL/chap31-fullfomsub 2 | 3 | alias @moonbitlang/core/immut/list as @list 4 | 5 | // Values 6 | fn new_lexer(String) -> Lexer 7 | 8 | fn next_token(Lexer) -> (Token, Pos, Pos) 9 | 10 | fn run(LexEngine, Lexbuf) -> (Int, Array[(Int, Int)]) 11 | 12 | fn toplevel(Array[(Token, Pos, Pos)], initial_pos? : Pos) -> ((Context) -> (@list.T[Command], Context)!)!ParseError 13 | 14 | // Types and methods 15 | type Binding 16 | 17 | type Command 18 | 19 | type Context 20 | 21 | type ErrorWithInfo 22 | 23 | type Info 24 | 25 | type Kind 26 | impl Eq for Kind 27 | 28 | pub(all) struct LexEngine { 29 | graph : Array[(Int) -> (Int, Array[Array[Int]])] 30 | end_nodes : Array[(Int, Array[((Int, Int), (Int, Int))])?] 31 | start_tags : Array[Int] 32 | code_blocks_n : Int 33 | } 34 | impl LexEngine { 35 | run(Self, Lexbuf) -> (Int, Array[(Int, Int)]) 36 | } 37 | 38 | type Lexbuf 39 | impl Lexbuf { 40 | from_string(String) -> Self 41 | } 42 | 43 | pub(all) type Lexer Lexbuf 44 | impl Lexer { 45 | next_token(Self) -> (Token, Pos, Pos) 46 | } 47 | 48 | type NoRuleApplies 49 | 50 | pub type! 
ParseError { 51 | UnexpectedToken(Token, (Pos, Pos), Array[TokenKind]) 52 | UnexpectedEndOfInput(Pos, Array[TokenKind]) 53 | } 54 | impl Show for ParseError 55 | 56 | type Pos 57 | 58 | type Term 59 | 60 | pub(all) enum Token { 61 | EOF 62 | AS 63 | CASE 64 | ELSE 65 | FALSE 66 | FIX 67 | IF 68 | IMPORT 69 | IN 70 | ISZERO 71 | LAMBDA 72 | LET 73 | OF 74 | PRED 75 | SUCC 76 | THEN 77 | TIMESDOUBLE 78 | TRUE 79 | UNIT 80 | UALL 81 | UBOOL 82 | UDOUBLE 83 | UNAT 84 | USOME 85 | USTRING 86 | UTOP 87 | UUNIT 88 | LCID(String) 89 | UCID(String) 90 | INTV(Int) 91 | DOUBLEV(Double) 92 | STRINGV(String) 93 | DDARROW 94 | ARROW 95 | COLONCOLON 96 | DARROW 97 | LEQ 98 | COLON 99 | COMMA 100 | DOT 101 | EQ 102 | GT 103 | LCURLY 104 | LPAREN 105 | LSQUARE 106 | LT 107 | RCURLY 108 | RPAREN 109 | RSQUARE 110 | SEMI 111 | STAR 112 | USCORE 113 | VBAR 114 | } 115 | impl Token { 116 | kind(Self) -> TokenKind 117 | } 118 | impl Show for Token 119 | 120 | pub(all) enum TokenKind { 121 | TK_EOF 122 | TK_AS 123 | TK_CASE 124 | TK_ELSE 125 | TK_FALSE 126 | TK_FIX 127 | TK_IF 128 | TK_IMPORT 129 | TK_IN 130 | TK_ISZERO 131 | TK_LAMBDA 132 | TK_LET 133 | TK_OF 134 | TK_PRED 135 | TK_SUCC 136 | TK_THEN 137 | TK_TIMESDOUBLE 138 | TK_TRUE 139 | TK_UNIT 140 | TK_UALL 141 | TK_UBOOL 142 | TK_UDOUBLE 143 | TK_UNAT 144 | TK_USOME 145 | TK_USTRING 146 | TK_UTOP 147 | TK_UUNIT 148 | TK_LCID 149 | TK_UCID 150 | TK_INTV 151 | TK_DOUBLEV 152 | TK_STRINGV 153 | TK_DDARROW 154 | TK_ARROW 155 | TK_COLONCOLON 156 | TK_DARROW 157 | TK_LEQ 158 | TK_COLON 159 | TK_COMMA 160 | TK_DOT 161 | TK_EQ 162 | TK_GT 163 | TK_LCURLY 164 | TK_LPAREN 165 | TK_LSQUARE 166 | TK_LT 167 | TK_RCURLY 168 | TK_RPAREN 169 | TK_RSQUARE 170 | TK_SEMI 171 | TK_STAR 172 | TK_USCORE 173 | TK_VBAR 174 | } 175 | impl Show for TokenKind 176 | 177 | type Type 178 | 179 | // Type aliases 180 | pub typealias Position = Pos 181 | 182 | // Traits 183 | 184 | -------------------------------------------------------------------------------- /chap31-fullfomsub/main.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | let already_imported : Array[String] = [] 3 | 4 | ///| 5 | fn process_command!(ctx : Context, cmd : Command) -> Context { 6 | match cmd { 7 | Import(fname) => process_file!(ctx, fname) 8 | Eval(_, t) => { 9 | let ty = t.derive_type!(ctx) 10 | let t_prime = t.eval(ctx) 11 | println("\{t_prime.to_string(ctx)}\n : \{ty.to_string(ctx)}") 12 | ctx 13 | } 14 | Bind(info, x, bind) => { 15 | let bind_checked = bind.derive_type!(ctx, info) 16 | let bind_prime = bind_checked.eval(ctx) 17 | println("\{x} \{bind_prime.to_string(ctx)}") 18 | ctx.add_binding(x, bind_prime) 19 | } 20 | SomeBind(info, x1, x2, t) => { 21 | let ty = t.derive_type!(ctx) 22 | match ty.simplify(ctx) { 23 | Some(_, ty1, ty2) => { 24 | let t_prime = t.eval(ctx) 25 | let b = match t_prime { 26 | Pack(_, _, t2, _) => TmAbb(t2.shift(1), Some(ty2)) 27 | _ => Var(ty2) 28 | } 29 | println("\{x1} \{TyVar(ty1).to_string(ctx)}") 30 | let ctx1 = ctx.add_binding(x1, TyVar(ty1)) 31 | println("\{x2} \{b.to_string(ctx1)}") 32 | let ctx2 = ctx1.add_binding(x2, b) 33 | ctx2 34 | } 35 | _ => error_info!("existential type expected", info~) 36 | } 37 | } 38 | } 39 | } 40 | 41 | ///| 42 | fn process_file!(ctx : Context, fname : String) -> Context { 43 | if already_imported.contains(fname) { 44 | ctx 45 | } else { 46 | already_imported.push(fname) 47 | let cmds = parse_file!(ctx, fname) 48 | loop (ctx, cmds) { 49 | (ctx, Nil) => break ctx 50 | (ctx, Cons(c, rest)) => 
continue (process_command!(ctx, c), rest) 51 | } 52 | } 53 | } 54 | 55 | ///| 56 | fn parse_file!(ctx : Context, fname : String) -> @immut/list.T[Command] { 57 | try { 58 | let code = @fs.read_file_to_string!(fname) 59 | let lexer = new_lexer(code) 60 | reset_lex(fname) 61 | let tokens = [] 62 | while true { 63 | let elem = lexer.next_token() 64 | tokens.push(elem) 65 | match elem.0 { 66 | EOF => break 67 | _ => continue 68 | } 69 | } 70 | let result = toplevel!(tokens) 71 | if has_lex_error.val { 72 | error_info!("") 73 | } else { 74 | result!(ctx).0 75 | } 76 | } catch { 77 | @fs.IOError(_) => error_info!("cannot locate file \"\{fname}\"") 78 | ErrorWithInfo(_) as e => raise e 79 | UnexpectedToken(t, (start, end), _) => 80 | if has_lex_error.val { 81 | error_info!("") 82 | } else { 83 | error_info!("unexpected token \"\{t}\"", info=FI(start, end)) 84 | } 85 | _ => panic() 86 | } 87 | } 88 | 89 | ///| 90 | fn main { 91 | let argv = @sys.get_cli_args()[2:] 92 | try { 93 | if argv.length() != 1 { 94 | error_info!("you must specify exactly one input file") 95 | } else { 96 | let fname = argv[0] 97 | ignore(process_file!(Context::empty(), fname)) 98 | } 99 | } catch { 100 | ErrorWithInfo((msg, info)) => 101 | if not(msg.is_empty()) { 102 | println("Error:\{info} \{msg}") 103 | } 104 | _ => panic() 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /chap31-fullfomsub/moon.pkg.json: -------------------------------------------------------------------------------- 1 | { 2 | "is-main": true, 3 | "import": [ 4 | "moonbitlang/x/fs", 5 | "moonbitlang/x/sys" 6 | ], 7 | "pre-build": [ 8 | { 9 | "command": "node $mod_dir/.mooncakes/moonbitlang/yacc/boot/moonyacc.js $input -o $output", 10 | "input": "parser.mbty", 11 | "output": "parser.mbt" 12 | }, 13 | { 14 | "command": "$mod_dir/.mooncakes/moonbitlang/lex/moonlex $input -o $output", 15 | "input": "lexer.mbtx", 16 | "output": "lexer.mbt" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /chap31-fullfomsub/support.mbt: -------------------------------------------------------------------------------- 1 | ///| 2 | struct Pos { 3 | fname : String 4 | lnum : Int 5 | bol : Int 6 | cnum : Int 7 | } 8 | 9 | ///| 10 | impl Show for Pos with output(self, logger) { 11 | logger.write_string("\{self.fname}:\{self.lnum}:\{self.cnum - self.bol + 1}") 12 | } 13 | 14 | ///| 15 | enum Info { 16 | FI(Pos, Pos) 17 | UNKNOWN 18 | } 19 | 20 | ///| 21 | impl Show for Info with output(self, logger) { 22 | match self { 23 | FI(start, end) => logger.write_string(" [\{start}--\{end}]") 24 | UNKNOWN => () 25 | } 26 | } 27 | 28 | ///| 29 | type! 
ErrorWithInfo (String, Info) 30 | 31 | ///| 32 | fn error_info![T](msg : String, info~ : Info = UNKNOWN) -> T { 33 | raise ErrorWithInfo((msg, info)) 34 | } 35 | 36 | ///| 37 | typealias ParseToplevel = (Context) -> (@immut/list.T[Command], Context)!Error 38 | 39 | ///| 40 | typealias ParseCommand = (Context) -> (Command, Context)!Error 41 | 42 | ///| 43 | typealias ParseBinder = (Context) -> Binding!Error 44 | 45 | ///| 46 | typealias ParseTerm = (Context) -> Term!Error 47 | 48 | ///| 49 | typealias ParseCases = (Context) -> @immut/list.T[(String, (String, Term))]!Error 50 | 51 | ///| 52 | typealias ParseCase = (Context) -> (String, (String, Term))!Error 53 | 54 | ///| 55 | typealias ParseLabelTerms = (Context, Int) -> @immut/list.T[(String, Term)]!Error 56 | 57 | ///| 58 | typealias ParseLabelTerm = (Context, Int) -> (String, Term)!Error 59 | 60 | ///| 61 | typealias ParseType = (Context) -> Type!Error 62 | 63 | ///| 64 | typealias ParseLabelTypes = (Context, Int) -> @immut/list.T[(String, Type)]!Error 65 | 66 | ///| 67 | typealias ParseLabelType = (Context, Int) -> (String, Type)!Error 68 | 69 | ///| 70 | typealias ParseKind = (Context) -> Kind 71 | -------------------------------------------------------------------------------- /moon.mod.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "TAPL", 3 | "version": "1.0.0", 4 | "deps": { 5 | "moonbitlang/x": "0.4.21", 6 | "moonbitlang/yacc": "0.2.9" 7 | }, 8 | "bin-deps": { 9 | "moonbitlang/lex": "0.3.2" 10 | }, 11 | "license": "MIT", 12 | "source": ".", 13 | "warn-list": "-6-7" 14 | } 15 | --------------------------------------------------------------------------------
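
Note on the shared driver pipeline: every chapter package above wires the same pre-build steps (moonyacc turns parser.mbty into parser.mbt, moonlex turns lexer.mbtx into lexer.mbt) and then drives the generated code from main.mbt through new_lexer, next_token, and toplevel. The sketch below is not part of the repository; it condenses the parse_file pattern from the chapter main.mbt files into a hypothetical parse_string helper that works on an in-memory string instead of a file. It assumes it is placed inside one of the chapter packages (for example chap22-simplerecon), so that Context, Command, new_lexer, reset_lex, has_lex_error, error_info, and toplevel are all in scope.

///| Hypothetical helper (illustration only, not in the repository): tokenize and
///| parse a source string with the generated lexer/parser, mirroring parse_file
///| in each chapter's main.mbt but without the file I/O and import handling.
fn parse_string!(ctx : Context, code : String) -> @immut/list.T[Command] {
  let lexer = new_lexer(code) // Lexer wraps a Lexbuf built from the string
  reset_lex("<string>") // reset the line/column tracking used in error positions
  let tokens = []
  while true {
    let elem = lexer.next_token() // (Token, Position, Position)
    tokens.push(elem)
    match elem.0 {
      EOF => break
      _ => continue
    }
  }
  // toplevel! checks the token stream and returns a continuation that,
  // applied to a Context, yields the parsed command list and the new context.
  let result = toplevel!(tokens)
  if has_lex_error.val {
    error_info!("") // main.mbt reports lexer errors the same way
  } else {
    result!(ctx).0 // run the parse continuation in the given context
  }
}

Under these assumptions, a chapter's front end could then be exercised on a literal program, for example parse_string!(Context::empty(), "true; succ (pred 0);"), before feeding each resulting Command to that chapter's process_command!.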