├── .build.yml ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── Notes ├── .ztr-directory ├── 20220506204442.md ├── 20220506205025.md ├── 20220509195321.md ├── 20220509195813.md └── temporary.md ├── README.md ├── paper ├── Makefile ├── biblio.bib ├── paper.pdf └── paper.tex ├── src ├── ast │ ├── mod.rs │ └── pretty.rs ├── bin │ ├── debug-lsp │ │ └── main.rs │ ├── tethys.rs │ └── uitest │ │ ├── header.rs │ │ ├── main.rs │ │ └── runners.rs ├── codegen │ ├── closure.rs │ ├── ir.rs │ ├── llvm.rs │ └── mod.rs ├── ctxt.rs ├── diag.rs ├── error.rs ├── intern.rs ├── lib.rs ├── parse │ └── mod.rs ├── resolve │ └── mod.rs └── typeck │ ├── ast.rs │ ├── mod.rs │ ├── norm.rs │ ├── pretty.rs │ └── unify.rs ├── support ├── README.md ├── examples │ └── fizzbuzz.tys ├── highlight │ ├── .vscodeignore │ ├── LICENSE │ ├── language-configuration.json │ ├── package.json │ ├── syntaxes │ │ └── tethys.tmLanguage.json │ └── tethys-lang-0.0.1.vsix ├── prelude.tys ├── testing.tys ├── tests │ └── resolve │ │ ├── tyvar-scope-1.tys │ │ ├── tyvar-scope-2.tys │ │ ├── tyvar-scope-3.tys │ │ ├── tyvar-scope-4.tys │ │ ├── tyvar-scope-5.tys │ │ └── tyvar-scope-6.tys ├── tethys-mode.el ├── tychk_nbe.ml ├── typeck.ml ├── typeck │ ├── .gitignore │ ├── .ocamlformat │ ├── bin │ │ ├── dune │ │ └── main.ml │ ├── dune-project │ ├── lib │ │ ├── dune │ │ └── typeck.ml │ ├── test │ │ ├── dune │ │ └── typeck.ml │ └── typeck.opam └── vim-tethys │ ├── README.md │ ├── ftdetect │ └── tethys.vim │ └── syntax │ └── tethys.vim └── tmp.tys /.build.yml: -------------------------------------------------------------------------------- 1 | image: alpine/edge 2 | secrets: 3 | - e020de33-eb67-40d4-b09c-147ddf4d0c8e 4 | sources: 5 | - git+ssh://git@git.sr.ht/~thepuzzlemaker/tethys 6 | tasks: 7 | - mirror: | 8 | cd tethys 9 | git remote add github git@github.com:ThePuzzlemaker/tethys.git 10 | ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts 11 | git push --all github 12 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.aux 2 | *.dvi 3 | *.fdb_latexmk 4 | *.fls 5 | *.log 6 | *.out 7 | *.toc 8 | *.bbl 9 | *.bcf 10 | *.blg 11 | *.xml 12 | *.synctex.gz 13 | *.synctex(busy) 14 | .vscode/ 15 | 16 | # Added by cargo 17 | 18 | /target 19 | *~ 20 | .stack-work 21 | *.ll 22 | *.bc 23 | *.o 24 | *.wasm 25 | *.wat 26 | .dir-locals.el -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "tethys" 3 | version = "0.1.0" 4 | edition = "2021" 5 | default-run = "tethys" 6 | rust-version = "1.66.1" 7 | license = "BSD-3-Clause" 8 | 9 | [dependencies] 10 | ariadne = "0.3.0" 11 | logos = "0.12.1" 12 | derive_more = "0.99.17" 13 | pretty = "0.12.3" 14 | index_vec = "0.1.3" 15 | thiserror = "1.0.37" 16 | eyre = "0.6.8" 17 | stable-vec = "0.4.0" 18 | internment = "0.7.0" 19 | id-arena = "2.2.1" 20 | im = "15.1.0" 21 | chumsky = { git = "https://github.com/zesterer/chumsky.git", features = ["pratt", "label"] } 22 | cranelift-entity = "0.103.0" 23 | itertools = "0.12.0" 24 | spinneret = { git = "https://git.sr.ht/~thepuzzlemaker/spinneret" } 25 | llvm-sys = { version = "170", features = ["prefer-static"] } 26 | libc = "0.2.153" 27 | 28 | [dependencies.calypso_base] 29 | git = "https://github.com/calypso-lang/calypso.git" 30 | rev = "fe9c168" 31 | default-features = false 32 | features = 
["span", "symbol"] 33 | 34 | [dependencies.color-eyre] 35 | version = "0.6.1" 36 | optional = true 37 | 38 | [dependencies.tracing-subscriber] 39 | version = "0.3.16" 40 | optional = true 41 | features = ["parking_lot", "env-filter", "time"] 42 | 43 | [dependencies.tracing] 44 | version = "0.1.37" 45 | features = ["max_level_trace", "release_max_level_info"] 46 | 47 | [dependencies.serde] 48 | version = "1.0.147" 49 | optional = true 50 | features = ["derive"] 51 | 52 | [dependencies.serde_yaml] 53 | version = "0.9.14" 54 | optional = true 55 | 56 | [dependencies.clap] 57 | version = "4.1.8" 58 | optional = true 59 | features = ["color", "derive"] 60 | 61 | [dependencies.walkdir] 62 | version = "2.3.2" 63 | optional = true 64 | 65 | [dependencies.indicatif] 66 | version = "0.16.2" 67 | optional = true 68 | 69 | [dependencies.tower-lsp] 70 | version = "0.19.0" 71 | optional = true 72 | 73 | [dependencies.tokio] 74 | version = "1.21.2" 75 | optional = true 76 | features = ["full"] 77 | 78 | [features] 79 | default = ["tethys-deps"] 80 | tethys-deps = [ 81 | "color-eyre", 82 | "tracing-subscriber" 83 | ] 84 | uitest-deps = [ 85 | "color-eyre", 86 | "tracing-subscriber", 87 | "serde", 88 | "serde_yaml", 89 | "clap", 90 | "walkdir", 91 | "indicatif" 92 | ] 93 | debug-lsp-deps = [ 94 | "color-eyre", 95 | "tracing-subscriber", 96 | "tower-lsp", 97 | "tokio" 98 | ] 99 | 100 | [[bin]] 101 | name = "tethys" 102 | path = "src/bin/tethys.rs" 103 | required-features = ["tethys-deps"] 104 | 105 | [[bin]] 106 | name = "uitest" 107 | path = "src/bin/uitest/main.rs" 108 | required-features = ["uitest-deps"] 109 | 110 | [[bin]] 111 | name = "debug-lsp" 112 | path = "src/bin/debug-lsp/main.rs" 113 | required-features = ["debug-lsp-deps"] 114 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) ThePuzzlemaker 2023. 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions are 5 | met: 6 | 7 | 1. Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 9 | 10 | 2. Redistributions in binary form must reproduce the above 11 | copyright notice, this list of conditions and the following 12 | disclaimer in the documentation and/or other materials provided 13 | with the distribution. 14 | 15 | 3. Neither the name of the copyright holder nor the names of its 16 | contributors may be used to endorse or promote products derived 17 | from this software without specific prior written permission. 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | 31 | Some source code is derived from András Kovács's Elaboration Zoo, 32 | which is licensed under BSD-3-Clause. 33 | https://github.com/AndrasKovacs/elaboration-zoo/tree/master/03-holes -------------------------------------------------------------------------------- /Notes/.ztr-directory: -------------------------------------------------------------------------------- 1 | {"sorting":"name-up","project":{"title":"Notes","formats":["latex","markdown","chromium-pdf"],"filters":[],"cslStyle":"","templates":{"tex":"","html":""}},"icon":null} -------------------------------------------------------------------------------- /Notes/20220506204442.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Typechecking, An Overview 3 | keywords: 4 | - Typechecking 5 | --- 6 | 7 | # Typechecking, An Overview -------------------------------------------------------------------------------- /Notes/20220506205025.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: AST 3 | --- 4 | 5 | # AST 6 | 7 | ## TODOs 8 | 9 | - [ ] add more token information to the AST--CST-like 10 | - [ ] resolution AST – indices/node-index-like -------------------------------------------------------------------------------- /Notes/20220509195321.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: System F 3 | keywords: 4 | - Typechecking 5 | - Lambda Calculus 6 | --- 7 | 8 | # System F 9 | 10 | -------------------------------------------------------------------------------- /Notes/20220509195813.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Bidirectional Typechecking 3 | keywords: 4 | - Typechecking 5 | --- 6 | 7 | # Bidirectional Typechecking -------------------------------------------------------------------------------- /Notes/temporary.md: -------------------------------------------------------------------------------- 1 | 34 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Tethys 2 | 3 | 4 | [![License][license_badge]][license_link] [![Discord](https://img.shields.io/discord/822290196057948171?style=flat-square&color=blue)](https://discord.gg/26X6ChQQcG) ![Lines of Code][tokei_loc] 5 | 6 | [tokei_loc]: https://img.shields.io/tokei/lines/github/ThePuzzlemaker/Tethys?style=flat-square 7 | 8 | 9 | 10 | 11 | [license_badge]: https://img.shields.io/badge/license-BSD--3--Clause-blue?style=flat-square 12 | [license_link]: #license 13 | 14 | [calypso]: https://calypso-lang.github.io/ 15 | 16 | Tethys is a toy functional programming language based on a System 17 | F-based core calculus. 18 | 19 | This language is mostly for learning about type systems, but I am 20 | going to make it into something half-fleshed (as opposed to 21 | fully-fleshed). It's not going to be *efficient* (most likely), but it 22 | will be a good learning experience. 23 | 24 | Note that this code is very work-in-progress. Contributions are 25 | welcome (and encouraged!), but it's likely that as a toy langauge, 26 | this will never be in a state that is helpful or efficient to use in 27 | production. If you want a language that will (eventually) be robust 28 | (enough), see [Calypso][calypso]. 
29 | 30 | ## Example 31 | 32 | The following example is an implementation of 33 | [FizzBuzz](https://en.wikipedia.org/wiki/Fizz_buzz) up to 100. This is 34 | also available at 35 | [`support/examples/fizzbuzz.tys`](./support/examples/fizzbuzz.tys). Note 36 | that this is currently pseudocode, though it will likely not change 37 | much by the time most features are implemented. 38 | 39 | ```elixir 40 | def main 41 | : () -> () 42 | = \_. 43 | each (fizzbuzz 100) println 44 | 45 | def fizzbuzz 46 | : Int -> List[String] 47 | = \max. 48 | map (rangeI 1 100) (\n. 49 | if (divides n 15) 50 | then "FizzBuzz" 51 | else if (divides n 3) 52 | then "Fizz" 53 | else if (divides n 5) 54 | then "Buzz" 55 | # Typeclasses soon(tm) 56 | else intToString n) 57 | ``` 58 | 59 | ## Internals and Motivation 60 | 61 | There are two parts of Tethys: the surface language, and the core 62 | calculus. The core calculus is the intermediate representation of 63 | Tethys which is used for type checking and inference, and for 64 | interpretation. The surface language is the higher-level interface 65 | that is eventually desugared by the compiler/interpreter to the core 66 | calculus. 67 | 68 | The reference implementation in Haskell will probably not use any 69 | particular "tricks" in terms of interpretation (VM, JIT, AOT 70 | compilation to native, etc.), instead just using a simple tree-walk 71 | interpreter or similar. (At some point, it may end up compiling to 72 | Calypso's SaturnVM.) 73 | 74 | This language was created in order to conduct informal research (i.e., 75 | not actually discovering anything interesting, probably) on type 76 | systems, especially bidirectional typechecking and 77 | polymorphism. Tethys is named as such as it is the name of the 78 | co-orbital moon to Calypso; as my work on this language is 79 | "co-orbital", so to speak, to my work on Calypso. 80 | 81 | More information on Calypso (the language, of course) is available at 82 | [https://calypso-lang.github.io][calypso]. 83 | 84 | This implementation is partially based on an implementation of 85 | bidirectional typechecking with unification of higher-rank 86 | polymorphism created by [Mark Barbone (aka 87 | MBones/mb64)](https://github.com/mb64). A slightly modified version of 88 | this implementation is available at 89 | [`support/tychk_nbe.ml`](./support/tychk_nbe.ml). The original is 90 | available [in this 91 | Gist](https://gist.github.com/mb64/f49ccb1bbf2349c8026d8ccf29bd158e#file-tychk_nbe-ml). 92 | 93 | Additionally, this implementation is based on an algorithm described 94 | by [András Kovács](https://github.com/AndrasKovacs) in their 95 | [elaboration 96 | zoo](https://github.com/AndrasKovacs/elaboration-zoo/tree/master/03_holes). Further 97 | inspiration comes from [Bálint Kocsis](https://github.com/balint99)'s 98 | [SFPL](https://github.com/balint99/sfpl). The core algorithm, created 99 | for dependent types but made specific to System F, is available in 100 | [`support/typeck`](./support/typeck). 101 | 102 | A list of resources and bibliography used to make this is available in 103 | the [paper](#paper). Note that this list is not necessarily up to 104 | date. I'll try to update it as soon as I can, when I use new 105 | resources. 106 | 107 | ### "Paper" 108 | 109 | The source for the "paper" (really just a typeset informal writeup) is 110 | in [`paper/paper.tex`](paper/paper.tex) (requires pdftex). 
At some 111 | points there may be a PDF in the repository but there is no guarantee 112 | that it is up-to-date with the LaTeX source. 113 | 114 | ## Contribution 115 | 116 | I have yet to draft up a CONTRIBUTING.md. If you'd like to contribute 117 | and don't know where to start, feel free to open an issue, ping me on 118 | Discord or contact me elsewhere. I'll let you know if there's anything 119 | I think you can help with, and if so give you some 120 | pointers. Contributions are greatly appreciated! 121 | 122 | There is a semi-functional VSCode extension / TextMate grammar in 123 | [`support/highlight`](./support/highlight). It's not perfect, but it 124 | works well enough. 125 | 126 | ### Repository Overview 127 | 128 | - `paper/`: Source and occasionally PDF for the "paper" (really just a 129 | typeset informal writeup). See [this section](#paper) for a bit more 130 | information. 131 | - `support/`: Support files, including grammars, examples, tests, 132 | etc. See [its README](./support/README.md) for a bit more 133 | information. 134 | 135 | ## License 136 | 137 | This project is licensed under the BSD 3-Clause license: 138 | [LICENSE](LICENSE) or https://spdx.org/licenses/BSD-3-Clause.html. 139 | 140 | ## Contribution 141 | 142 | Unless you explicitly state otherwise, any contribution intentionally 143 | submitted for inclusion in the work by you, shall be licensed as 144 | above, without any additional terms or conditions. 145 | -------------------------------------------------------------------------------- /paper/Makefile: -------------------------------------------------------------------------------- 1 | all: 2 | latexmk -pdf paper.tex 3 | clean: 4 | latexmk -c 5 | rm -f paper.dvi 6 | rm -f paper.bbl 7 | rm -f paper.run.xml 8 | rm -f paper.synctex.gz 9 | -------------------------------------------------------------------------------- /paper/biblio.bib: -------------------------------------------------------------------------------- 1 | @inproceedings{Dunfield13:bidir, 2 | author = {Jana Dunfield and Neelakantan R. Krishnaswami}, 3 | title = {Complete and Easy Bidirectional Typechecking for Higher-Rank Polymorphism}, 4 | booktitle = {Int'l Conf. Functional Programming}, 5 | month = aug, 6 | year = {2020}, 7 | eprinttype = {arxiv}, 8 | eprintclass = {cs.PL}, 9 | eprint = {1306.6032v2}, 10 | } 11 | 12 | @inproceedings{Cai16:sysfw, 13 | author = {Cai, Yufei and Giarrusso, Paolo G. 
and Ostermann, Klaus}, 14 | title = {System {F-Omega} with Equirecursive Types for Datatype-Generic Programming}, 15 | year = {2016}, 16 | publisher = {Association for Computing Machinery}, 17 | doi = {10.1145/2837614.2837660}, 18 | booktitle = {Proceedings of the 43rd Annual ACM SIGPLAN-SIGACT Symposium on Principles of Programming Languages}, 19 | pages = {30–43}, 20 | numpages = {14}, 21 | series = {POPL '16}, 22 | } 23 | 24 | @online{Jez17:sysfw, 25 | author = {Zimmerman, Jake}, 26 | title = {System {F$\omega$} and Parameterization}, 27 | date = {2017-09-27}, 28 | url = {https://blog.jez.io/system-f-param/} 29 | } 30 | 31 | @misc{recty-lec, 32 | author = {Nate Foster}, 33 | title = {{CS} 4110 – {Programming} Languages and Logics}, 34 | subtitle = {Lecture \#27: Recursive Types}, 35 | year = {2012}, 36 | url = {https://www.cs.cornell.edu/courses/cs4110/2012fa/lectures/lecture27.pdf} 37 | } 38 | 39 | @misc{tychk-nbe, 40 | author = {Barbone, Mark}, 41 | title = {Typechecking for higher-rank polymorphism}, 42 | year = {2021}, 43 | url = {https://gist.github.com/mb64/f49ccb1bbf2349c8026d8ccf29bd158e} 44 | } 45 | 46 | @article{jones_1995, 47 | title={A system of constructor classes: overloading and implicit higher-order polymorphism}, 48 | volume={5}, 49 | DOI={10.1017/S0956796800001210}, 50 | number={1}, 51 | journal={Journal of Functional Programming}, 52 | publisher={Cambridge University Press}, 53 | author={Jones, Mark P.}, 54 | year={1995}, 55 | pages={1–35} 56 | } -------------------------------------------------------------------------------- /paper/paper.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThePuzzlemaker/tethys/209079a9a9374e6aa85fb3ccd25a3f20809fdb84/paper/paper.pdf -------------------------------------------------------------------------------- /paper/paper.tex: -------------------------------------------------------------------------------- 1 | % chktex-file 1 2 | % chktex-file 26 3 | % chktex-file 35 4 | \documentclass[11pt]{article} 5 | \usepackage[ligature,reserved]{semantic} 6 | \usepackage[utf8]{inputenc} 7 | \usepackage[american]{babel} 8 | \usepackage{csquotes} 9 | \usepackage[dvipsnames]{xcolor} 10 | \usepackage{hyperref} 11 | \usepackage{hyphenat} 12 | \usepackage{amsfonts} 13 | \usepackage{amsmath} \allowdisplaybreaks 14 | \usepackage{amssymb} 15 | \usepackage{amsthm} 16 | \usepackage[margin=1.25in,letterpaper]{geometry} 17 | \usepackage{mathtools} 18 | \usepackage{mathpartir} 19 | \usepackage{scalerel} 20 | \usepackage{stackengine} 21 | 22 | \usepackage[ 23 | backend=biber, 24 | style=apa, 25 | ]{biblatex} 26 | 27 | \addbibresource{biblio.bib} 28 | 29 | \hypersetup{ 30 | colorlinks=true, 31 | linkcolor=blue, 32 | filecolor=magenta, 33 | urlcolor=cyan, 34 | citecolor=Green, 35 | pdfpagemode=FullScreen, 36 | } 37 | 38 | 39 | % thanks goes to einargs for creating this setup (BNF, \typerule, \typeaxiom) 40 | % (rest is my hacky LaTeX) 41 | % Syntax group 42 | \newcommand{\syng}[2]{#1 \bnf& & \text{#2} \\} 43 | % Line in a syntax group 44 | \newcommand{\syn}[2]{& #1 & \text{#2} \\} 45 | \newcommand{\bnf}{\mathrel{::=}\;} 46 | 47 | \newcommand{\tchk}{\Leftarrow} 48 | \newcommand{\tsyn}{\Rightarrow} 49 | \newcommand{\rarr}{\rightarrow} 50 | 51 | % found on tex.SE, tortured into compliance 52 | \newlength\arrowheight 53 | \newcommand\doubleRightarrow{ 54 | \mathrel{\ThisStyle{ 55 | \setlength{\arrowheight}{\heightof{$\SavedStyle\Downarrow$}} 56 | 
\scalerel*{\rotatebox{90}{\stackengine{.5\arrowheight}{$\SavedStyle\Downarrow$} 57 | {$\SavedStyle\Downarrow$}{O}{c}{F}{F}{L}}}{\rotatebox[origin=c]{90}{$\Downarrow$}}} 58 | }} 59 | 60 | % function & applicand type notation 61 | % stolen from Dunfield 2013 62 | \newcommand{\tapp}[3]{ 63 | #1 \bullet #2 \doubleRightarrow #3 64 | } 65 | 66 | \newcommand{\alphahat}{\hat{\alpha}} 67 | \newcommand{\betahat}{\hat{\beta}} 68 | 69 | \newcommand{\tprod}[1]{\{#1\}} 70 | \newcommand{\tsum}[1]{\langle#1\rangle} 71 | \newcommand{\ttauibar}{\overline{\tau_i}} 72 | \newcommand{\tall}[3]{\forall (#1 :: #2).#3} 73 | \newcommand{\tlam}[3]{\lambda (#1 :: #2).#3} 74 | 75 | \newcommand{\eeibar}{\overline{e_i}} 76 | \newcommand{\ecase}[2]{\\ #1\ \\ #2} 77 | 78 | \newcommand{\Nf}{^{\textsf{Nf}}} 79 | \newcommand{\Ne}{^{\textsf{Ne}}} 80 | 81 | \newcommand{\typerule}[3]{ 82 | \inferrule{#2}{#3}\quad(\textsf{#1}) 83 | } 84 | 85 | \newcommand{\typeaxiom}[2]{ 86 | \inferrule{ }{#2}\quad(\textsf{#1}) 87 | } 88 | 89 | \reservestyle{\literal}{\texttt} 90 | \reservestyle{\keyword}{\textbf} 91 | \literal{true, false, bool, int, cond, fix, mod, and, or, not, project, embed, eq, neq} 92 | \keyword{case, of} 93 | 94 | \newtheorem{theorem}{Theorem}[section] 95 | \newtheorem{corollary}{Corollary}[theorem] 96 | \newtheorem{lemma}[theorem]{Lemma} 97 | 98 | \title{Tethys\\ 99 | \large A Toy Functional Programming Language\\ 100 | with a System F$\omega$-based Core Calculus\\ 101 | \url{https://github.com/ThePuzzlemaker/tethys}} 102 | \author{James [Redacted] (aka ThePuzzlemaker)} 103 | \date{\today} 104 | 105 | \begin{document} 106 | 107 | \maketitle 108 | 109 | \tableofcontents 110 | \newpage 111 | 112 | \section{Introduction} 113 | 114 | This ``paper'' (which is really just a well-typeset, but somewhat informal 115 | write-up) introduces Tethys, a toy functional programming language based on 116 | a System F$\omega$-based core calculus. Hence the title. 117 | 118 | There are two parts of Tethys: the surface language, and the core calculus. The 119 | core calculus is the intermediate representation of Tethys which is used 120 | for type checking and inference, and for interpretation. The surface 121 | language is the higher-level interface that is eventually desugared by the 122 | compiler/interpreter to the core calculus. 123 | 124 | The reference implementation in Rust will not use any particular ``tricks'' in 125 | terms of interpretation, instead just using a simple tree-walk interpreter 126 | or similar. 127 | 128 | This language was created in order to conduct informal research (i.e., not 129 | actually discovering anything interesting, probably) on type systems, 130 | especially bidirectional typechecking and polymorphism. Tethys is named as 131 | such as it is the name of the co-orbital moon to Calypso; as my work on 132 | this language is ``co-orbital'', so to speak, to my work on 133 | \href{https://calypso-lang.github.io}{Calypso}. 134 | 135 | Please note that I am not an expert in pretty much any subject this writeup 136 | covers. If you notice something you don't understand, or that you think is 137 | a mistake, don't fret to point it out. 138 | 139 | \subsection{Background} 140 | 141 | \begin{quote} 142 | Note: This section is taken from 143 | \href{https://thepuzzlemaker.info/static/tethys-slides/}{some slides I 144 | prepared for a lightning talk} in the 145 | \href{https://discord.gg/26X6ChQQcG}{r/PL Discord server}. If it seems 146 | a bit un-prose-y, that's why. 
147 | \end{quote} 148 | 149 | Originally, I thought I needed to use System F$\omega$, an extension of the 150 | polymorphic typed lambda calculus (System F), with the extension of type 151 | lambdas. However, it has some problems: 152 | 153 | \begin{itemize} 154 | \item System F is already undecidable enough, with respect to inference. 155 | Adding \emph{more} types of types does \textbf{not} help. 156 | \item Theoretically, bidirectional typechecking (my chosen strategy) 157 | doesn't \emph{need} unification, but it sure makes it nice. And having 158 | type lambdas means you need higher order unfication, which is scary 159 | because anything including ``higher-order'' in its name is automatically 160 | scary. Also it's undecidable, so you have to restrict yourself to 161 | higher-order pattern unification, which is still quite scary. 162 | \item I just can't really find much good introductory literature on 163 | System F$\omega$ (at least, literature that I can understand). 164 | \end{itemize} 165 | 166 | As it turns out, System F$\omega$ is more expressive than F, but \textbf{way} 167 | much more of a pain. Not even Haskell, the paragon of kitchen sink type 168 | system features has it. (By default, because of course GHC's gonna have it 169 | as an extension). 170 | 171 | Here are some of the key theoretical differences: 172 | 173 | \begin{itemize} 174 | \item Within the type system, type constructors are opaque. Defining a type 175 | just creates opaque term-level and type-level constants, which don't 176 | normalize to the type or term they ``really'' mean. Of course, these 177 | are stripped out after typechecking, once they're no longer needed. 178 | \item Type constructors are injective, i.e. for some type constructor $C$ 179 | and types $X$ and $Y$, if $C\;X$ and $C\;Y$ are the same type, then $X$ 180 | and $Y$ are the same type. 181 | \item Differently named type constructors are never equal, even if they 182 | ``mean'' the same thing, i.e. for some type constructors $C1$ and $C2$, 183 | and type $T$, $C1\;T$ never equals $C2\;T$, no matter how they are 184 | defined. 185 | \end{itemize} 186 | 187 | Note that this is pretty much nominal typing! (If my understanding of nominal 188 | typing is correct.) 189 | 190 | 191 | \section{The Surface Language} 192 | 193 | This section has not been started yet. 194 | 195 | \section{The Declarative Core Calculus} 196 | 197 | This section is, unsurprisingly, work-in-progress. 198 | 199 | \section{References} 200 | 201 | \subsection{Acknowledements} 202 | 203 | Thanks to Brendan Zabarauskas and András Kovács, who have helped with many type 204 | system details. Many thanks to Mark Barbone (aka MBones/mb64), who provided 205 | a code sample of their bidirectionally typed (with unification) algorithm 206 | for typechecking higher-rank System F (\cite{tychk-nbe}), and who also 207 | noted some issues they had with \citetitle{Dunfield13:bidir} 208 | (\cite{Dunfield13:bidir}). 209 | 210 | 211 | \subsection{References} 212 | 213 | \nocite{*} 214 | 215 | \printbibliography[heading=none] 216 | 217 | \end{document} 218 | -------------------------------------------------------------------------------- /src/ast/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module implements Tethys's AST. 
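//!
//! Nodes ([`Item`], [`Expr`], [`Ty`], and [`Synthetic`]) are allocated in the
//! `id_arena` arenas held by [`AstArenas`] on the [`GlobalCtxt`], and each
//! constructor assigns a fresh [`AstId`] via `next_ast_id`. The id-to-[`Node`]
//! map kept alongside the arenas lets later passes (resolution, the debug LSP,
//! etc.) look a node back up from its [`AstId`] with `get_node_by_id`.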
2 | 3 | use std::{ 4 | borrow::Cow, 5 | cell::{Cell, RefCell}, 6 | collections::HashMap, 7 | }; 8 | 9 | use calypso_base::symbol::Ident; 10 | use id_arena::{Arena, Id}; 11 | 12 | use crate::{ctxt::GlobalCtxt, parse::Span, resolve::ResolutionData}; 13 | 14 | pub const DUMMY_AST_ID: AstId = AstId { _raw: 0 }; 15 | 16 | pub mod pretty; 17 | 18 | index_vec::define_index_type! { 19 | pub struct AstId = u32; 20 | 21 | DISABLE_MAX_INDEX_CHECK = cfg!(not(debug_assertions)); 22 | DEBUG_FORMAT = "AstId({})"; 23 | DISPLAY_FORMAT = "{}"; 24 | IMPL_RAW_CONVERSIONS = true; 25 | } 26 | 27 | #[derive(Clone, Debug)] 28 | pub struct Item { 29 | pub id: AstId, 30 | pub ident: Ident, 31 | pub kind: ItemKind, 32 | pub span: Span, 33 | } 34 | 35 | impl Item { 36 | pub fn new(gcx: &GlobalCtxt, ident: Ident, kind: ItemKind, span: Span) -> Id { 37 | let id = gcx.arenas.ast.next_ast_id(); 38 | let item = gcx.arenas.ast.item.borrow_mut().alloc(Item { 39 | id, 40 | kind, 41 | ident, 42 | span, 43 | }); 44 | gcx.arenas.ast.insert_node(id, Node::Item(item)); 45 | item 46 | } 47 | } 48 | 49 | #[derive(Clone, Debug)] 50 | pub enum ItemKind { 51 | /// A value definition, as defined by `def`. 52 | Value(Id, Id), 53 | /// A type alias, as defined by `type`. 54 | TyAlias(Id), 55 | /// An enum, as defined by `enum`. 56 | Enum( 57 | im::Vector, 58 | im::Vector<(Ident, im::Vector>)>, 59 | Span, 60 | ), 61 | } 62 | 63 | #[derive(Clone, Debug)] 64 | pub struct Expr { 65 | pub id: AstId, 66 | pub kind: ExprKind, 67 | pub span: Span, 68 | } 69 | 70 | impl Expr { 71 | pub fn new(gcx: &GlobalCtxt, kind: ExprKind, span: Span) -> Id { 72 | let id = gcx.arenas.ast.next_ast_id(); 73 | let expr = gcx 74 | .arenas 75 | .ast 76 | .expr 77 | .borrow_mut() 78 | .alloc(Expr { id, kind, span }); 79 | gcx.arenas.ast.insert_node(id, Node::Expr(expr)); 80 | expr 81 | } 82 | } 83 | 84 | #[derive(Clone, Debug)] 85 | pub enum ExprKind { 86 | Unit, 87 | Name(Ident), 88 | Apply(Id, Id), 89 | Lambda(Ident, Id), 90 | Let(Ident, Recursive, Option>, Id, Id), 91 | Number(i64), 92 | BinaryOp { 93 | left: Id, 94 | kind: BinOpKind, 95 | right: Id, 96 | }, 97 | UnaryMinus(Id), 98 | UnaryNot(Id), 99 | Boolean(bool), 100 | If(Id, Id, Id), 101 | /// Tuples, excluding unit, which is [`ExprKind::Unit`]. 102 | Tuple(im::Vector>), 103 | TupleProj(Id, u64), 104 | /// A placeholder for an expression that was not syntactically 105 | /// well-formed. 
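    /// As with [`Res::Err`], keeping a placeholder node here lets later
    /// passes continue and report further errors instead of stopping at
    /// the first malformed expression.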
106 | Err, 107 | } 108 | 109 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 110 | pub enum BinOpKind { 111 | Power, 112 | Multiply, 113 | Divide, 114 | Modulo, 115 | Add, 116 | Subtract, 117 | BitShiftLeft, 118 | BitShiftRight, 119 | BitAnd, 120 | BitXor, 121 | BitOr, 122 | Equal, 123 | NotEqual, 124 | LessThan, 125 | GreaterThan, 126 | LessEqual, 127 | GreaterEqual, 128 | LogicalAnd, 129 | LogicalOr, 130 | } 131 | 132 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 133 | pub enum Recursive { 134 | NotRecursive, 135 | Recursive(Span), 136 | } 137 | 138 | #[derive(Clone, Debug)] 139 | pub struct Ty { 140 | pub id: AstId, 141 | pub kind: TyKind, 142 | pub span: Span, 143 | } 144 | 145 | impl Ty { 146 | pub fn new(gcx: &GlobalCtxt, kind: TyKind, span: Span) -> Id { 147 | let id = gcx.arenas.ast.next_ast_id(); 148 | let ty = gcx.arenas.ast.ty.borrow_mut().alloc(Ty { id, kind, span }); 149 | gcx.arenas.ast.insert_node(id, Node::Ty(ty)); 150 | ty 151 | } 152 | } 153 | 154 | #[derive(Clone, Debug)] 155 | pub enum TyKind { 156 | Unit, 157 | Name(Ident), 158 | Data(Ident, im::Vector>), 159 | Arrow(Id, Id), 160 | Forall(Ident, Id), 161 | /// Tuples, excluding 0-tuples which are [`TyKind::Unit`] 162 | Tuple(im::Vector>), 163 | /// A placeholder for a type that was not syntactically 164 | /// well-formed 165 | Err, 166 | } 167 | 168 | impl TyKind { 169 | pub fn description(&'_ self) -> Cow<'_, str> { 170 | match self { 171 | TyKind::Unit => "unit".into(), 172 | TyKind::Name(..) => "type".into(), 173 | TyKind::Arrow(..) => "arrow".into(), 174 | TyKind::Forall(..) => "forall".into(), 175 | TyKind::Data(..) => "type".into(), 176 | TyKind::Tuple(..) => "tuple".into(), 177 | TyKind::Err => "invalid type".into(), 178 | } 179 | } 180 | } 181 | 182 | #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] 183 | pub enum Res { 184 | /// A primitive type, e.g. `Integer`. 185 | /// 186 | /// **Belongs to the type namespace.** 187 | PrimTy(PrimTy), 188 | /// A primitive function, e.g. `add` 189 | PrimFunc(PrimFunc), 190 | /// Corresponds to something defined in user code, with a unique [`AstId`]. 191 | /// 192 | /// **Does not belong to a specific namespace.** 193 | Defn(DefnKind, AstId), 194 | /// A local definition, in a `let`- or lambda-expression. 195 | /// 196 | /// The [`AstId`] here refers to the `let`- or lambda-expression where the 197 | /// value is declared. 198 | /// 199 | /// **Belongs to the value namespace.** 200 | Local(AstId), 201 | /// A type variable. 202 | /// 203 | /// Similarly to [`Res::Local`], the [`AstId`] here refers to the 204 | /// `forall`-type where the type variable is declared. 205 | /// 206 | /// **Belongs to the type namespace.** 207 | TyVar(AstId), 208 | /// A generic parameter. 209 | /// 210 | /// Similarly to [`Res::Local`], the [`AstId`] here refers to the 211 | /// datatype definition where the type variable is declared. 212 | /// 213 | /// **Belongs to the type namespace.** 214 | Generic(AstId, usize), 215 | /// A dummy [`Res`] variant representing a resolution error, so compilation 216 | /// can continue to gather further errors before crashing. 
217 | /// 218 | /// **Does not belong to a specific namespace.** 219 | Err, 220 | } 221 | 222 | impl Res { 223 | pub fn id(self) -> Option { 224 | match self { 225 | Res::PrimTy(_) | Res::Err | Res::PrimFunc(_) => None, 226 | Res::Defn(_, id) | Res::Local(id) | Res::TyVar(id) | Res::Generic(id, _) => Some(id), 227 | } 228 | } 229 | } 230 | 231 | #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] 232 | pub enum PrimTy { 233 | Integer, 234 | Boolean, 235 | } 236 | 237 | #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] 238 | pub enum PrimFunc { 239 | Add, 240 | } 241 | 242 | #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] 243 | pub enum DefnKind { 244 | Primitive, 245 | Value, 246 | TyAlias, 247 | Enum, 248 | EnumConstructor(usize), 249 | EnumRecursor, 250 | Generic(usize), 251 | } 252 | 253 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 254 | pub enum Node { 255 | Item(Id), 256 | Expr(Id), 257 | Ty(Id), 258 | Syn(Id), 259 | } 260 | impl Node { 261 | pub fn span(self, gcx: &GlobalCtxt) -> Span { 262 | match self { 263 | Self::Item(id) => gcx.arenas.ast.item(id).span, 264 | Self::Expr(id) => gcx.arenas.ast.expr(id).span, 265 | Self::Ty(id) => gcx.arenas.ast.ty(id).span, 266 | Self::Syn(id) => gcx.arenas.ast.syn(id).span, 267 | } 268 | } 269 | 270 | pub fn ident(self, gcx: &GlobalCtxt) -> Option { 271 | match self { 272 | Self::Item(id) => Some(gcx.arenas.ast.item(id).ident), 273 | Self::Expr(id) => match gcx.arenas.ast.expr(id).kind { 274 | ExprKind::Unit 275 | | ExprKind::Apply(_, _) 276 | | ExprKind::Err 277 | | ExprKind::Number(_) 278 | | ExprKind::BinaryOp { .. } 279 | | ExprKind::UnaryMinus(_) 280 | | ExprKind::UnaryNot(_) 281 | | ExprKind::Boolean(_) 282 | | ExprKind::If(..) 283 | | ExprKind::Tuple(_) 284 | | ExprKind::TupleProj(..) => None, 285 | ExprKind::Name(ident) 286 | | ExprKind::Lambda(ident, _) 287 | | ExprKind::Let(ident, _, _, _, _) => Some(ident), 288 | }, 289 | Self::Ty(id) => match gcx.arenas.ast.ty(id).kind { 290 | TyKind::Unit | TyKind::Arrow(_, _) | TyKind::Err | TyKind::Tuple(_) => None, 291 | TyKind::Name(ident) | TyKind::Forall(ident, _) | TyKind::Data(ident, _) => { 292 | Some(ident) 293 | } 294 | }, 295 | Self::Syn(id) => gcx.arenas.ast.syn(id).ident, 296 | } 297 | } 298 | 299 | pub fn id(self, gcx: &GlobalCtxt) -> AstId { 300 | match self { 301 | Self::Item(id) => gcx.arenas.ast.item(id).id, 302 | Self::Expr(id) => gcx.arenas.ast.expr(id).id, 303 | Self::Ty(id) => gcx.arenas.ast.ty(id).id, 304 | Self::Syn(syn) => gcx.arenas.ast.syn(syn).id, 305 | } 306 | } 307 | } 308 | 309 | #[derive(Debug)] 310 | pub struct AstArenas { 311 | pub expr: RefCell>, 312 | pub item: RefCell>, 313 | pub ty: RefCell>, 314 | pub res_data: RefCell, 315 | pub syn: RefCell>, 316 | next_ast_id: Cell, 317 | ast_id_to_node: RefCell>, 318 | } 319 | 320 | /// HACK: make IR ids and use that instead of AstIds in 321 | /// (V)TyKind::Forall 322 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 323 | pub struct Synthetic { 324 | pub id: AstId, 325 | pub span: Span, 326 | pub ident: Option, 327 | } 328 | 329 | impl Synthetic { 330 | pub fn new(gcx: &GlobalCtxt, span: Span, ident: Option) -> Id { 331 | let id = gcx.arenas.ast.next_ast_id(); 332 | let syn = gcx 333 | .arenas 334 | .ast 335 | .syn 336 | .borrow_mut() 337 | .alloc(Synthetic { id, span, ident }); 338 | gcx.arenas 339 | .ast 340 | .ast_id_to_node 341 | .borrow_mut() 342 | .insert(id, Node::Syn(syn)); 343 | syn 344 | } 345 | } 346 | 347 | impl AstArenas { 348 | pub fn clear(&self) { 349 | self.res_data.borrow_mut().clear(); 350 | 
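        // Reset the id counter to 1, matching `Default`, so a cleared
        // arena hands out ids starting from scratch again.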
self.next_ast_id.replace(1); 351 | self.ast_id_to_node.borrow_mut().clear(); 352 | } 353 | 354 | pub fn expr(&self, id: Id) -> Expr { 355 | self.expr.borrow()[id].clone() 356 | } 357 | 358 | pub fn item(&self, id: Id) -> Item { 359 | self.item.borrow()[id].clone() 360 | } 361 | 362 | pub fn ty(&self, id: Id) -> Ty { 363 | self.ty.borrow()[id].clone() 364 | } 365 | 366 | pub fn syn(&self, id: Id) -> Synthetic { 367 | self.syn.borrow()[id] 368 | } 369 | 370 | pub fn next_ast_id(&self) -> AstId { 371 | let id = self.next_ast_id.get(); 372 | assert!(id < u32::MAX); 373 | self.next_ast_id.replace(id + 1); 374 | AstId::from_raw(id) 375 | } 376 | 377 | pub fn get_node_by_id(&self, id: AstId) -> Option { 378 | self.ast_id_to_node.borrow().get(&id).copied() 379 | } 380 | 381 | pub fn into_iter_nodes(&self) -> impl Iterator { 382 | let v = self.ast_id_to_node.borrow(); 383 | v.values().copied().collect::>().into_iter() 384 | } 385 | 386 | pub(crate) fn insert_node(&self, id: AstId, node: Node) { 387 | self.ast_id_to_node.borrow_mut().insert(id, node); 388 | } 389 | 390 | // pub fn count_binders_between_tys(&self, root_binder: AstId, bound_var: AstId) -> usize { 391 | // let mut binders = 0; 392 | 393 | // let mut parentage = self.parentage.borrow(); 394 | // println!("{:#?}", self); 395 | 396 | // let mut node = bound_var; 397 | // loop { 398 | // println!("count: {:?} {:?}", node, root_binder); 399 | // if node == root_binder { 400 | // break; 401 | // } 402 | 403 | // if let Some(parent) = parentage.scope_map.get(&node) { 404 | // match self.ast_id_to_node.borrow().get(&node) { 405 | // Some(Node::Item(item)) => { /* does not bind types */ } 406 | // Some(Node::Expr(expr)) => { /* does not bind types */ } 407 | // Some(Node::Ty(ty)) => match self.ty(*ty).kind { 408 | // ast::TyKind::Unit => {} 409 | // ast::TyKind::Name(_) => {} 410 | // ast::TyKind::Arrow(_, _) => {} 411 | // ast::TyKind::Forall(_, _) => binders += 1, 412 | // ast::TyKind::Err => {} 413 | // }, 414 | // None => unreachable!(), 415 | // } 416 | 417 | // node = *parent; 418 | // } else { 419 | // panic!("count_binders_between_tys: root_binder was not an ancestor of bound_var"); 420 | // } 421 | // } 422 | 423 | // binders 424 | // } 425 | } 426 | 427 | impl Default for AstArenas { 428 | fn default() -> Self { 429 | Self { 430 | expr: Default::default(), 431 | item: Default::default(), 432 | ty: Default::default(), 433 | syn: Default::default(), 434 | res_data: RefCell::new(ResolutionData::default()), 435 | next_ast_id: Cell::new(1), 436 | ast_id_to_node: RefCell::new(std::collections::HashMap::new()), 437 | } 438 | } 439 | } 440 | -------------------------------------------------------------------------------- /src/ast/pretty.rs: -------------------------------------------------------------------------------- 1 | use id_arena::Id; 2 | use pretty::{DocAllocator, RcAllocator, RcDoc}; 3 | 4 | use crate::ctxt::GlobalCtxt; 5 | 6 | use super::{BinOpKind, Expr, ExprKind, Item, ItemKind, Recursive, Ty, TyKind}; 7 | 8 | const PREC_TY_PRIMARY: usize = 3; 9 | const PREC_TY_ARROW: usize = 2; 10 | const PREC_TY_FORALL: usize = 1; 11 | 12 | fn maybe_paren(x: usize, y: usize, doc: RcDoc<'_>) -> RcDoc<'_> { 13 | if y < x { 14 | RcDoc::text("(").append(doc).append(")").group() 15 | } else { 16 | doc 17 | } 18 | } 19 | 20 | pub fn pp_ty(prec: usize, gcx: &GlobalCtxt, ty: Id) -> RcDoc<'_> { 21 | match gcx.arenas.ast.ty(ty).kind { 22 | TyKind::Unit => RcDoc::text("()"), 23 | TyKind::Name(nm) => RcDoc::text(nm.as_str()), 24 | TyKind::Data(nm, generics) 
=> RcAllocator 25 | .text(nm.as_str()) 26 | .append( 27 | RcAllocator 28 | .intersperse( 29 | generics 30 | .iter() 31 | .copied() 32 | .map(|x| pp_ty(PREC_TY_FORALL, gcx, x)), 33 | RcDoc::text(",").append(RcDoc::space()), 34 | ) 35 | .brackets(), 36 | ) 37 | .into_doc(), 38 | TyKind::Arrow(a, b) => { 39 | let a = pp_ty(PREC_TY_PRIMARY, gcx, a); 40 | let b = pp_ty(PREC_TY_FORALL, gcx, b); 41 | maybe_paren( 42 | prec, 43 | PREC_TY_ARROW, 44 | a.append(RcDoc::line()) 45 | .append("->") 46 | .append(RcDoc::space()) 47 | .append(b), 48 | ) 49 | } 50 | TyKind::Forall(x, a) => { 51 | let a = pp_ty(PREC_TY_FORALL, gcx, a); 52 | maybe_paren( 53 | prec, 54 | PREC_TY_FORALL, 55 | RcAllocator 56 | .text("forall ") 57 | .append(x.as_str()) 58 | .append(".") 59 | .append(RcDoc::line()) 60 | .append(a.group()) 61 | .group() 62 | .align() 63 | .into_doc(), 64 | ) 65 | } 66 | TyKind::Tuple(v) => { 67 | let v_multi = v.iter().copied().map(|x| { 68 | RcAllocator 69 | .nil() 70 | .append(pp_ty(PREC_TY_FORALL, gcx, x)) 71 | .nest(2) 72 | .append(",") 73 | .append(RcDoc::line()) 74 | }); 75 | let v_flat = if v.len() > 1 { 76 | RcAllocator 77 | .intersperse( 78 | v.iter() 79 | .copied() 80 | .map(|x| { 81 | RcAllocator 82 | .nil() 83 | .append(pp_ty(PREC_TY_FORALL, gcx, x)) 84 | .nest(2) 85 | }) 86 | .collect::>(), 87 | RcAllocator.text(",").append(" "), 88 | ) 89 | .parens() 90 | } else { 91 | RcAllocator 92 | .nil() 93 | .append(pp_ty(PREC_TY_FORALL, gcx, v[0]).nest(2)) 94 | .append(",") 95 | .parens() 96 | }; 97 | RcAllocator 98 | .text("(") 99 | .append(RcDoc::line()) 100 | .append(RcAllocator.intersperse(v_multi, RcDoc::nil()).indent(4)) 101 | .append(")") 102 | .flat_alt(v_flat) 103 | .group() 104 | .into_doc() 105 | } 106 | TyKind::Err => RcDoc::text(""), 107 | } 108 | } 109 | 110 | pub const PREC_EXPR_PRIMARY: usize = 150; 111 | pub const PREC_EXPR_TUPLE_PROJ: usize = 145; 112 | pub const PREC_EXPR_APPL: usize = 140; 113 | pub const PREC_EXPR_UNARY: usize = 120; 114 | pub const PREC_EXPR_LAMBDA: usize = 20; 115 | pub const PREC_EXPR_IF: usize = 15; 116 | pub const PREC_EXPR_LET: usize = 10; 117 | 118 | pub fn prec_binop(kind: BinOpKind) -> usize { 119 | match kind { 120 | BinOpKind::LogicalOr => 30, 121 | BinOpKind::LogicalAnd => 40, 122 | BinOpKind::Equal 123 | | BinOpKind::NotEqual 124 | | BinOpKind::LessThan 125 | | BinOpKind::LessEqual 126 | | BinOpKind::GreaterThan 127 | | BinOpKind::GreaterEqual => 50, 128 | BinOpKind::BitOr => 60, 129 | BinOpKind::BitXor => 70, 130 | BinOpKind::BitAnd => 80, 131 | BinOpKind::BitShiftLeft | BinOpKind::BitShiftRight => 90, 132 | BinOpKind::Add | BinOpKind::Subtract => 100, 133 | BinOpKind::Multiply | BinOpKind::Divide | BinOpKind::Modulo => 110, 134 | BinOpKind::Power => 130, 135 | } 136 | } 137 | 138 | pub fn pp_expr(prec: usize, gcx: &GlobalCtxt, expr: Id) -> RcDoc<'_> { 139 | match gcx.arenas.ast.expr(expr).kind { 140 | ExprKind::Unit => RcDoc::text("()"), 141 | ExprKind::Name(n) => RcDoc::text(n.as_str()), 142 | ExprKind::TupleProj(expr, ix) => { 143 | let expr = pp_expr(PREC_EXPR_TUPLE_PROJ, gcx, expr); 144 | maybe_paren( 145 | prec, 146 | PREC_EXPR_TUPLE_PROJ, 147 | expr.append(".").append(ix.to_string()), 148 | ) 149 | } 150 | ExprKind::Tuple(v) => { 151 | let v_multi = v.iter().copied().map(|x| { 152 | RcAllocator 153 | .nil() 154 | .append(pp_expr(PREC_EXPR_LET, gcx, x)) 155 | .nest(2) 156 | .append(",") 157 | .append(RcDoc::line()) 158 | }); 159 | let v_flat = if v.len() > 1 { 160 | RcAllocator 161 | .intersperse( 162 | v.iter() 163 | .copied() 164 | 
.map(|x| { 165 | RcAllocator 166 | .nil() 167 | .append(pp_expr(PREC_EXPR_LET, gcx, x)) 168 | .nest(2) 169 | }) 170 | .collect::>(), 171 | RcAllocator.text(",").append(" "), 172 | ) 173 | .parens() 174 | } else { 175 | RcAllocator 176 | .nil() 177 | .append(pp_expr(PREC_EXPR_LET, gcx, v[0]).nest(2)) 178 | .append(",") 179 | .parens() 180 | }; 181 | RcAllocator 182 | .text("(") 183 | .append(RcDoc::line()) 184 | .append(RcAllocator.intersperse(v_multi, RcDoc::nil()).indent(4)) 185 | .append(")") 186 | .flat_alt(v_flat) 187 | .group() 188 | .into_doc() 189 | } 190 | ExprKind::Lambda(x, body) => { 191 | let body = pp_expr(PREC_EXPR_LET, gcx, body); 192 | maybe_paren( 193 | prec, 194 | PREC_EXPR_LAMBDA, 195 | RcDoc::text("λ").append(x.as_str()).append(".").append(body), 196 | ) 197 | } 198 | ExprKind::Apply(f, x) => { 199 | let f = pp_expr(PREC_EXPR_APPL, gcx, f); 200 | let x = pp_expr(PREC_EXPR_PRIMARY, gcx, x); 201 | maybe_paren( 202 | prec, 203 | PREC_EXPR_APPL, 204 | (RcAllocator.nil().append(f).append(RcDoc::line())) 205 | .align() 206 | .append(RcAllocator.nil().append(x)) 207 | .align() 208 | .group() 209 | .into_doc(), 210 | ) 211 | } 212 | ExprKind::BinaryOp { left, kind, right } => { 213 | // TODO: precedence 214 | let left = pp_expr(prec_binop(kind), gcx, left); 215 | let right = pp_expr(prec_binop(kind), gcx, right); 216 | maybe_paren( 217 | prec, 218 | prec_binop(kind), 219 | left.append(RcDoc::space()) 220 | .append(match kind { 221 | BinOpKind::LogicalOr => "||", 222 | BinOpKind::LogicalAnd => "&&", 223 | BinOpKind::BitOr => "|", 224 | BinOpKind::BitAnd => "&", 225 | BinOpKind::BitXor => "^", 226 | BinOpKind::Equal => "==", 227 | BinOpKind::NotEqual => "!=", 228 | BinOpKind::LessThan => "<", 229 | BinOpKind::GreaterThan => ">", 230 | BinOpKind::LessEqual => "<=", 231 | BinOpKind::GreaterEqual => ">=", 232 | BinOpKind::BitShiftLeft => "<<", 233 | BinOpKind::BitShiftRight => ">>", 234 | BinOpKind::Add => "+", 235 | BinOpKind::Subtract => "-", 236 | BinOpKind::Multiply => "*", 237 | BinOpKind::Divide => "/", 238 | BinOpKind::Modulo => "%", 239 | BinOpKind::Power => "**", 240 | }) 241 | .append(RcDoc::space()) 242 | .append(right), 243 | ) 244 | } 245 | ExprKind::UnaryMinus(expr) => maybe_paren( 246 | prec, 247 | PREC_EXPR_UNARY, 248 | RcAllocator 249 | .text("-") 250 | .append(pp_expr(PREC_EXPR_UNARY, gcx, expr)) 251 | .into_doc(), 252 | ), 253 | 254 | ExprKind::UnaryNot(expr) => maybe_paren( 255 | prec, 256 | PREC_EXPR_UNARY, 257 | RcAllocator 258 | .text("!") 259 | .append(pp_expr(PREC_EXPR_UNARY, gcx, expr)) 260 | .into_doc(), 261 | ), 262 | ExprKind::If(cond, then, then_else) => { 263 | let cond = pp_expr(PREC_EXPR_LAMBDA, gcx, cond); 264 | let then = pp_expr(PREC_EXPR_LET, gcx, then); 265 | let then_else = pp_expr(PREC_EXPR_LET, gcx, then_else); 266 | 267 | maybe_paren( 268 | prec, 269 | PREC_EXPR_IF, 270 | RcAllocator 271 | .text("if") 272 | .append(RcDoc::space()) 273 | .append(cond) 274 | .append(RcDoc::softline()) 275 | .append( 276 | RcAllocator 277 | .text("then") 278 | .append(RcDoc::space()) 279 | .append(then) 280 | .into_doc(), 281 | ) 282 | .align() 283 | .append(RcDoc::softline()) 284 | .append( 285 | RcAllocator 286 | .text("else") 287 | .append(RcDoc::space()) 288 | .append(then_else) 289 | .into_doc(), 290 | ) 291 | .align() 292 | .into_doc(), 293 | ) 294 | } 295 | ExprKind::Let(x, rec, t, e1, e2) => { 296 | let t = t.map(|t| pp_ty(PREC_TY_FORALL, gcx, t)); 297 | let e1 = pp_expr(PREC_EXPR_LET, gcx, e1); 298 | let e2 = pp_expr(PREC_EXPR_LET, gcx, e2); 299 | let 
t = match t { 300 | Some(t) => RcAllocator 301 | .text(":") 302 | .append(RcDoc::space()) 303 | .append(RcAllocator.nil().append(t).align().group()) 304 | // TODO: add some kind of heuristic to make this a 305 | // line for more complex inner values (flat_alt?) 306 | .append(RcDoc::softline()) 307 | .append("=") 308 | .append(RcDoc::space()) 309 | .append(e1), 310 | None => RcAllocator.text("=").append(RcDoc::space()).append(e1), 311 | }; 312 | maybe_paren( 313 | prec, 314 | PREC_EXPR_LET, 315 | RcAllocator 316 | .text("let") 317 | .append(if rec != Recursive::NotRecursive { 318 | RcDoc::space().append("rec") 319 | } else { 320 | RcDoc::nil() 321 | }) 322 | .append(RcDoc::space()) 323 | .append(x.as_str()) 324 | .append(RcDoc::space()) 325 | .append(t.align()) 326 | .append(RcDoc::space()) 327 | .append("in") 328 | .append(RcDoc::line()) 329 | .append(e2) 330 | .align() 331 | .into_doc(), 332 | ) 333 | } 334 | ExprKind::Number(n) => RcDoc::text(n.to_string()), 335 | ExprKind::Boolean(b) => RcDoc::text(b.to_string()), 336 | ExprKind::Err => RcDoc::text(""), 337 | } 338 | } 339 | 340 | pub fn pp_item(gcx: &GlobalCtxt, item: Id) -> RcDoc<'_> { 341 | let item = gcx.arenas.ast.item(item); 342 | match item.kind { 343 | ItemKind::Value(t, e) => { 344 | let t = pp_ty(0, gcx, t); 345 | let e = pp_expr(0, gcx, e); 346 | let t = RcAllocator 347 | .text(":") 348 | .append(RcDoc::space()) 349 | .append(RcAllocator.nil().append(t).align().group()) 350 | .append(RcDoc::line()) 351 | .append("=") 352 | .append(RcDoc::space()) 353 | .append(e); 354 | RcAllocator 355 | .text("def") 356 | .append(RcDoc::space()) 357 | .append(item.ident.as_str()) 358 | .append(RcDoc::space()) 359 | .append(t.align()) 360 | .into_doc() 361 | } 362 | ItemKind::TyAlias(t) => { 363 | let t = pp_ty(0, gcx, t); 364 | RcAllocator 365 | .text("type") 366 | .append(RcDoc::space()) 367 | .append(item.ident.as_str()) 368 | .append(RcDoc::space()) 369 | .append("=") 370 | .append(RcDoc::space()) 371 | .append(t) 372 | .into_doc() 373 | } 374 | ItemKind::Enum(generics, cons, _) => { 375 | let doc = if cons.is_empty() { 376 | RcAllocator.text("|") 377 | } else { 378 | let mut doc = RcAllocator.nil().align(); 379 | let mut sep = RcAllocator.nil(); 380 | for (id, tys) in cons { 381 | doc = doc 382 | .append(sep.clone()) 383 | .append(id.as_str()) 384 | .append(RcDoc::space()) 385 | .append(RcAllocator.intersperse( 386 | tys.into_iter().map(|t| pp_ty(PREC_TY_PRIMARY, gcx, t)), 387 | RcDoc::space(), 388 | )); 389 | sep = RcAllocator.line().append("|").append(RcDoc::space()); 390 | } 391 | doc 392 | }; 393 | 394 | RcAllocator 395 | .text("enum") 396 | .append(RcDoc::space()) 397 | .append(item.ident.as_str()) 398 | .append({ 399 | let doc = RcAllocator.intersperse( 400 | generics.iter().copied().map(|x| RcDoc::text(x.as_str())), 401 | RcDoc::text(",").append(RcDoc::space()), 402 | ); 403 | if generics.is_empty() { 404 | RcAllocator.nil() 405 | } else { 406 | doc.brackets() 407 | } 408 | }) 409 | .append(RcDoc::space()) 410 | .append( 411 | RcAllocator 412 | .text("=") 413 | .append(RcDoc::space()) 414 | .append(doc) 415 | .align(), 416 | ) 417 | .into_doc() 418 | } 419 | } 420 | } 421 | -------------------------------------------------------------------------------- /src/bin/debug-lsp/main.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Display; 2 | use std::sync::Arc; 3 | 4 | use calypso_base::symbol::Ident; 5 | use color_eyre::eyre; 6 | 7 | use tethys::ast::{AstId, Node}; 8 | use 
tethys::ctxt::GlobalCtxt; 9 | use tethys::parse::Span; 10 | use tethys::{parse, resolve}; 11 | use tokio::sync::mpsc::{self, Receiver, Sender}; 12 | use tokio::sync::{Mutex, RwLock}; 13 | use tower_lsp::jsonrpc::Result; 14 | use tower_lsp::jsonrpc::{Error as LspError, ErrorCode}; 15 | use tower_lsp::lsp_types::*; 16 | use tower_lsp::{Client, LanguageServer, LspService, Server}; 17 | use tracing::{debug, trace}; 18 | use tracing_subscriber::EnvFilter; 19 | 20 | #[derive(Debug)] 21 | struct Backend { 22 | client: Client, 23 | file: Arc>>, 24 | tx: Sender, 25 | rx: Arc>>>, 26 | } 27 | 28 | #[derive(Clone, Debug, PartialEq, Eq)] 29 | enum Request { 30 | UpdateFile, 31 | GetAstId(u32), 32 | GetActualSpan(u32), 33 | GetDeclarationOf(AstId), 34 | GetSpanOf(AstId), 35 | GetIdent(AstId), 36 | } 37 | 38 | #[derive(Clone, Debug, PartialEq, Eq)] 39 | enum Response { 40 | Ok, 41 | AstId(AstId), 42 | Span(Span), 43 | Ident(Ident), 44 | NoDeclaration, 45 | } 46 | 47 | impl Response { 48 | pub fn expect_ast_id(self) -> AstId { 49 | if let Response::AstId(ast_id) = self { 50 | ast_id 51 | } else { 52 | panic!("expected Response::AstId") 53 | } 54 | } 55 | 56 | pub fn expect_declaration(self) -> Option { 57 | match self { 58 | Response::AstId(ast_id) => Some(ast_id), 59 | Response::NoDeclaration => None, 60 | _ => panic!("expected Response::AstId or Response::NoDeclaration"), 61 | } 62 | } 63 | 64 | pub fn expect_ok(self) { 65 | if let Response::Ok = self { 66 | } else { 67 | panic!("expected Response::Ok") 68 | } 69 | } 70 | 71 | pub fn expect_ident(self) -> Ident { 72 | if let Response::Ident(ident) = self { 73 | ident 74 | } else { 75 | panic!("expected Response::Ident"); 76 | } 77 | } 78 | 79 | pub fn expect_span(self) -> Span { 80 | if let Response::Span(span) = self { 81 | span 82 | } else { 83 | panic!("expected Response::Span") 84 | } 85 | } 86 | } 87 | 88 | #[tower_lsp::async_trait] 89 | impl LanguageServer for Backend { 90 | async fn initialize(&self, _: InitializeParams) -> Result { 91 | Ok(InitializeResult { 92 | capabilities: ServerCapabilities { 93 | text_document_sync: Some(TextDocumentSyncCapability::Kind( 94 | TextDocumentSyncKind::FULL, 95 | )), 96 | hover_provider: Some(HoverProviderCapability::Simple(true)), 97 | definition_provider: Some(OneOf::Left(true)), 98 | ..Default::default() 99 | }, 100 | server_info: Some(ServerInfo { 101 | name: "Tethys Debugging LSP".to_string(), 102 | version: None, 103 | }), 104 | }) 105 | } 106 | 107 | async fn initialized(&self, _: InitializedParams) { 108 | self.client 109 | .log_message(MessageType::INFO, "server initialized!") 110 | .await; 111 | } 112 | 113 | async fn shutdown(&self) -> Result<()> { 114 | Ok(()) 115 | } 116 | 117 | async fn goto_definition( 118 | &self, 119 | params: GotoDefinitionParams, 120 | ) -> Result> { 121 | let file = self.file.read().await; 122 | if let Some((uri, src)) = file.as_ref() { 123 | if uri == ¶ms.text_document_position_params.text_document.uri { 124 | let offset = offset_of(params.text_document_position_params.position, src); 125 | self.tx.send(Request::GetAstId(offset)).await.unwrap(); 126 | let ast_id = self.rx.lock().await.recv().await.unwrap()?.expect_ast_id(); 127 | self.tx 128 | .send(Request::GetDeclarationOf(ast_id)) 129 | .await 130 | .unwrap(); 131 | let decl_ast_id = self 132 | .rx 133 | .lock() 134 | .await 135 | .recv() 136 | .await 137 | .unwrap()? 
138 | .expect_declaration(); 139 | if decl_ast_id.is_none() { 140 | return Ok(None); 141 | } 142 | self.tx 143 | .send(Request::GetIdent(decl_ast_id.unwrap())) 144 | .await 145 | .unwrap(); 146 | let ident = self.rx.lock().await.recv().await.unwrap()?.expect_ident(); 147 | 148 | Ok(Some(GotoDefinitionResponse::Scalar(Location { 149 | uri: uri.clone(), 150 | range: range_of(ident.span.into(), src), 151 | }))) 152 | } else { 153 | Ok(None) 154 | } 155 | } else { 156 | Ok(None) 157 | } 158 | } 159 | 160 | async fn hover(&self, params: HoverParams) -> Result> { 161 | let file = self.file.read().await; 162 | if let Some((uri, src)) = file.as_ref() { 163 | if uri == ¶ms.text_document_position_params.text_document.uri { 164 | let offset = offset_of(params.text_document_position_params.position, src); 165 | self.tx.send(Request::GetAstId(offset)).await.unwrap(); 166 | let ast_id = self.rx.lock().await.recv().await.unwrap()?.expect_ast_id(); 167 | self.tx.send(Request::GetActualSpan(offset)).await.unwrap(); 168 | let span = self.rx.lock().await.recv().await.unwrap()?.expect_span(); 169 | 170 | Ok(Some(Hover { 171 | contents: HoverContents::Scalar(MarkedString::String(format!( 172 | "AST ID: {:?}", 173 | ast_id 174 | ))), 175 | range: Some(range_of(span, src)), 176 | })) 177 | } else { 178 | Ok(Some(Hover { 179 | contents: HoverContents::Scalar(MarkedString::String( 180 | "No content available".to_string(), 181 | )), 182 | range: None, 183 | })) 184 | } 185 | } else { 186 | Ok(Some(Hover { 187 | contents: HoverContents::Scalar(MarkedString::String( 188 | "No content available".to_string(), 189 | )), 190 | range: None, 191 | })) 192 | } 193 | } 194 | 195 | async fn did_open(&self, params: DidOpenTextDocumentParams) { 196 | let mut file = self.file.write().await; 197 | match file.take() { 198 | Some((uri, content)) => { 199 | if uri == params.text_document.uri { 200 | *file = Some((uri, params.text_document.text)); 201 | drop(file); 202 | self.tx.send(Request::UpdateFile).await.unwrap(); 203 | self.rx 204 | .lock() 205 | .await 206 | .recv() 207 | .await 208 | .unwrap() 209 | .unwrap() 210 | .expect_ok(); 211 | } else { 212 | *file = Some((uri, content)) 213 | } 214 | } 215 | None => { 216 | *file = Some((params.text_document.uri, params.text_document.text)); 217 | drop(file); 218 | self.tx.send(Request::UpdateFile).await.unwrap(); 219 | self.rx 220 | .lock() 221 | .await 222 | .recv() 223 | .await 224 | .unwrap() 225 | .unwrap() 226 | .expect_ok(); 227 | } 228 | } 229 | } 230 | } 231 | 232 | fn offset_of(pos: Position, src: &str) -> u32 { 233 | let mut cur_col = 0; 234 | let mut cur_line = 0; 235 | for (offset, ch) in src.char_indices() { 236 | if cur_line == pos.line && cur_col == pos.character { 237 | return offset.try_into().unwrap(); 238 | } 239 | 240 | if ch == '\n' { 241 | cur_line += 1; 242 | cur_col = 0; 243 | } else { 244 | cur_col += 1; 245 | } 246 | } 247 | 0 248 | } 249 | 250 | fn position_of(offset: u32, src: &str) -> Position { 251 | let mut cur_col = 0; 252 | let mut cur_line = 0; 253 | assert!(src.is_char_boundary(offset as usize)); 254 | for (cur_offset, ch) in src.char_indices() { 255 | if cur_offset == offset as usize { 256 | return Position { 257 | line: cur_line, 258 | character: cur_col, 259 | }; 260 | } 261 | 262 | if ch == '\n' { 263 | cur_line += 1; 264 | cur_col = 0; 265 | } else { 266 | cur_col += 1; 267 | } 268 | } 269 | if offset >= src.len() as u32 { 270 | return Position { 271 | line: cur_line, 272 | character: cur_col, 273 | }; 274 | } 275 | Position { 276 | line: 0, 
277 | character: 0, 278 | } 279 | } 280 | 281 | fn range_of(span: Span, src: &str) -> Range { 282 | Range { 283 | start: position_of(span.lo(), src), 284 | end: position_of(span.hi(), src), 285 | } 286 | } 287 | 288 | #[tokio::main] 289 | async fn main() -> eyre::Result<()> { 290 | tracing_subscriber::fmt::fmt() 291 | .with_env_filter(EnvFilter::new("tower_lsp=trace,debug_lsp=trace")) 292 | .with_writer(std::io::stderr) 293 | .with_ansi(false) 294 | .init(); 295 | color_eyre::install()?; 296 | 297 | let stdin = tokio::io::stdin(); 298 | let stdout = tokio::io::stdout(); 299 | 300 | let (txreq, rxreq) = mpsc::channel(16); 301 | let (txres, rxres) = mpsc::channel(16); 302 | let file = Arc::new(RwLock::new(None)); 303 | let file_worker = Arc::clone(&file); 304 | std::thread::spawn(|| worker_thread(file_worker, rxreq, txres)); 305 | 306 | let (service, socket) = LspService::new(|client| Backend { 307 | client, 308 | file, 309 | tx: txreq, 310 | rx: Arc::new(Mutex::new(rxres)), 311 | }); 312 | Server::new(stdin, stdout, socket).serve(service).await; 313 | 314 | Ok(()) 315 | } 316 | 317 | fn worker_thread( 318 | file: Arc>>, 319 | mut rx: Receiver, 320 | tx: Sender>, 321 | ) { 322 | let gcx = GlobalCtxt::new(); 323 | let mut items = Vec::new(); 324 | while let Some(req) = rx.blocking_recv() { 325 | match req { 326 | Request::UpdateFile => { 327 | items.clear(); 328 | gcx.clear(); 329 | let file_guard = file.blocking_read(); 330 | let (url, src) = file_guard.as_ref().unwrap(); 331 | items = parse::run(src.as_ref(), &gcx); 332 | match resolve::resolve_code_unit(&gcx, &items) { 333 | Err(err) => tx.blocking_send(Err(stringify_error(err))).unwrap(), 334 | Ok(_) => tx.blocking_send(Ok(Response::Ok)).unwrap(), 335 | } 336 | trace!("UpdateFile({}): done", url); 337 | } 338 | Request::GetAstId(offset) => match get_node(&gcx, offset) { 339 | Some(node) => { 340 | let id = node.id(&gcx); 341 | debug!("GetAstId({:?}): {:?}", offset, id); 342 | tx.blocking_send(Ok(Response::AstId(id))).unwrap(); 343 | } 344 | None => { 345 | debug!("GetAstId({:?}): none found", offset); 346 | tx.blocking_send(Err(stringify_error( 347 | "Failed to find a node containing the given offset", 348 | ))) 349 | .unwrap() 350 | } 351 | }, 352 | Request::GetActualSpan(offset) => match get_node(&gcx, offset) { 353 | Some(node) => { 354 | let span = node.span(&gcx); 355 | debug!("GetActualSpan({:?}): {:?}", offset, span); 356 | tx.blocking_send(Ok(Response::Span(span))).unwrap(); 357 | } 358 | None => { 359 | debug!("GetActualSpan({:?}): none found", offset); 360 | tx.blocking_send(Err(stringify_error( 361 | "Failed to find a node containing the given offset", 362 | ))) 363 | .unwrap() 364 | } 365 | }, 366 | Request::GetDeclarationOf(ast_id) => { 367 | if let Some(res) = gcx.arenas.ast.res_data.borrow().get_by_id(ast_id) { 368 | let id = res.id(); 369 | if let Some(id) = id { 370 | debug!("GetDeclarationOf({:?}: {:?}", ast_id, id); 371 | tx.blocking_send(Ok(Response::AstId(id))).unwrap(); 372 | } else { 373 | debug!( 374 | "GetDeclarationOf({:?}): declaration found ({:?}), but it didn't have an id", 375 | ast_id, id 376 | ); 377 | tx.blocking_send(Ok(Response::NoDeclaration)).unwrap(); 378 | } 379 | } else { 380 | debug!("GetDeclarationOf({:?}): none found", ast_id); 381 | tx.blocking_send(Ok(Response::NoDeclaration)).unwrap(); 382 | } 383 | } 384 | Request::GetSpanOf(ast_id) => { 385 | if let Some(node) = gcx.arenas.ast.get_node_by_id(ast_id) { 386 | let span = node.span(&gcx); 387 | debug!("GetSpanOf({:?}): {:?}", ast_id, span); 388 
| tx.blocking_send(Ok(Response::Span(span))).unwrap(); 389 | } else { 390 | debug!("GetSpanOf({:?}): none found", ast_id); 391 | tx.blocking_send(Err(stringify_error( 392 | "Failed to find a node by the given AST ID", 393 | ))) 394 | .unwrap(); 395 | } 396 | } 397 | Request::GetIdent(ast_id) => { 398 | if let Some(node) = gcx.arenas.ast.get_node_by_id(ast_id) { 399 | let ident = node.ident(&gcx); 400 | debug!("GetSymbol({:?}): {:?}", ast_id, ident); 401 | let res = match ident { 402 | Some(ident) => Ok(Response::Ident(ident)), 403 | None => Err(stringify_error( 404 | "Failed to find a symbol for the given AST ID", 405 | )), 406 | }; 407 | tx.blocking_send(res).unwrap(); 408 | } else { 409 | debug!("GetSpanOf({:?}): none found", ast_id); 410 | tx.blocking_send(Err(stringify_error( 411 | "Failed to find a node by the given AST ID", 412 | ))) 413 | .unwrap(); 414 | } 415 | } 416 | } 417 | } 418 | } 419 | 420 | /// Find the node with the smallest span that includes a given binary offset. 421 | fn get_node(gcx: &GlobalCtxt, offset: u32) -> Option { 422 | gcx.arenas 423 | .ast 424 | .into_iter_nodes() 425 | .filter(|node| { 426 | let span = node.span(gcx); 427 | 428 | span.lo() <= offset && offset <= span.hi() 429 | }) 430 | .min_by_key(|node| node.span(gcx).len()) 431 | } 432 | 433 | fn stringify_error(e: E) -> LspError { 434 | LspError { 435 | code: ErrorCode::ServerError(0), 436 | data: None, 437 | message: e.to_string(), 438 | } 439 | } 440 | -------------------------------------------------------------------------------- /src/bin/tethys.rs: -------------------------------------------------------------------------------- 1 | use std::{env, fs}; 2 | 3 | use tethys::ctxt::GlobalCtxt; 4 | 5 | use color_eyre::eyre; 6 | 7 | fn main() -> eyre::Result<()> { 8 | tracing_subscriber::fmt::init(); 9 | color_eyre::install()?; 10 | let src = fs::read_to_string(env::args().nth(1).expect("Expected file argument")) 11 | .expect("Failed to read file"); 12 | 13 | let gcx = GlobalCtxt::default(); 14 | let _ = tethys::run(&src, &gcx, false); 15 | 16 | Ok(()) 17 | } 18 | -------------------------------------------------------------------------------- /src/bin/uitest/header.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | 3 | #[derive(Clone, Debug, Deserialize, PartialEq, Eq)] 4 | pub struct UitestHeader { 5 | /// The test's description. 6 | pub desc: String, 7 | /// Which mode should this test be run in? 8 | pub mode: UitestMode, 9 | /// Which diagnostics should we expect? 10 | #[serde(default)] 11 | pub diags: Vec, 12 | } 13 | 14 | #[derive(Copy, Clone, Debug, Deserialize, PartialEq, Eq)] 15 | #[serde(rename_all = "kebab-case")] 16 | pub enum UitestMode { 17 | /// Typechecking this file should pass. 18 | CheckPass, 19 | /// This file should be typechecked and errors should be collected. 
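/// (the harness is meant to compare these against the `diags` listed in the test header)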
20 | Diag, 21 | } 22 | 23 | #[derive(Clone, Debug, Deserialize, PartialEq, Eq)] 24 | pub struct UitestDiag { 25 | pub kind: UitestDiagKind, 26 | pub message: String, 27 | #[serde(default)] 28 | pub labels: Vec, 29 | } 30 | 31 | #[derive(Clone, Debug, Deserialize, PartialEq, Eq)] 32 | pub struct UitestLabel { 33 | pub loc: String, 34 | pub message: String, 35 | } 36 | 37 | #[derive(Clone, Debug, Deserialize, PartialEq, Eq)] 38 | #[serde(rename_all = "kebab-case")] 39 | pub enum UitestDiagKind { 40 | Error, 41 | } 42 | -------------------------------------------------------------------------------- /src/bin/uitest/main.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use eyre::{bail, eyre, Context}; 3 | use header::UitestHeader; 4 | use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; 5 | use runners::run_test; 6 | use std::path::PathBuf; 7 | use std::{fs, thread}; 8 | use walkdir::WalkDir; 9 | 10 | pub mod header; 11 | pub mod runners; 12 | 13 | #[derive(Clone, Debug, Parser, PartialEq, Eq)] 14 | /// The UI testing framework used in Tethys. 15 | struct Args { 16 | #[clap(short = 'v')] 17 | #[clap(long = "verbose")] 18 | verbose: bool, 19 | #[clap(required = true)] 20 | tests: Vec, 21 | } 22 | 23 | fn main() -> eyre::Result<()> { 24 | tracing_subscriber::fmt::init(); 25 | color_eyre::install()?; 26 | 27 | let args = Args::parse(); 28 | let mut tests = vec![]; 29 | for path in args.tests { 30 | if !path.exists() { 31 | bail!("Test directory or file does not exist: {}", path.display()); 32 | } 33 | if path.is_dir() { 34 | for entry in WalkDir::new(path).sort_by_file_name() { 35 | let entry = entry.wrap_err(eyre!("While searching for tests"))?; 36 | if entry.file_type().is_file() { 37 | let path = entry.path(); 38 | if path.extension().and_then(|s| s.to_str()).unwrap_or("") == "tys" { 39 | tests.push(path.to_path_buf()); 40 | } 41 | } 42 | } 43 | } else if path.is_file() { 44 | tests.push(path); 45 | } else { 46 | bail!( 47 | "Test path was neither a file nor a directory: {}", 48 | path.display() 49 | ); 50 | } 51 | } 52 | 53 | let tests = tests 54 | .into_iter() 55 | .map(|path| { 56 | fs::read_to_string(&path) 57 | .wrap_err(eyre!("While reading test: `{}`", path.display())) 58 | .map(|x| (path.to_str().unwrap().to_string(), x)) 59 | }) 60 | .collect::, _>>()? 61 | .into_iter() 62 | .map(|(path, contents)| { 63 | let (header, code): (Vec<&str>, Vec<&str>) = 64 | contents.lines().partition(|&x| x.starts_with('#')); 65 | let header = header 66 | .into_iter() 67 | .map(|line| line.strip_prefix("# ").unwrap()) 68 | .collect::>() 69 | .join("\n"); 70 | let code = code.join("\n"); 71 | if header.is_empty() { 72 | bail!("Test `{}` does not have a header", &path); 73 | } 74 | let header = serde_yaml::from_str::(&header) 75 | .wrap_err(eyre!("While parsing test header of `{}`", &path)); 76 | header.map(|x| (path, x, code)) 77 | }) 78 | .collect::, _>>()?; 79 | 80 | todo!(); 81 | 82 | let pb = MultiProgress::new(); 83 | let total_pb = pb.add( 84 | ProgressBar::new(tests.len() as u64).with_style( 85 | ProgressStyle::default_bar() 86 | .template("[{bar}] {pos:>3}/{len:3} Elapsed: {elapsed} ETA: {eta}") 87 | .progress_chars(".. 
"), 88 | ), 89 | ); 90 | let fail_spin = pb.add( 91 | ProgressBar::new_spinner() 92 | .with_style(ProgressStyle::default_spinner().tick_chars(r#"|/-\"#)), 93 | ); 94 | thread::spawn(move || { 95 | pb.join().unwrap(); 96 | }); 97 | let mut failures = vec![]; 98 | for (path, header, code) in tests { 99 | let failure = run_test(&path, header, code); 100 | total_pb.inc(1); 101 | if let Some(failure) = failure { 102 | failures.push((path, failure)); 103 | fail_spin.set_message(format!("{} failures", failures.len())); 104 | } 105 | } 106 | fail_spin.finish_and_clear(); 107 | 108 | Ok(()) 109 | } 110 | -------------------------------------------------------------------------------- /src/bin/uitest/runners.rs: -------------------------------------------------------------------------------- 1 | use tethys::ctxt::GlobalCtxt; 2 | 3 | use crate::header::{UitestHeader, UitestMode}; 4 | 5 | pub fn run_test(path: &str, header: UitestHeader, code: String) -> Option> { 6 | let gcx = GlobalCtxt::default(); 7 | header.run(&gcx, code); 8 | header.verify(&gcx) 9 | } 10 | 11 | impl UitestHeader { 12 | fn run(&self, gcx: &GlobalCtxt, code: String) { 13 | match self.mode { 14 | // Check-pass and diag both just run typechecking, but the verification is different 15 | UitestMode::CheckPass | UitestMode::Diag => { 16 | let _ = tethys::run(&code, gcx, false); 17 | } 18 | } 19 | } 20 | 21 | fn verify(&self, _gcx: &GlobalCtxt) -> Option> { 22 | todo!() 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/codegen/closure.rs: -------------------------------------------------------------------------------- 1 | //! Closure conversion (and lambda lifting) for lambda values. 2 | //! 3 | //! Converts expressions such as `λx.λy.λz.x+y+z+a` into (ML-like 4 | //! syntax) `let f a x y z = x+y+z+a in (f a)` where `f a` is a struct 5 | //! containing a code pointer and a list of already-applied arguments, 6 | //! and `a` is a free value used by the lambda. 7 | //! 8 | //! A more complex example is as follows, using ML-like syntax: 9 | //! ``` 10 | //! let incr a xs = map (\x. x + a) xs 11 | //! ``` 12 | //! becomes 13 | //! ``` 14 | //! let aux a x = x + a 15 | //! let incr a xs = map (aux a) xs 16 | //! ``` 17 | //! 18 | //! With even more free variables: 19 | //! ``` 20 | //! let incr2 a b xs = map (\x. x + a + b) xs 21 | //! ``` 22 | //! becomes 23 | //! ``` 24 | //! let aux a b xs = x + a + b 25 | //! let incr2 a b xs = map (aux a b) xs 26 | //! ``` 27 | //! 28 | //! Frankly, I'll be honest: I don't entirely know why this works. But 29 | //! it does, so I'm not gonna prod at it cause that will only bring 30 | //! pain. 
31 | 32 | use std::collections::HashMap; 33 | 34 | use id_arena::Id; 35 | 36 | use crate::{ 37 | ast::Recursive, 38 | ctxt::GlobalCtxt, 39 | parse::Span, 40 | typeck::{ 41 | ast::{CoreAstId, DeBruijnIdx, DeBruijnLvl, Expr, ExprKind}, 42 | norm::{self, VTy, VTyKind}, 43 | pretty::pp_ty, 44 | }, 45 | }; 46 | 47 | struct ConvCtxt { 48 | lifted: Vec>, 49 | scopes: Vec, 50 | env: Vec, 51 | letrec: HashMap, 52 | } 53 | 54 | impl ConvCtxt {} 55 | 56 | pub fn closure_convert(gcx: &GlobalCtxt, term: Id) -> (Vec>, Id) { 57 | let mut ctx = ConvCtxt { 58 | lifted: vec![], 59 | env: vec![], 60 | scopes: vec![], 61 | letrec: HashMap::new(), 62 | }; 63 | 64 | let res = convert(gcx, &mut ctx, term, 0, Mode::Root); 65 | (ctx.lifted, res) 66 | } 67 | 68 | #[derive(Clone, Debug, Default, PartialEq, Eq)] 69 | struct Scope { 70 | offset: usize, 71 | len: usize, 72 | free: im::Vector, 73 | } 74 | 75 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 76 | enum Mode { 77 | Collect, 78 | Traverse, 79 | Root, 80 | LetRec(CoreAstId), 81 | } 82 | 83 | // TODO: this could be made way simpler if I didn't unfold lambdas 84 | 85 | #[allow(clippy::only_used_in_recursion)] 86 | fn convert( 87 | gcx: &GlobalCtxt, 88 | ctx: &mut ConvCtxt, 89 | term: Id, 90 | scope_ix: usize, 91 | mode: Mode, 92 | ) -> Id { 93 | let sp = gcx.arenas.core.expr(term).span; 94 | let id = gcx.arenas.core.expr(term).id; 95 | let ty = gcx.arenas.core.ty_of_expr(id); 96 | match gcx.arenas.core.expr(term).kind { 97 | ExprKind::Var(id) => { 98 | if let Some(new_id) = ctx.letrec.get(&id) { 99 | Expr::new( 100 | gcx, 101 | gcx.arenas.core.next_id(), 102 | ExprKind::LiftedLamRef(*new_id), 103 | sp, 104 | Some(ty), 105 | ) 106 | } else { 107 | let scope = &mut ctx.scopes[scope_ix]; 108 | let lvl = ctx 109 | .env 110 | .iter() 111 | .copied() 112 | .enumerate() 113 | .find_map(|(ix, x)| (x == id).then_some(ix)) 114 | .unwrap(); 115 | if lvl < scope.offset { 116 | let i = ctx.env[lvl]; 117 | scope.free.push_back(i); 118 | Expr::new( 119 | gcx, 120 | gcx.arenas.core.next_id(), 121 | ExprKind::LiftedFree(id), 122 | sp, 123 | Some(ty), 124 | ) 125 | } else { 126 | Expr::new( 127 | gcx, 128 | gcx.arenas.core.next_id(), 129 | ExprKind::LiftedVar(id), 130 | sp, 131 | Some(ty), 132 | ) 133 | } 134 | } 135 | } 136 | ExprKind::Lam(i, _, body) if mode == Mode::Collect => { 137 | let scope = &mut ctx.scopes[scope_ix]; 138 | scope.len += 1; 139 | ctx.env.push(i); 140 | 141 | convert(gcx, ctx, body, scope_ix, Mode::Collect) 142 | } 143 | ExprKind::Lam(i, _, body) 144 | if mode == Mode::Traverse || mode == Mode::Root || matches!(mode, Mode::LetRec(..)) => 145 | { 146 | let mut scope = Scope { 147 | offset: ctx.env.len(), 148 | ..Scope::default() 149 | }; 150 | scope.len += 1; 151 | ctx.env.push(i); 152 | ctx.scopes.push(scope); 153 | 154 | let next_scope_ix = if ctx.scopes.len() == 1 { 155 | scope_ix 156 | } else { 157 | scope_ix + 1 158 | }; 159 | 160 | let res = convert(gcx, ctx, body, next_scope_ix, Mode::Collect); 161 | 162 | let vars = &ctx.env[ctx.scopes[next_scope_ix].offset 163 | ..ctx.scopes[next_scope_ix].offset + ctx.scopes[next_scope_ix].len]; 164 | let ids = ctx.scopes[next_scope_ix] 165 | .free 166 | .iter() 167 | .chain(vars.iter()) 168 | .copied() 169 | .collect(); 170 | let base_ty = gcx.arenas.core.ty_of_expr(vars[0]); 171 | let ty = ctx.scopes[next_scope_ix] 172 | .free 173 | .iter() 174 | .fold(base_ty, |acc, x| { 175 | let VTyKind::Arrow(a, _) = 176 | gcx.arenas.tyck.vty(gcx.arenas.core.ty_of_expr(*x)).kind 177 | else { 178 | unreachable!() 179 | }; 180 | 
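// Wrap the accumulator: the domain type `a` extracted above becomes one more leading parameter of the lifted lambda's type.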
181 | VTy::new( 182 | gcx, 183 | gcx.arenas.core.next_id(), 184 | VTyKind::Arrow(a, acc), 185 | Span((0..0).into()), 186 | ) 187 | }); 188 | println!( 189 | "base ty: {}", 190 | pp_ty( 191 | 0, 192 | gcx, 193 | 0usize.into(), 194 | im::Vector::new(), 195 | norm::quote_ty(gcx, 0usize.into(), base_ty) 196 | ) 197 | .group() 198 | .pretty(80) 199 | ); 200 | println!( 201 | "accum ty: {}", 202 | pp_ty( 203 | 0, 204 | gcx, 205 | 0usize.into(), 206 | im::Vector::new(), 207 | norm::quote_ty(gcx, 0usize.into(), ty) 208 | ) 209 | .group() 210 | .pretty(80) 211 | ); 212 | let new_id = if let Mode::LetRec(id) = mode { 213 | id 214 | } else { 215 | gcx.arenas.core.next_id() 216 | }; 217 | let res = Expr::new(gcx, new_id, ExprKind::LiftedLam(ids, res), sp, Some(ty)); 218 | if mode != Mode::Root { 219 | ctx.lifted.push(res); 220 | } 221 | let res_ref = if mode == Mode::Root { 222 | res 223 | } else { 224 | Expr::new( 225 | gcx, 226 | gcx.arenas.core.next_id(), 227 | ExprKind::LiftedLamRef(new_id), 228 | sp, 229 | Some(ty), 230 | ) 231 | }; 232 | 233 | let xs = ctx.scopes[next_scope_ix] 234 | .free 235 | .clone() 236 | .iter() 237 | .map(|id| { 238 | let mut free_ix = None; 239 | let mut free_scope_ix = None; 240 | 241 | for (scope_ix, scope) in ctx.scopes.iter().take(next_scope_ix).enumerate() { 242 | if let Some(x) = ctx.env[scope.offset..scope.offset + scope.len] 243 | .iter() 244 | .rev() 245 | .enumerate() 246 | .find_map(|(x, i)| (i == id).then_some(x)) 247 | { 248 | free_ix = Some(x); 249 | free_scope_ix = Some(scope_ix); 250 | }; 251 | } 252 | 253 | if ctx.scopes.len() != 1 { 254 | for scope in free_scope_ix.unwrap() + 1..next_scope_ix { 255 | let scope_offset = ctx.scopes[scope].offset; 256 | let scope_len = ctx.scopes[scope].len; 257 | ctx.scopes[scope].free.push_back( 258 | ctx.env[ctx.env.len() + scope_len 259 | - free_ix.unwrap() 260 | - scope_offset 261 | - 1], 262 | ); 263 | } 264 | } 265 | 266 | let VTyKind::Arrow(a, _) = 267 | gcx.arenas.tyck.vty(gcx.arenas.core.ty_of_expr(*id)).kind 268 | else { 269 | unreachable!() 270 | }; 271 | 272 | Expr::new( 273 | gcx, 274 | gcx.arenas.core.next_id(), 275 | ExprKind::LiftedVar(*id), 276 | sp, 277 | Some(a), 278 | ) 279 | }) 280 | .collect::>(); 281 | 282 | let res = if xs.is_empty() { 283 | res_ref 284 | } else { 285 | Expr::new( 286 | gcx, 287 | gcx.arenas.core.next_id(), 288 | ExprKind::LiftedApp(res_ref, xs), 289 | sp, 290 | Some(base_ty), 291 | ) 292 | }; 293 | 294 | ctx.env 295 | .truncate(ctx.env.len() - ctx.scopes[next_scope_ix].len); 296 | ctx.scopes.pop(); 297 | 298 | res 299 | } 300 | ExprKind::App(f, x) => { 301 | let f = convert(gcx, ctx, f, scope_ix, Mode::Traverse); 302 | let x = convert(gcx, ctx, x, scope_ix, Mode::Traverse); 303 | Expr::new( 304 | gcx, 305 | gcx.arenas.core.next_id(), 306 | ExprKind::App(f, x), 307 | sp, 308 | Some(ty), 309 | ) 310 | } 311 | ExprKind::TyApp(e, _) | ExprKind::TyAbs(_, _, e) => { 312 | convert(gcx, ctx, e, scope_ix, Mode::Traverse) 313 | } 314 | // TODO: lower `let`s elsewhere? 
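// Non-recursive `let`: convert the bound expression first, then the body with the binder pushed in its own scope.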
315 | ExprKind::Let(x, i, Recursive::NotRecursive, t, e1, e2) => { 316 | let e1 = convert(gcx, ctx, e1, scope_ix, Mode::Traverse); 317 | 318 | let mut scope = Scope { 319 | offset: ctx.env.len(), 320 | ..Scope::default() 321 | }; 322 | scope.len += 1; 323 | ctx.env.push(x); 324 | ctx.scopes.push(scope); 325 | 326 | let e2 = convert(gcx, ctx, e2, scope_ix, Mode::Traverse); 327 | ctx.scopes.pop(); 328 | ctx.env.pop(); 329 | Expr::new( 330 | gcx, 331 | gcx.arenas.core.next_id(), 332 | ExprKind::Let(x, i, Recursive::NotRecursive, t, e1, e2), 333 | sp, 334 | Some(ty), 335 | ) 336 | } 337 | ExprKind::Let(x, i, Recursive::Recursive(sp), t, e1, e2) => { 338 | let new_id = gcx.arenas.core.next_id(); 339 | ctx.letrec.insert(x, new_id); 340 | let e1 = convert(gcx, ctx, e1, scope_ix, Mode::LetRec(new_id)); 341 | convert(gcx, ctx, e2, scope_ix, Mode::Traverse) 342 | } 343 | 344 | ExprKind::BinaryOp { left, kind, right } => { 345 | let left = convert(gcx, ctx, left, scope_ix, Mode::Traverse); 346 | let right = convert(gcx, ctx, right, scope_ix, Mode::Traverse); 347 | Expr::new( 348 | gcx, 349 | gcx.arenas.core.next_id(), 350 | ExprKind::BinaryOp { left, kind, right }, 351 | sp, 352 | Some(ty), 353 | ) 354 | } 355 | ExprKind::If(cond, then, then_else) => { 356 | let cond = convert(gcx, ctx, cond, scope_ix, Mode::Traverse); 357 | let then = convert(gcx, ctx, then, scope_ix, Mode::Traverse); 358 | let then_else = convert(gcx, ctx, then_else, scope_ix, Mode::Traverse); 359 | Expr::new( 360 | gcx, 361 | gcx.arenas.core.next_id(), 362 | ExprKind::If(cond, then, then_else), 363 | sp, 364 | Some(ty), 365 | ) 366 | } 367 | ExprKind::Tuple(v) => Expr::new( 368 | gcx, 369 | gcx.arenas.core.next_id(), 370 | ExprKind::Tuple( 371 | v.into_iter() 372 | .map(|x| convert(gcx, ctx, x, scope_ix, Mode::Traverse)) 373 | .collect(), 374 | ), 375 | sp, 376 | Some(ty), 377 | ), 378 | ExprKind::TupleProj(x, n) => { 379 | let x = convert(gcx, ctx, x, scope_ix, Mode::Traverse); 380 | Expr::new( 381 | gcx, 382 | gcx.arenas.core.next_id(), 383 | ExprKind::TupleProj(x, n), 384 | sp, 385 | Some(ty), 386 | ) 387 | } 388 | // Free with respect to the global context. These are 389 | // represented as global values, so we don't need to worry 390 | // about lifting them 391 | ExprKind::Free(_) => term, 392 | _ => term, 393 | } 394 | } 395 | -------------------------------------------------------------------------------- /src/codegen/ir.rs: -------------------------------------------------------------------------------- 1 | use cranelift_entity::{entity_impl, PrimaryMap}; 2 | use id_arena::Id; 3 | use im::vector; 4 | use spinneret::encoder::ValType; 5 | 6 | use crate::{ 7 | ast::{AstId, BinOpKind, PrimTy, Recursive}, 8 | ctxt::GlobalCtxt, 9 | typeck::{ 10 | ast::{self as core, CoreAstId, DeBruijnIdx, DeBruijnLvl}, 11 | norm::{self}, 12 | }, 13 | }; 14 | 15 | use super::llvm::{Context, Type}; 16 | 17 | #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] 18 | pub struct Expr(u32); 19 | entity_impl!(Expr); 20 | 21 | #[derive(Clone, Debug)] 22 | pub struct ExprData { 23 | pub ty: Ty, 24 | pub kind: ExprKind, 25 | } 26 | 27 | #[derive(Clone, Debug)] 28 | pub enum ExprKind { 29 | Unit, 30 | LiftedVar(CoreAstId), 31 | LiftedFree(CoreAstId), 32 | LiftedLamRef(CoreAstId), 33 | Lam(im::Vector, Expr), 34 | App(Expr, im::Vector), 35 | Free(AstId), 36 | // TODO: do I need these? 
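// (`from_core` below currently erases `TyApp`/`TyAbs`, so this IR never sees them.)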
37 | // TyApp(Expr, im::Vector), 38 | // TyAbs(im::Vector, Expr), 39 | EnumConstructor(AstId, usize), 40 | EnumRecursor(AstId), 41 | Number(i64), 42 | BinaryOp { 43 | left: Expr, 44 | kind: BinOpKind, 45 | right: Expr, 46 | }, 47 | Boolean(bool), 48 | If(Expr, Expr, Expr), 49 | Let(CoreAstId, Recursive, Expr, Expr), 50 | Tuple(im::Vector), 51 | TupleProj(Expr, u64), 52 | } 53 | 54 | impl Expr { 55 | pub fn new(cgir: &mut CgIrArenas, ty: Ty, kind: ExprKind) -> Self { 56 | cgir.exprs.push(ExprData { ty, kind }) 57 | } 58 | 59 | pub fn from_core(gcx: &GlobalCtxt, cgir: &mut CgIrArenas, expr: Id) -> Self { 60 | let ty = gcx.arenas.core.ty_of_expr(gcx.arenas.core.expr(expr).id); 61 | let ty = norm::quote_ty(gcx, DeBruijnLvl::from(0usize), ty); 62 | let ty = Ty::from_core(gcx, cgir, ty); 63 | match gcx.arenas.core.expr(expr).kind { 64 | core::ExprKind::Unit => Expr::new(cgir, ty, ExprKind::Unit), 65 | core::ExprKind::Var(_) => unreachable!(), 66 | core::ExprKind::LiftedVar(ix) => Expr::new(cgir, ty, ExprKind::LiftedVar(ix)), 67 | core::ExprKind::LiftedFree(lvl) => Expr::new(cgir, ty, ExprKind::LiftedFree(lvl)), 68 | core::ExprKind::Lam(_, _, _) => unimplemented!(), 69 | core::ExprKind::LiftedLam(ids, e) => { 70 | let e = Expr::from_core(gcx, cgir, e); 71 | Expr::new(cgir, ty, ExprKind::Lam(ids, e)) 72 | } 73 | core::ExprKind::LiftedLamRef(id) => Expr::new(cgir, ty, ExprKind::LiftedLamRef(id)), 74 | core::ExprKind::LiftedApp(e, es) => { 75 | let e = Expr::from_core(gcx, cgir, e); 76 | let es = es 77 | .into_iter() 78 | .map(|x| Expr::from_core(gcx, cgir, x)) 79 | .collect(); 80 | Expr::new(cgir, ty, ExprKind::App(e, es)) 81 | } 82 | core::ExprKind::App(mut head, v) => { 83 | let mut vec = vector![Expr::from_core(gcx, cgir, v)]; 84 | while let core::ExprKind::App(x, v) = gcx.arenas.core.expr(head).kind { 85 | vec.push_front(Expr::from_core(gcx, cgir, v)); 86 | head = x; 87 | } 88 | let head = Expr::from_core(gcx, cgir, head); 89 | Expr::new(cgir, ty, ExprKind::App(head, vec)) 90 | } 91 | core::ExprKind::TyApp(e, _) => Expr::from_core(gcx, cgir, e), 92 | core::ExprKind::Let(x, _, rec, _, e1, e2) => { 93 | let e1 = Expr::from_core(gcx, cgir, e1); 94 | let e2 = Expr::from_core(gcx, cgir, e2); 95 | Expr::new(cgir, ty, ExprKind::Let(x, rec, e1, e2)) 96 | } 97 | core::ExprKind::TyAbs(_, _, e) => Expr::from_core(gcx, cgir, e), 98 | core::ExprKind::Free(id) => Expr::new(cgir, ty, ExprKind::Free(id)), 99 | core::ExprKind::EnumConstructor(id, ix) => { 100 | Expr::new(cgir, ty, ExprKind::EnumConstructor(id, ix)) 101 | } 102 | core::ExprKind::EnumRecursor(id) => Expr::new(cgir, ty, ExprKind::EnumRecursor(id)), 103 | core::ExprKind::Number(n) => Expr::new(cgir, ty, ExprKind::Number(n)), 104 | core::ExprKind::BinaryOp { left, kind, right } => { 105 | let left = Expr::from_core(gcx, cgir, left); 106 | let right = Expr::from_core(gcx, cgir, right); 107 | Expr::new(cgir, ty, ExprKind::BinaryOp { left, kind, right }) 108 | } 109 | core::ExprKind::Boolean(b) => Expr::new(cgir, ty, ExprKind::Boolean(b)), 110 | core::ExprKind::Err(_) => unimplemented!(), 111 | core::ExprKind::If(cond, then, then_else) => { 112 | let cond = Expr::from_core(gcx, cgir, cond); 113 | let then = Expr::from_core(gcx, cgir, then); 114 | let then_else = Expr::from_core(gcx, cgir, then_else); 115 | Expr::new(cgir, ty, ExprKind::If(cond, then, then_else)) 116 | } 117 | core::ExprKind::Tuple(es) => { 118 | let es = es 119 | .into_iter() 120 | .map(|x| Expr::from_core(gcx, cgir, x)) 121 | .collect(); 122 | Expr::new(cgir, ty, ExprKind::Tuple(es)) 
123 | } 124 | core::ExprKind::TupleProj(e, ix) => { 125 | let e = Expr::from_core(gcx, cgir, e); 126 | Expr::new(cgir, ty, ExprKind::TupleProj(e, ix)) 127 | } 128 | } 129 | } 130 | } 131 | 132 | #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] 133 | pub struct Ty(u32); 134 | entity_impl!(Ty); 135 | 136 | #[derive(Clone, Debug)] 137 | pub struct TyData { 138 | pub kind: TyKind, 139 | } 140 | 141 | #[derive(Clone, Debug)] 142 | pub enum TyKind { 143 | Unit, 144 | Primitive(PrimTy), 145 | Var(CoreAstId, DeBruijnIdx), 146 | Arrow(im::Vector, Ty), 147 | Forall(CoreAstId, Ty), 148 | Free(AstId), 149 | Enum(AstId, im::Vector), 150 | Tuple(im::Vector), 151 | } 152 | 153 | #[derive(Debug, Default)] 154 | pub struct CgIrArenas { 155 | pub exprs: PrimaryMap, 156 | pub tys: PrimaryMap, 157 | pub unit: Option, 158 | } 159 | 160 | impl Ty { 161 | #[inline] 162 | pub fn from_core(gcx: &GlobalCtxt, cgir: &mut CgIrArenas, ty: Id) -> Self { 163 | ty_from_core_inner(gcx, cgir, ty) 164 | } 165 | 166 | pub fn new(cgir: &mut CgIrArenas, kind: TyKind) -> Self { 167 | cgir.tys.push(TyData { kind }) 168 | } 169 | 170 | /// N.B. this function only checks for outer monotypes. 171 | /// Higher-rank function inputs may still be present. Use this in 172 | /// combination with [`Self::is_higher_rank`] to check for full 173 | /// monotypes. 174 | pub fn is_monotype(self, cgir: &CgIrArenas) -> bool { 175 | !matches!(&cgir.tys[self].kind, TyKind::Forall(..)) 176 | } 177 | 178 | pub fn is_higher_rank(self, cgir: &CgIrArenas) -> bool { 179 | fn inner(cgir: &CgIrArenas, this: Ty, outer: bool) -> bool { 180 | match &cgir.tys[this].kind { 181 | TyKind::Unit | TyKind::Primitive(..) | TyKind::Var(..) | TyKind::Free(..) => false, 182 | TyKind::Arrow(a, b) => { 183 | a.iter().any(|x| inner(cgir, *x, false)) || inner(cgir, *b, false) 184 | } 185 | TyKind::Forall(_, b) if outer => inner(cgir, *b, true), 186 | TyKind::Forall(..) => true, 187 | TyKind::Enum(_, spine) | TyKind::Tuple(spine) => { 188 | spine.iter().any(|x| inner(cgir, *x, false)) 189 | } 190 | } 191 | } 192 | inner(cgir, self, true) 193 | } 194 | 195 | /// Returns the scalar type this type corresponds to, if 196 | /// any. Returns `None` for any value which is boxed (always uses 197 | /// `i32`). 198 | pub fn scalar_type(self, cgir: &CgIrArenas) -> Option { 199 | match &cgir.tys[self].kind { 200 | TyKind::Unit => None, 201 | TyKind::Primitive(PrimTy::Boolean) => Some(ValType::I32), 202 | TyKind::Primitive(PrimTy::Integer) => Some(ValType::I64), 203 | TyKind::Tuple(spine) if spine.len() == 1 => spine.front().unwrap().scalar_type(cgir), 204 | _ => None, 205 | } 206 | } 207 | 208 | pub fn unboxed_type(self, cgir: &mut CgIrArenas, ctx: &Context) -> Option { 209 | match cgir.tys[self].kind.clone() { 210 | TyKind::Unit => None, 211 | TyKind::Primitive(PrimTy::Boolean) => Some(Type::i1(ctx)), 212 | TyKind::Primitive(PrimTy::Integer) => Some(Type::i64(ctx)), 213 | _ => todo!(), 214 | } 215 | } 216 | 217 | pub fn is_arrow(self, cgir: &CgIrArenas) -> bool { 218 | match &cgir.tys[self].kind { 219 | TyKind::Arrow(..) 
=> true, 220 | TyKind::Forall(_, b) => b.is_arrow(cgir), 221 | _ => false, 222 | } 223 | } 224 | 225 | #[allow(clippy::match_like_matches_macro)] 226 | pub fn is_zero_sized(self, cgir: &CgIrArenas) -> bool { 227 | match &cgir.tys[self].kind { 228 | TyKind::Unit => true, 229 | _ => false, 230 | } 231 | } 232 | } 233 | 234 | fn ty_from_core_inner(gcx: &GlobalCtxt, cgir: &mut CgIrArenas, ty: Id) -> Ty { 235 | match gcx.arenas.core.ty(ty).kind.clone() { 236 | core::TyKind::Unit => Ty::new(cgir, TyKind::Unit), 237 | core::TyKind::Primitive(prim) => Ty::new(cgir, TyKind::Primitive(prim)), 238 | core::TyKind::Var(c, x) => Ty::new(cgir, TyKind::Var(c, x)), 239 | core::TyKind::Arrow(a, mut b) => { 240 | let mut tys = im::vector![ty_from_core_inner(gcx, cgir, a)]; 241 | while let core::TyKind::Arrow(a1, b1) = gcx.arenas.core.ty(b).kind { 242 | tys.push_back(ty_from_core_inner(gcx, cgir, a1)); 243 | b = b1; 244 | } 245 | let b = ty_from_core_inner(gcx, cgir, b); 246 | Ty::new(cgir, TyKind::Arrow(tys, b)) 247 | } 248 | core::TyKind::Forall(x, _, b) => { 249 | let b = ty_from_core_inner(gcx, cgir, b); 250 | Ty::new(cgir, TyKind::Forall(x, b)) 251 | } 252 | core::TyKind::Meta(_, _) => unreachable!(), 253 | core::TyKind::InsertedMeta(_) => unreachable!(), 254 | core::TyKind::Free(x) => Ty::new(cgir, TyKind::Free(x)), 255 | core::TyKind::Enum(x, tys) => { 256 | let tys = tys 257 | .into_iter() 258 | .map(|x| ty_from_core_inner(gcx, cgir, x)) 259 | .collect(); 260 | 261 | Ty::new(cgir, TyKind::Enum(x, tys)) 262 | } 263 | core::TyKind::Tuple(tys) => { 264 | let tys = tys 265 | .into_iter() 266 | .map(|x| ty_from_core_inner(gcx, cgir, x)) 267 | .collect(); 268 | Ty::new(cgir, TyKind::Tuple(tys)) 269 | } 270 | core::TyKind::TupleFlex(_) => unreachable!(), 271 | } 272 | } 273 | -------------------------------------------------------------------------------- /src/codegen/llvm.rs: -------------------------------------------------------------------------------- 1 | use std::{ffi::CString, io, path::Path, ptr, rc::Rc}; 2 | 3 | use llvm_sys::{ 4 | analysis::{LLVMVerifierFailureAction, LLVMVerifyModule}, 5 | bit_writer::LLVMWriteBitcodeToFile, 6 | core::{ 7 | LLVMAddFunction, LLVMAddIncoming, LLVMAppendBasicBlockInContext, LLVMBuildAdd, LLVMBuildBr, 8 | LLVMBuildCall2, LLVMBuildCondBr, LLVMBuildICmp, LLVMBuildMul, LLVMBuildPhi, LLVMBuildRet, 9 | LLVMBuildRetVoid, LLVMBuildSDiv, LLVMBuildSub, LLVMConstInt, LLVMConstNamedStruct, 10 | LLVMConstStructInContext, LLVMContextCreate, LLVMContextDispose, LLVMCountParams, 11 | LLVMCreateBuilderInContext, LLVMDisposeBuilder, LLVMDisposeMessage, LLVMDisposeModule, 12 | LLVMDumpModule, LLVMFunctionType, LLVMGetInsertBlock, LLVMGetNamedFunction, LLVMGetParam, 13 | LLVMInt1TypeInContext, LLVMInt64TypeInContext, LLVMModuleCreateWithNameInContext, 14 | LLVMPositionBuilderAtEnd, LLVMStructCreateNamed, LLVMStructTypeInContext, 15 | LLVMVoidTypeInContext, 16 | }, 17 | prelude::{ 18 | LLVMBasicBlockRef, LLVMBool, LLVMBuilderRef, LLVMContextRef, LLVMModuleRef, LLVMTypeRef, 19 | LLVMValueRef, 20 | }, 21 | LLVMIntPredicate, 22 | }; 23 | 24 | #[derive(Clone, Debug)] 25 | pub struct Context { 26 | inner: Rc, 27 | } 28 | 29 | #[derive(Debug)] 30 | struct ContextInner(LLVMContextRef); 31 | 32 | impl Context { 33 | pub fn new() -> Self { 34 | Self { 35 | // SAFETY: This is always safe. 
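// The context is reference-counted so that `Module`, `Builder`, `Type`, and friends can share it; `LLVMContextDispose` runs only once the last clone is dropped.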
36 | inner: Rc::new(ContextInner(unsafe { LLVMContextCreate() })), 37 | } 38 | } 39 | } 40 | 41 | impl Default for Context { 42 | fn default() -> Self { 43 | Self::new() 44 | } 45 | } 46 | 47 | impl Drop for ContextInner { 48 | fn drop(&mut self) { 49 | // SAFETY: We have not been dropped before. 50 | unsafe { LLVMContextDispose(self.0) } 51 | } 52 | } 53 | 54 | #[derive(Clone, Debug)] 55 | pub struct Module { 56 | inner: Rc, 57 | ctx: Context, 58 | } 59 | 60 | #[derive(Debug)] 61 | struct ModuleInner(LLVMModuleRef); 62 | 63 | impl Module { 64 | pub fn new(ctx: &Context, module_id: &str) -> Self { 65 | let module_id = CString::new(module_id).unwrap(); 66 | // SAFETY: ctx is valid and so is module_id 67 | let module = unsafe { LLVMModuleCreateWithNameInContext(module_id.as_ptr(), ctx.inner.0) }; 68 | Module { 69 | inner: Rc::new(ModuleInner(module)), 70 | ctx: ctx.clone(), 71 | } 72 | } 73 | 74 | pub fn dump(&self) { 75 | // SAFETY: we are valid. 76 | unsafe { 77 | LLVMDumpModule(self.inner.0); 78 | } 79 | } 80 | 81 | pub fn add_function(&self, name: &str, ty: Type) -> Function { 82 | let name = CString::new(name).unwrap(); 83 | // SAFETY: self and ty are valid 84 | let val = unsafe { LLVMAddFunction(self.inner.0, name.as_ptr(), ty.inner) }; 85 | Function { 86 | inner: val, 87 | ctx: self.ctx.clone(), 88 | } 89 | } 90 | 91 | pub fn verify(&self) { 92 | let mut error = ptr::null_mut(); 93 | // SAFETY: self.inner is valid, and error is initialized by 94 | // `verify` before `dispose`. 95 | unsafe { 96 | LLVMVerifyModule( 97 | self.inner.0, 98 | LLVMVerifierFailureAction::LLVMAbortProcessAction, 99 | &mut error, 100 | ); 101 | LLVMDisposeMessage(error); 102 | }; 103 | } 104 | 105 | pub fn write_to_file(&self, p: &Path) -> Result<(), io::Error> { 106 | let path = CString::new(p.as_os_str().to_str().unwrap()).unwrap(); 107 | // SAFETY: self.inner and path are valid. 108 | let res = unsafe { LLVMWriteBitcodeToFile(self.inner.0, path.as_ptr()) }; 109 | if res == 0 { 110 | Ok(()) 111 | } else { 112 | Err(io::Error::new( 113 | io::ErrorKind::Other, 114 | "error writing bitcode to file", 115 | )) 116 | } 117 | } 118 | 119 | pub fn get_named_function(&self, name: &str) -> Option { 120 | let name = CString::new(name).unwrap(); 121 | // SAFETY: self and ty are valid 122 | let val = unsafe { LLVMGetNamedFunction(self.inner.0, name.as_ptr()) }; 123 | if val.is_null() { 124 | return None; 125 | } 126 | Some(Function { 127 | inner: val, 128 | ctx: self.ctx.clone(), 129 | }) 130 | } 131 | } 132 | 133 | #[derive(Clone, Debug)] 134 | pub struct Function { 135 | inner: LLVMValueRef, 136 | ctx: Context, 137 | } 138 | 139 | impl Function { 140 | pub fn append_basic_block(&self, name: &str) -> BasicBlock { 141 | let name = CString::new(name).unwrap(); 142 | // SAFETY: self, ctx, and name are valid. 143 | let bb = 144 | unsafe { LLVMAppendBasicBlockInContext(self.ctx.inner.0, self.inner, name.as_ptr()) }; 145 | BasicBlock { 146 | inner: bb, 147 | ctx: self.ctx.clone(), 148 | } 149 | } 150 | 151 | pub fn count_params(&self) -> usize { 152 | // SAFETY: self.inner is valid. 153 | (unsafe { LLVMCountParams(self.inner) }) as usize 154 | } 155 | 156 | pub fn get_param(&self, index: usize) -> Value { 157 | assert!(index < self.count_params()); 158 | // SAFETY: self.inner and index are valid. 
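// (The assert above guarantees `index` is within the function's parameter count.)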
159 | let val = unsafe { LLVMGetParam(self.inner, index.try_into().unwrap()) }; 160 | Value { 161 | inner: val, 162 | ctx: self.ctx.clone(), 163 | } 164 | } 165 | } 166 | 167 | #[derive(Clone, Debug)] 168 | pub struct Value { 169 | inner: LLVMValueRef, 170 | ctx: Context, 171 | } 172 | 173 | impl Value { 174 | pub fn const_struct(ctx: &Context, vals: &[Value], packed: bool) -> Self { 175 | let mut vals = vals.iter().map(|x| x.inner).collect::>(); 176 | // SAFETY: All vals are valid. 177 | let val = unsafe { 178 | LLVMConstStructInContext( 179 | ctx.inner.0, 180 | vals.as_mut_ptr(), 181 | vals.len().try_into().unwrap(), 182 | packed as LLVMBool, 183 | ) 184 | }; 185 | 186 | Value { 187 | inner: val, 188 | ctx: ctx.clone(), 189 | } 190 | } 191 | 192 | pub fn const_named_struct(ctx: &Context, ty: Type, vals: &[Value]) -> Self { 193 | let mut vals = vals.iter().map(|x| x.inner).collect::>(); 194 | 195 | // SAFETY: If ty is not a struct type, this is invalid, but 196 | // not UB. ty is valid otherwise. All vals are valid, too. 197 | let val = unsafe { 198 | LLVMConstNamedStruct(ty.inner, vals.as_mut_ptr(), vals.len().try_into().unwrap()) 199 | }; 200 | Value { 201 | inner: val, 202 | ctx: ctx.clone(), 203 | } 204 | } 205 | 206 | pub fn const_int(ctx: &Context, ty: Type, n: u64, sign_extend: bool) -> Self { 207 | // SAFETY: If ty is not an int type, this is invalid, but not UB. ty is valid otherwise. 208 | let val = unsafe { LLVMConstInt(ty.inner, n, sign_extend as LLVMBool) }; 209 | Value { 210 | inner: val, 211 | ctx: ctx.clone(), 212 | } 213 | } 214 | 215 | pub fn add_incoming(&self, incoming: &[(Value, BasicBlock)]) { 216 | let (mut vals, mut blocks): (Vec<_>, Vec<_>) = 217 | incoming.iter().map(|(x, y)| (x.inner, y.inner)).unzip(); 218 | 219 | // SAFETY: self, vals, and blocks are valid. We have asserted 220 | // they are the same length. Additionally, adding incoming to 221 | // a non-phi is invalid but not UB. 222 | unsafe { 223 | LLVMAddIncoming( 224 | self.inner, 225 | vals.as_mut_ptr(), 226 | blocks.as_mut_ptr(), 227 | vals.len().try_into().unwrap(), 228 | ) 229 | } 230 | } 231 | } 232 | 233 | #[derive(Clone, Debug)] 234 | pub struct BasicBlock { 235 | inner: LLVMBasicBlockRef, 236 | ctx: Context, 237 | } 238 | 239 | impl Drop for Module { 240 | fn drop(&mut self) { 241 | // SAFETY: We have not been dropped before. 242 | unsafe { LLVMDisposeModule(self.inner.0) } 243 | } 244 | } 245 | 246 | #[derive(Clone, Debug)] 247 | pub struct Type { 248 | // N.B. LLVMTypeRef is destroyed with the context. 
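// Holding `ctx` below keeps the owning `Context` alive for as long as this `Type` is in use.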
249 | inner: LLVMTypeRef, 250 | ctx: Context, 251 | } 252 | 253 | impl Type { 254 | pub fn struct_named(ctx: Context, name: &str) -> Self { 255 | let name = CString::new(name.as_bytes()).unwrap(); 256 | // SAFETY: ctx is valid and so is name 257 | let ty = unsafe { LLVMStructCreateNamed(ctx.inner.0, name.as_ptr()) }; 258 | Type { inner: ty, ctx } 259 | } 260 | 261 | pub fn i64(ctx: &Context) -> Self { 262 | let ty = unsafe { LLVMInt64TypeInContext(ctx.inner.0) }; 263 | Type { 264 | inner: ty, 265 | ctx: ctx.clone(), 266 | } 267 | } 268 | 269 | pub fn i1(ctx: &Context) -> Self { 270 | let ty = unsafe { LLVMInt1TypeInContext(ctx.inner.0) }; 271 | Type { 272 | inner: ty, 273 | ctx: ctx.clone(), 274 | } 275 | } 276 | 277 | pub fn void(ctx: &Context) -> Self { 278 | let ty = unsafe { LLVMVoidTypeInContext(ctx.inner.0) }; 279 | Type { 280 | inner: ty, 281 | ctx: ctx.clone(), 282 | } 283 | } 284 | 285 | pub fn function(ctx: &Context, result: Type, params: &[Type]) -> Self { 286 | let mut tys = params.iter().map(|x| x.inner).collect::>(); 287 | // SAFETY: result and tys are valid. 288 | let ty = unsafe { 289 | LLVMFunctionType( 290 | result.inner, 291 | tys.as_mut_ptr(), 292 | tys.len().try_into().unwrap(), 293 | false as LLVMBool, 294 | ) 295 | }; 296 | Type { 297 | inner: ty, 298 | ctx: ctx.clone(), 299 | } 300 | } 301 | 302 | pub fn struct_(ctx: &Context, elements: &[Type], packed: bool) -> Type { 303 | let mut tys = elements.iter().map(|x| x.inner).collect::>(); 304 | let ty = unsafe { 305 | LLVMStructTypeInContext( 306 | ctx.inner.0, 307 | tys.as_mut_ptr(), 308 | tys.len().try_into().unwrap(), 309 | packed as LLVMBool, 310 | ) 311 | }; 312 | Type { 313 | inner: ty, 314 | ctx: ctx.clone(), 315 | } 316 | } 317 | } 318 | 319 | #[derive(Debug, Clone)] 320 | pub struct Builder { 321 | inner: Rc, 322 | ctx: Context, 323 | } 324 | 325 | impl Builder { 326 | pub fn new(ctx: &Context) -> Self { 327 | Builder { 328 | // SAFETY: The context is valid. 329 | inner: Rc::new(BuilderInner(unsafe { 330 | LLVMCreateBuilderInContext(ctx.inner.0) 331 | })), 332 | ctx: ctx.clone(), 333 | } 334 | } 335 | 336 | pub fn position_at_end(&self, bb: &BasicBlock) { 337 | // SAFETY: self and bb are valid 338 | unsafe { LLVMPositionBuilderAtEnd(self.inner.0, bb.inner) } 339 | } 340 | 341 | pub fn build_ret(&self, val: Value) -> Value { 342 | // SAFETY: self and val are valid. 343 | let val = unsafe { LLVMBuildRet(self.inner.0, val.inner) }; 344 | Value { 345 | inner: val, 346 | ctx: self.ctx.clone(), 347 | } 348 | } 349 | 350 | pub fn build_add(&self, left: Value, right: Value, name: &str) -> Value { 351 | let name = CString::new(name).unwrap(); 352 | // SAFETY: self, left, right, and name are valid. 353 | let val = unsafe { LLVMBuildAdd(self.inner.0, left.inner, right.inner, name.as_ptr()) }; 354 | Value { 355 | inner: val, 356 | ctx: self.ctx.clone(), 357 | } 358 | } 359 | 360 | pub fn build_sub(&self, left: Value, right: Value, name: &str) -> Value { 361 | let name = CString::new(name).unwrap(); 362 | // SAFETY: self.inner, left, right, and name are valid. 363 | let val = unsafe { LLVMBuildSub(self.inner.0, left.inner, right.inner, name.as_ptr()) }; 364 | Value { 365 | inner: val, 366 | ctx: self.ctx.clone(), 367 | } 368 | } 369 | 370 | pub fn build_mul(&self, left: Value, right: Value, name: &str) -> Value { 371 | let name = CString::new(name).unwrap(); 372 | // SAFETY: self.inner, left, right, and name are valid. 
373 | let val = unsafe { LLVMBuildMul(self.inner.0, left.inner, right.inner, name.as_ptr()) }; 374 | Value { 375 | inner: val, 376 | ctx: self.ctx.clone(), 377 | } 378 | } 379 | 380 | pub fn build_sdiv(&self, left: Value, right: Value, name: &str) -> Value { 381 | let name = CString::new(name).unwrap(); 382 | // SAFETY: self.inner, left, right, and name are valid. 383 | let val = unsafe { LLVMBuildSDiv(self.inner.0, left.inner, right.inner, name.as_ptr()) }; 384 | Value { 385 | inner: val, 386 | ctx: self.ctx.clone(), 387 | } 388 | } 389 | 390 | pub fn build_icmp(&self, op: LLVMIntPredicate, left: Value, right: Value, name: &str) -> Value { 391 | let name = CString::new(name).unwrap(); 392 | // SAFETY: self.inner, left, right, and name are valid. 393 | let val = 394 | unsafe { LLVMBuildICmp(self.inner.0, op, left.inner, right.inner, name.as_ptr()) }; 395 | Value { 396 | inner: val, 397 | ctx: self.ctx.clone(), 398 | } 399 | } 400 | 401 | pub fn build_ret_void(&self) -> Value { 402 | // SAFETY: self.inner is valid. 403 | let val = unsafe { LLVMBuildRetVoid(self.inner.0) }; 404 | Value { 405 | inner: val, 406 | ctx: self.ctx.clone(), 407 | } 408 | } 409 | 410 | pub fn build_phi(&self, ty: Type, name: &str) -> Value { 411 | let name = CString::new(name).unwrap(); 412 | // SAFETY: self.inner, ty, and name are valid. 413 | let val = unsafe { LLVMBuildPhi(self.inner.0, ty.inner, name.as_ptr()) }; 414 | Value { 415 | inner: val, 416 | ctx: self.ctx.clone(), 417 | } 418 | } 419 | 420 | pub fn build_br(&self, block: &BasicBlock) -> Value { 421 | // SAFETY: self and block are valid. 422 | let val = unsafe { LLVMBuildBr(self.inner.0, block.inner) }; 423 | Value { 424 | inner: val, 425 | ctx: self.ctx.clone(), 426 | } 427 | } 428 | 429 | pub fn build_cond_br(&self, cond: &Value, then: &BasicBlock, then_else: &BasicBlock) -> Value { 430 | // SAFETY: self, cond, then, and then_else are valid. 431 | let val = unsafe { LLVMBuildCondBr(self.inner.0, cond.inner, then.inner, then_else.inner) }; 432 | Value { 433 | inner: val, 434 | ctx: self.ctx.clone(), 435 | } 436 | } 437 | 438 | pub fn build_call(&self, func: &Function, ty: &Type, args: &[Value], name: &str) -> Value { 439 | let name = CString::new(name).unwrap(); 440 | let mut args = args.iter().map(|x| x.inner).collect::>(); 441 | let val = unsafe { 442 | LLVMBuildCall2( 443 | self.inner.0, 444 | ty.inner, 445 | func.inner, 446 | args.as_mut_ptr(), 447 | args.len().try_into().unwrap(), 448 | name.as_ptr(), 449 | ) 450 | }; 451 | Value { 452 | inner: val, 453 | ctx: self.ctx.clone(), 454 | } 455 | } 456 | 457 | pub fn get_insert_block(&self) -> Option { 458 | // SAFETY: self is valid 459 | let block = unsafe { LLVMGetInsertBlock(self.inner.0) }; 460 | if block.is_null() { 461 | None 462 | } else { 463 | Some(BasicBlock { 464 | inner: block, 465 | ctx: self.ctx.clone(), 466 | }) 467 | } 468 | } 469 | } 470 | 471 | #[derive(Debug)] 472 | struct BuilderInner(LLVMBuilderRef); 473 | 474 | impl Drop for BuilderInner { 475 | fn drop(&mut self) { 476 | // SAFETY: We haven't been dropped before. 
477 | unsafe { LLVMDisposeBuilder(self.0) } 478 | } 479 | } 480 | -------------------------------------------------------------------------------- /src/ctxt.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | 3 | use crate::{ 4 | ast::AstArenas, 5 | diag::DiagReportCtxt, 6 | typeck::{ast::CoreAstArenas, norm::TyckArenas}, 7 | }; 8 | 9 | #[derive(Default, Debug)] 10 | pub struct GlobalCtxt { 11 | pub arenas: Arenas, 12 | pub drcx: RefCell, 13 | } 14 | 15 | impl GlobalCtxt { 16 | pub fn new() -> Self { 17 | Self::default() 18 | } 19 | 20 | pub fn clear(&self) { 21 | self.arenas.clear(); 22 | self.drcx.borrow_mut().clear(); 23 | } 24 | } 25 | 26 | #[derive(Default, Debug)] 27 | pub struct Arenas { 28 | pub ast: AstArenas, 29 | pub core: CoreAstArenas, 30 | pub tyck: TyckArenas, 31 | } 32 | 33 | impl Arenas { 34 | pub fn clear(&self) { 35 | self.ast.clear(); 36 | self.core.clear(); 37 | self.tyck.clear(); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/diag.rs: -------------------------------------------------------------------------------- 1 | //! The global reporting context for diagnostics. 2 | 3 | use std::fmt; 4 | 5 | use crate::parse::Span; 6 | 7 | pub type Diagnostic = ariadne::Report<'static, Span>; 8 | 9 | /// The global reporting context for diagnostics. 10 | pub struct DiagReportCtxt { 11 | errors: Vec, 12 | nonfatals: Vec, 13 | fatal: Option, 14 | } 15 | 16 | impl fmt::Debug for DiagReportCtxt { 17 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 18 | f.debug_struct("DiagReportCtxt").finish_non_exhaustive() 19 | } 20 | } 21 | 22 | impl Default for DiagReportCtxt { 23 | fn default() -> Self { 24 | Self::new() 25 | } 26 | } 27 | 28 | impl DiagReportCtxt { 29 | /// Create a new `DiagReportCtxt`. 30 | #[must_use] 31 | pub fn new() -> Self { 32 | Self { 33 | errors: Vec::new(), 34 | nonfatals: Vec::new(), 35 | fatal: None, 36 | } 37 | } 38 | 39 | /// Clear the list of synchronized errors. 40 | pub fn clear_syncd(&mut self) { 41 | self.errors.clear(); 42 | } 43 | 44 | /// Clear the list of nonfatals. 45 | pub fn clear_nonfatals(&mut self) { 46 | self.nonfatals.clear(); 47 | } 48 | 49 | /// Clear the current fatal error. 50 | pub fn clear_fatal(&mut self) { 51 | self.fatal = None; 52 | } 53 | 54 | /// Clear the entire reporting context 55 | pub fn clear(&mut self) { 56 | self.clear_fatal(); 57 | self.clear_nonfatals(); 58 | self.clear_syncd(); 59 | } 60 | 61 | /// Report an error that was synchronizable. 62 | pub fn report_syncd(&mut self, value: Diagnostic) { 63 | self.errors.push(value); 64 | } 65 | 66 | /// Report a non-fatal error. 67 | pub fn report_non_fatal(&mut self, value: Diagnostic) { 68 | self.nonfatals.push(value); 69 | } 70 | 71 | /// Report a fatal error. If there is already a fatal error reported, it 72 | /// will not be replaced. 73 | pub fn report_fatal(&mut self, value: Diagnostic) { 74 | if self.fatal.is_none() { 75 | self.fatal = Some(value); 76 | } 77 | } 78 | 79 | /// Get the list of nonfatal errors. 80 | #[must_use] 81 | pub fn nonfatals(&self) -> &[Diagnostic] { 82 | &self.nonfatals 83 | } 84 | 85 | /// Get the current fatal error, if any. 86 | #[must_use] 87 | pub fn fatal(&self) -> Option<&Diagnostic> { 88 | self.fatal.as_ref() 89 | } 90 | 91 | /// Get the list of synchronizable errors. 
92 | #[must_use] 93 | pub fn errors(&self) -> &[Diagnostic] { 94 | &self.errors 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::{Debug, Display}; 2 | 3 | use thiserror::Error; 4 | 5 | /// The error type used within Tethys. 6 | #[derive(Error, Debug)] 7 | pub enum TysError { 8 | /// IO errors 9 | #[error("i/o error")] 10 | Io(#[from] std::io::Error), 11 | /// UTF-8 decoding errors 12 | #[error("utf-8 decoding error")] 13 | FromUtf8(#[from] std::string::FromUtf8Error), 14 | /// Formatting errors 15 | #[error("formatting error")] 16 | Fmt(#[from] std::fmt::Error), 17 | /// A fatal error was reported within the global diagnostic reporting 18 | /// context. 19 | #[error( 20 | "internal diagnostic representation was printed incorrectly, please file a bug report" 21 | )] 22 | Diagnostic, 23 | /// Any other error, using [`eyre`] 24 | #[error(transparent)] 25 | Other(#[from] eyre::Report), 26 | } 27 | 28 | impl TysError { 29 | /// Try to downcast the error into a concrete type, if the error is a 30 | /// [`TysError::Other`]. 31 | /// 32 | /// # Errors 33 | /// 34 | /// `self` is returned if the error could not be downcast. 35 | pub fn try_downcast(self) -> Result 36 | where 37 | E: Display + Debug + Send + Sync + 'static, 38 | { 39 | if let TysError::Other(err) = self { 40 | let x = err.downcast()?; 41 | Ok(x) 42 | } else { 43 | Err(self) 44 | } 45 | } 46 | 47 | /// Try to downcast a reference to the error into a reference to a concrete 48 | /// type, if the error is a [`TysError::Other`]. 49 | #[must_use] 50 | pub fn try_downcast_ref(&self) -> Option<&E> 51 | where 52 | E: Display + Debug + Send + Sync + 'static, 53 | { 54 | if let TysError::Other(err) = self { 55 | err.downcast_ref() 56 | } else { 57 | None 58 | } 59 | } 60 | 61 | /// Try to downcast a mutable reference to the error into a mutable 62 | /// reference to a concrete type, if the error is a [`TysError::Other`]. 63 | pub fn try_downcast_mut(&mut self) -> Option<&mut E> 64 | where 65 | E: Display + Debug + Send + Sync + 'static, 66 | { 67 | if let TysError::Other(err) = self { 68 | err.downcast_mut() 69 | } else { 70 | None 71 | } 72 | } 73 | } 74 | 75 | /// A handy alias for [`Result`], genericized over `T`. 
76 | pub type TysResult = Result; 77 | -------------------------------------------------------------------------------- /src/intern.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | cell::RefCell, 3 | collections::HashMap, 4 | hash::{BuildHasher, Hash, Hasher}, 5 | }; 6 | 7 | use id_arena::{Arena, Id}; 8 | 9 | use self::private::Private; 10 | 11 | #[derive(Debug)] 12 | pub struct Interner { 13 | data: RefCell>>, 14 | } 15 | 16 | impl Default for Interner { 17 | fn default() -> Self { 18 | Self::new() 19 | } 20 | } 21 | 22 | impl Interner { 23 | pub fn new() -> Self { 24 | Self { 25 | data: RefCell::new(HashMap::new()), 26 | } 27 | } 28 | 29 | pub fn clear(&self) { 30 | self.data.borrow_mut().clear(); 31 | } 32 | 33 | pub fn intern(&self, arena: &mut Arena, value: T) -> Interned { 34 | let mut data = self.data.borrow_mut(); 35 | let mut hasher = data.hasher().build_hasher(); 36 | value.hash(&mut hasher); 37 | let hash = hasher.finish(); 38 | *data 39 | .entry(hash) 40 | .or_insert_with(|| Interned::new_unchecked(arena.alloc(value))) 41 | } 42 | } 43 | 44 | #[derive(Debug)] 45 | pub struct Interned(pub Id, Private); 46 | 47 | impl Interned { 48 | pub(crate) fn new_unchecked(t: Id) -> Self { 49 | Interned(t, private::Private) 50 | } 51 | } 52 | 53 | impl Copy for Interned {} 54 | impl Clone for Interned { 55 | fn clone(&self) -> Self { 56 | *self 57 | } 58 | } 59 | 60 | impl PartialEq> for Interned { 61 | fn eq(&self, other: &Interned) -> bool { 62 | self.0 == other.0 63 | } 64 | } 65 | 66 | impl Eq for Interned {} 67 | impl Hash for Interned { 68 | fn hash(&self, state: &mut H) { 69 | self.0.hash(state) 70 | } 71 | } 72 | 73 | mod private { 74 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 75 | pub struct Private; 76 | } 77 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![warn(clippy::undocumented_unsafe_blocks)] 2 | use std::{collections::HashMap, path::Path}; 3 | 4 | use ariadne::Source; 5 | use calypso_base::symbol::Symbol; 6 | use ctxt::GlobalCtxt; 7 | use error::TysResult; 8 | use spinneret::{encoder::ExportKind, Function, InsnBuilderBase}; 9 | 10 | use crate::{ 11 | ast::ItemKind, 12 | typeck::{ 13 | ast::{DeBruijnLvl, Expr}, 14 | norm::{eval_ty, nf_ty_force}, 15 | surf_ty_to_core, TypeExpectation, TypeckCtxt, 16 | }, 17 | }; 18 | 19 | pub mod ast; 20 | pub mod codegen; 21 | pub mod ctxt; 22 | pub mod diag; 23 | pub mod error; 24 | pub mod intern; 25 | pub mod parse; 26 | pub mod resolve; 27 | pub mod typeck; 28 | 29 | pub fn run(src: &str, gcx: &GlobalCtxt, suppress_output: bool) -> TysResult<()> { 30 | let items = parse::run(src, gcx); 31 | 32 | resolve::resolve_code_unit(gcx, &items)?; 33 | // let cu = lowering::lower_code_unit(gcx, decls)?; 34 | 35 | let mut cont = true; 36 | if !suppress_output { 37 | { 38 | let mut drcx = gcx.drcx.borrow_mut(); 39 | for err in drcx.errors() { 40 | err.eprint(Source::from(&src))?; 41 | } 42 | 43 | if let Some(fatal) = drcx.fatal() { 44 | fatal.eprint(Source::from(&src))?; 45 | } 46 | drcx.clear(); 47 | } 48 | 49 | let mut values = HashMap::new(); 50 | let mut lifted = Vec::new(); 51 | for &item in &items { 52 | let mut w = Vec::new(); 53 | let doc = ast::pretty::pp_item(gcx, item); 54 | doc.render(80, &mut w).unwrap(); 55 | println!("{}", String::from_utf8(w).unwrap()); 56 | 57 | if let ItemKind::Value(t, e) = gcx.arenas.ast.item(item).kind { 58 | let t = 
surf_ty_to_core(gcx, TypeckCtxt::default(), t); 59 | { 60 | let mut drcx = gcx.drcx.borrow_mut(); 61 | for err in drcx.errors() { 62 | err.eprint(Source::from(&src))?; 63 | } 64 | 65 | if let Some(fatal) = drcx.fatal() { 66 | fatal.eprint(Source::from(&src))?; 67 | drcx.clear(); 68 | continue; 69 | } 70 | drcx.clear(); 71 | } 72 | let t = t.unwrap(); 73 | 74 | let e = typeck::check( 75 | gcx, 76 | TypeckCtxt::default(), 77 | e, 78 | eval_ty(gcx, im::Vector::new(), t), 79 | TypeExpectation::Definition(gcx.arenas.core.ty(t).span), 80 | ); 81 | 82 | { 83 | let mut drcx = gcx.drcx.borrow_mut(); 84 | for err in drcx.errors() { 85 | cont = false; 86 | err.eprint(Source::from(&src))?; 87 | } 88 | 89 | if let Some(fatal) = drcx.fatal() { 90 | fatal.eprint(Source::from(&src))?; 91 | drcx.clear(); 92 | cont = false; 93 | continue; 94 | } 95 | drcx.clear(); 96 | } 97 | let e = e.unwrap(); 98 | Expr::report_deferred(e, gcx); 99 | { 100 | let mut drcx = gcx.drcx.borrow_mut(); 101 | for err in drcx.errors() { 102 | cont = false; 103 | err.eprint(Source::from(&src))?; 104 | } 105 | 106 | if let Some(fatal) = drcx.fatal() { 107 | fatal.eprint(Source::from(&src))?; 108 | drcx.clear(); 109 | cont = false; 110 | continue; 111 | } 112 | drcx.clear(); 113 | } 114 | let t = nf_ty_force(gcx, DeBruijnLvl::from(0usize), im::Vector::new(), t); 115 | 116 | let mut w = Vec::new(); 117 | let doc = typeck::pretty::pp_expr( 118 | 0, 119 | gcx, 120 | DeBruijnLvl::from(0usize), 121 | im::Vector::new(), 122 | e, 123 | ); 124 | doc.render(80, &mut w).unwrap(); 125 | 126 | let mut w1 = Vec::new(); 127 | let doc = 128 | typeck::pretty::pp_ty(0, gcx, DeBruijnLvl::from(0usize), im::Vector::new(), t) 129 | .group(); 130 | doc.render(80, &mut w1).unwrap(); 131 | 132 | println!( 133 | "\n{}\n{}", 134 | String::from_utf8(w).unwrap(), 135 | String::from_utf8(w1).unwrap() 136 | ); 137 | if !cont { 138 | return Ok(()); 139 | } 140 | 141 | let (lift, e) = codegen::closure::closure_convert(gcx, e); 142 | lifted.extend(lift.into_iter()); 143 | values.insert(gcx.arenas.ast.item(item).id, (e, t)); 144 | 145 | println!(); 146 | } 147 | } 148 | 149 | let item = items 150 | .iter() 151 | .find(|&&x| gcx.arenas.ast.item(x).ident.as_str() == "main") 152 | .unwrap(); 153 | let id = gcx.arenas.ast.item(*item).id; 154 | let &(main, _) = values.get(&id).unwrap(); 155 | { 156 | let mut w = Vec::new(); 157 | let doc = 158 | typeck::pretty::pp_expr(0, gcx, DeBruijnLvl::from(0usize), im::Vector::new(), main); 159 | doc.render(80, &mut w).unwrap(); 160 | println!("{}", String::from_utf8(w).unwrap()); 161 | } 162 | 163 | // println!("== Lifted =="); 164 | // for &val in &lift { 165 | // let mut w = Vec::new(); 166 | // let doc = 167 | // typeck::pretty::pp_expr(0, gcx, DeBruijnLvl::from(0usize), im::Vector::new(), val); 168 | // doc.render(80, &mut w).unwrap(); 169 | // println!( 170 | // "{}: {}", 171 | // gcx.arenas.core.expr(val).id, 172 | // String::from_utf8(w).unwrap() 173 | // ); 174 | // } 175 | // println!("== Main term =="); 176 | // { 177 | // let mut w = Vec::new(); 178 | // let doc = 179 | // typeck::pretty::pp_expr(0, gcx, DeBruijnLvl::from(0usize), im::Vector::new(), main); 180 | // doc.render(80, &mut w).unwrap(); 181 | // println!("{}", String::from_utf8(w).unwrap()); 182 | // } 183 | 184 | let mut ccx = codegen::CodegenCtxt::new(gcx, values.clone()); 185 | 186 | let t = ccx.module.types().function([], []); 187 | let mut func = ccx.module.function(t); 188 | 189 | let mut func_counter = 1; 190 | let mut bodies = HashMap::new(); 191 | let 
mut statics = HashMap::new(); 192 | let mut values1 = HashMap::new(); 193 | let mut sorted = vec![]; 194 | 195 | for (id, (expr, _)) in values { 196 | let expr = codegen::ir::Expr::from_core(gcx, &mut ccx.arenas, expr); 197 | values1.insert(id, expr); 198 | if ccx.arenas.exprs[expr].ty.is_arrow(&ccx.arenas) { 199 | bodies.insert(expr, Function(func_counter)); 200 | } else { 201 | bodies.insert(expr, Function(func_counter)); 202 | statics.insert(expr, Function(func_counter)); 203 | func.call(Function(func_counter)); 204 | }; 205 | sorted.push(( 206 | Function(func_counter), 207 | expr, 208 | gcx.arenas 209 | .ast 210 | .get_node_by_id(id) 211 | .unwrap() 212 | .ident(gcx) 213 | .unwrap() 214 | .symbol, 215 | )); 216 | func_counter += 1; 217 | } 218 | for val in lifted { 219 | let id = gcx.arenas.core.expr(val).id; 220 | let val = codegen::ir::Expr::from_core(gcx, &mut ccx.arenas, val); 221 | bodies.insert(val, Function(func_counter)); 222 | ccx.lifted.insert(id, val); 223 | sorted.push(( 224 | Function(func_counter), 225 | val, 226 | Symbol::intern(&format!("lifted.{}", id)), 227 | )); 228 | func_counter += 1; 229 | } 230 | ccx.bodies1 = bodies; 231 | ccx.values = values1; 232 | 233 | let func = func.finish(&mut ccx.module); 234 | ccx.module.export("_start", ExportKind::Func, func.0); 235 | 236 | for (_, expr, name) in &sorted { 237 | ccx.declare_func(*name, *expr); 238 | } 239 | for (_, expr, name) in sorted { 240 | ccx.build_func(name, expr); 241 | } 242 | 243 | ccx.lmodule.verify(); 244 | 245 | // let expr = eval::eval_expr(gcx, &mut ecx, im::Vector::new(), main); 246 | // let expr = eval::force_barrier(&mut ecx, expr); 247 | // let expr = eval::quote_expr(gcx, &mut ecx, DeBruijnLvl::from(0usize), expr); 248 | 249 | // let mut w = Vec::new(); 250 | // //let doc = eval::pretty::pp_expr(0, gcx, &mut ecx, expr); 251 | // let doc = 252 | // typeck::pretty::pp_expr(0, gcx, DeBruijnLvl::from(0usize), im::Vector::new(), expr); 253 | // doc.render(80, &mut w).unwrap(); 254 | // println!("{}", String::from_utf8(w).unwrap()); 255 | // println!("{:#?}", items); 256 | // println!("{:#?}", gcx.arenas.ast.res_data.borrow().to_hash_map()); 257 | // println!("\n{:#?}", gcx); 258 | 259 | // let item = gcx.arenas.ast.item(*items.first().unwrap()); 260 | // let mut tyck = TypeckCtxt::new(gcx); 261 | // typeck::check_item(&mut tyck, gcx, *items.first().unwrap()); 262 | 263 | // println!("\n{:#?}", gcx); 264 | // println!("\n{:#?}", tyck); 265 | 266 | ccx.lmodule.write_to_file(Path::new("out.bc")).unwrap(); 267 | 268 | //std::fs::write("out.wasm", ccx.finish_module()).unwrap(); 269 | } 270 | 271 | Ok(()) 272 | 273 | // let tya = Arena::new(); 274 | // let expra = Arena::new(); 275 | // let decla = Arena::new(); 276 | // let mut rcx = ResCtxt { 277 | // ty: &tya, 278 | // expr: &expra, 279 | // decl: &decla, 280 | // decls: HashMap::new(), 281 | // expr_names: vec![], 282 | // ty_names: vec![], 283 | // }; 284 | // let ast = *ast.unwrap().resolve(&mut rcx)?.get(1).unwrap(); 285 | // let ppa = pretty::Arena::new(); 286 | // match ast.kind { 287 | // DeclKind::Defn(_, ty, expr) => { 288 | // println!("{}", ty.pretty(&ppa).pretty(80)); 289 | // println!("{}", expr.pretty(&ppa).pretty(80)); 290 | // } 291 | // } 292 | } 293 | -------------------------------------------------------------------------------- /src/typeck/ast.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | cell::{Cell, RefCell}, 3 | collections::{hash_map::Entry, HashMap}, 4 | hash::Hash, 5 | 
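The code generator above registers every top-level value and every lifted lambda under a `Function` index first, and only then emits bodies (`declare_func` for all, then `build_func` for all), so a body can reference any other top-level function regardless of definition order. Below is a minimal sketch of that two-pass shape; `Module` and `FuncId` are illustrative stand-ins, not the real `CodegenCtxt` API.

```rust
use std::collections::HashMap;

// Sketch of the declare-then-define pattern: register an id for every
// top-level body first, then emit bodies that may reference each other.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct FuncId(u32);

#[derive(Default)]
struct Module {
    names: HashMap<String, FuncId>,
    bodies: HashMap<FuncId, String>,
}

impl Module {
    // Pass 1: declare a symbol so later bodies can refer to it.
    fn declare(&mut self, name: &str) -> FuncId {
        let id = FuncId(self.names.len() as u32);
        self.names.insert(name.to_string(), id);
        id
    }
    // Pass 2: emit a body; by now every callee already has an id.
    fn define(&mut self, id: FuncId, body: String) {
        self.bodies.insert(id, body);
    }
    fn lookup(&self, name: &str) -> Option<FuncId> {
        self.names.get(name).copied()
    }
}

fn main() {
    let mut m = Module::default();

    // Declare everything first, in any order...
    let even = m.declare("even");
    let odd = m.declare("odd");

    // ...then build bodies that reference each other freely.
    let odd_id = m.lookup("odd").unwrap();
    m.define(even, format!("if n == 0 then true else call {odd_id:?}(n - 1)"));
    let even_id = m.lookup("even").unwrap();
    m.define(odd, format!("if n == 0 then false else call {even_id:?}(n - 1)"));

    assert_eq!(m.lookup("even"), Some(even));
    assert_eq!(m.bodies.len(), 2);
}
```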
rc::Rc, 6 | }; 7 | 8 | use ariadne::{Color, Label, Report, ReportKind}; 9 | use calypso_base::symbol::{Ident, Symbol}; 10 | use id_arena::{Arena, Id}; 11 | 12 | use crate::{ 13 | ast::{AstId, BinOpKind, PrimTy, Recursive}, 14 | ctxt::GlobalCtxt, 15 | parse::Span, 16 | }; 17 | 18 | use super::{ 19 | norm::{nf_ty_force, VTy}, 20 | TypeckCtxt, 21 | }; 22 | 23 | index_vec::define_index_type! { 24 | pub struct DeBruijnIdx = u32; 25 | 26 | DISABLE_MAX_INDEX_CHECK = cfg!(not(debug_assertions)); 27 | DEBUG_FORMAT = "DebruijnIdx({})"; 28 | DISPLAY_FORMAT = "{}"; 29 | IMPL_RAW_CONVERSIONS = true; 30 | } 31 | 32 | index_vec::define_index_type! { 33 | pub struct DeBruijnLvl = u32; 34 | 35 | DISABLE_MAX_INDEX_CHECK = cfg!(not(debug_assertions)); 36 | DEBUG_FORMAT = "DebruijnLvl({})"; 37 | DISPLAY_FORMAT = "{}"; 38 | IMPL_RAW_CONVERSIONS = true; 39 | } 40 | 41 | pub const DUMMY_CORE_AST_ID: CoreAstId = CoreAstId { _raw: 0 }; 42 | 43 | index_vec::define_index_type! { 44 | pub struct CoreAstId = u32; 45 | 46 | DISABLE_MAX_INDEX_CHECK = cfg!(not(debug_assertions)); 47 | DEBUG_FORMAT = "CoreAstId({})"; 48 | DISPLAY_FORMAT = "{}"; 49 | IMPL_RAW_CONVERSIONS = true; 50 | } 51 | 52 | #[derive(Clone, Debug)] 53 | pub struct Ty { 54 | pub id: CoreAstId, 55 | pub kind: TyKind, 56 | pub span: Span, 57 | } 58 | 59 | impl Ty { 60 | pub fn new(gcx: &GlobalCtxt, id: CoreAstId, kind: TyKind, span: Span) -> Id { 61 | let x = gcx.arenas.core.ty.borrow_mut().alloc(Ty { id, kind, span }); 62 | assert_eq!( 63 | gcx.arenas 64 | .core 65 | .core_id_to_node 66 | .borrow_mut() 67 | .insert(id, Node::Ty(x)), 68 | None 69 | ); 70 | x 71 | } 72 | 73 | pub fn is_arrow(gcx: &GlobalCtxt, mut this: Id) -> bool { 74 | while let TyKind::Forall(_, _, b) = gcx.arenas.core.ty(this).kind { 75 | this = b; 76 | } 77 | matches!(gcx.arenas.core.ty(this).kind, TyKind::Arrow(..)) 78 | } 79 | 80 | /// N.B. this function only checks for outer monotypes. 81 | /// Higher-rank function inputs may still be present. Use this in 82 | /// combination with [`Self::is_higher_rank`] to check for full 83 | /// monotypes. 84 | pub fn is_monotype(gcx: &GlobalCtxt, this: Id) -> bool { 85 | !matches!(gcx.arenas.core.ty(this).kind, TyKind::Forall(..)) 86 | } 87 | 88 | pub fn contains_holes( 89 | gcx: &GlobalCtxt, 90 | this: Id, 91 | l: DeBruijnLvl, 92 | mut e: im::Vector>, 93 | ) -> bool { 94 | match gcx 95 | .arenas 96 | .core 97 | .ty(nf_ty_force(gcx, l, e.clone(), this)) 98 | .kind 99 | { 100 | TyKind::Unit | TyKind::Primitive(_) | TyKind::Free(_) | TyKind::Var(..) 
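The `Ty::new` constructor above allocates the node in an `id_arena::Arena` (the crate already imported in this file) and then records a side-table entry from the node's stable id to its arena id, asserting the id is fresh. The sketch below mirrors that shape with cut-down stand-in types; it is not the real `CoreAstId`/`Node` definitions.

```rust
// Simplified model of the `Ty::new` / `Expr::new` registration pattern:
// arena allocation plus an id-to-node side table with a freshness check.
use std::collections::HashMap;

use id_arena::{Arena, Id};

#[derive(Debug)]
struct Ty {
    id: u32, // stands in for `CoreAstId`
    name: &'static str,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Node {
    Ty(Id<Ty>),
}

fn new_ty(
    arena: &mut Arena<Ty>,
    id_to_node: &mut HashMap<u32, Node>,
    id: u32,
    name: &'static str,
) -> Id<Ty> {
    let x = arena.alloc(Ty { id, name });
    // Each id may be registered exactly once; a second registration is a bug,
    // which is what the `assert_eq!(..., None)` in `Ty::new` above enforces.
    assert_eq!(id_to_node.insert(id, Node::Ty(x)), None);
    x
}

fn main() {
    let mut arena = Arena::new();
    let mut id_to_node = HashMap::new();
    let unit = new_ty(&mut arena, &mut id_to_node, 1, "Unit");
    assert_eq!(arena[unit].id, 1);
    assert_eq!(arena[unit].name, "Unit");
    assert_eq!(id_to_node.get(&1), Some(&Node::Ty(unit)));
}
```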
=> false, 101 | TyKind::Arrow(a, b) => { 102 | Self::contains_holes(gcx, a, l, e.clone()) || Self::contains_holes(gcx, b, l, e) 103 | } 104 | TyKind::Forall(x, _, b) => { 105 | e.push_back(VTy::rigid(gcx, gcx.arenas.core.next_id(), x, l)); 106 | Self::contains_holes(gcx, b, l + 1, e) 107 | } 108 | TyKind::Meta(_, _) | TyKind::InsertedMeta(_) => true, 109 | TyKind::Enum(_, spine) | TyKind::Tuple(spine) => spine 110 | .iter() 111 | .any(|x| Self::contains_holes(gcx, *x, l, e.clone())), 112 | // Flex-tuples act as holes 113 | TyKind::TupleFlex(_) => true, 114 | } 115 | } 116 | 117 | pub fn is_higher_rank( 118 | gcx: &GlobalCtxt, 119 | this: Id, 120 | l: DeBruijnLvl, 121 | e: im::Vector>, 122 | ) -> bool { 123 | fn inner( 124 | gcx: &GlobalCtxt, 125 | this: Id, 126 | l: DeBruijnLvl, 127 | mut e: im::Vector>, 128 | outer: bool, 129 | ) -> bool { 130 | match gcx 131 | .arenas 132 | .core 133 | .ty(nf_ty_force(gcx, l, e.clone(), this)) 134 | .kind 135 | { 136 | TyKind::Unit | TyKind::Primitive(_) | TyKind::Var(..) | TyKind::Free(_) => false, 137 | TyKind::Arrow(a, b) => { 138 | inner(gcx, a, l, e.clone(), false) || inner(gcx, b, l, e, false) 139 | } 140 | TyKind::Forall(x, _, b) if outer => { 141 | e.push_back(VTy::rigid(gcx, gcx.arenas.core.next_id(), x, l)); 142 | inner(gcx, b, l, e, true) 143 | } 144 | TyKind::Forall(_, _, _) => true, 145 | TyKind::Meta(_, _) | TyKind::InsertedMeta(_) => false, 146 | TyKind::Enum(_, spine) | TyKind::Tuple(spine) | TyKind::TupleFlex(spine) => { 147 | spine.iter().any(|x| inner(gcx, *x, l, e.clone(), false)) 148 | } 149 | } 150 | } 151 | inner(gcx, this, l, e, true) 152 | } 153 | } 154 | 155 | #[derive(Clone, Debug)] 156 | pub enum TyKind { 157 | Unit, 158 | Primitive(PrimTy), 159 | Var(CoreAstId, DeBruijnIdx), 160 | Arrow(Id, Id), 161 | Forall(CoreAstId, Ident, Id), 162 | Meta(MetaVar, im::Vector>), 163 | InsertedMeta(MetaVar), 164 | Free(AstId), 165 | Enum(AstId, im::Vector>), 166 | Tuple(im::Vector>), 167 | TupleFlex(im::Vector>), 168 | } 169 | 170 | #[derive(Clone, Debug)] 171 | pub struct Expr { 172 | pub id: CoreAstId, 173 | pub kind: ExprKind, 174 | pub span: Span, 175 | } 176 | 177 | impl Expr { 178 | pub fn new( 179 | gcx: &GlobalCtxt, 180 | id: CoreAstId, 181 | kind: ExprKind, 182 | span: Span, 183 | ty: Option>, 184 | ) -> Id { 185 | let x = gcx 186 | .arenas 187 | .core 188 | .expr 189 | .borrow_mut() 190 | .alloc(Expr { id, kind, span }); 191 | if let Some(ty) = ty { 192 | gcx.arenas.core.ty_map.borrow_mut().insert(id, ty); 193 | } 194 | assert_eq!( 195 | gcx.arenas 196 | .core 197 | .core_id_to_node 198 | .borrow_mut() 199 | .insert(id, Node::Expr(x)), 200 | None 201 | ); 202 | x 203 | } 204 | } 205 | 206 | #[derive(Clone, Debug)] 207 | pub enum ExprKind { 208 | Unit, 209 | Var(CoreAstId), 210 | LiftedVar(CoreAstId), 211 | /// N.B. 
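As the doc comment on `is_monotype` above notes, that check only rules out an outer `forall`; quantifiers hidden under arrows are caught separately by `is_higher_rank`. The cut-down model below (its own toy `Ty`, not the real `TyKind`) makes the distinction concrete.

```rust
// Toy model of the distinction documented above: `is_outer_monotype` only
// looks at the head of the type, while `is_higher_rank` flags any forall
// that is not part of the outer prefix.
#[derive(Clone, Debug)]
enum Ty {
    Unit,
    Arrow(Box<Ty>, Box<Ty>),
    Forall(Box<Ty>),
}

fn is_outer_monotype(t: &Ty) -> bool {
    !matches!(t, Ty::Forall(_))
}

fn is_higher_rank(t: &Ty) -> bool {
    // Leading foralls are fine (rank 1); a forall anywhere under an arrow
    // makes the type higher-rank.
    fn inner(t: &Ty, outer: bool) -> bool {
        match t {
            Ty::Unit => false,
            Ty::Arrow(a, b) => inner(a, false) || inner(b, false),
            Ty::Forall(b) if outer => inner(b, true),
            Ty::Forall(_) => true,
        }
    }
    inner(t, true)
}

fn main() {
    use Ty::*;
    // forall. () -> (): a polytype, but not higher-rank.
    let rank1 = Forall(Box::new(Arrow(Box::new(Unit), Box::new(Unit))));
    assert!(!is_outer_monotype(&rank1));
    assert!(!is_higher_rank(&rank1));

    // (forall. () -> ()) -> (): no outer forall, yet higher-rank.
    let rank2 = Arrow(Box::new(rank1.clone()), Box::new(Unit));
    assert!(is_outer_monotype(&rank2));
    assert!(is_higher_rank(&rank2));
}
```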
this level is only valid within the most recent 212 | /// [`ExprKind::LiftedLam`] binder 213 | LiftedFree(CoreAstId), 214 | LiftedLamRef(CoreAstId), 215 | Lam(CoreAstId, Ident, Id), 216 | LiftedLam(im::Vector, Id), 217 | LiftedApp(Id, im::Vector>), 218 | App(Id, Id), 219 | TyApp(Id, Id), 220 | Let(CoreAstId, Ident, Recursive, Id, Id, Id), 221 | TyAbs(CoreAstId, Ident, Id), 222 | Free(AstId), 223 | EnumConstructor(AstId, usize), 224 | EnumRecursor(AstId), 225 | Number(i64), 226 | BinaryOp { 227 | left: Id, 228 | kind: BinOpKind, 229 | right: Id, 230 | }, 231 | Boolean(bool), 232 | Err(ExprDeferredError), 233 | If(Id, Id, Id), 234 | Tuple(im::Vector>), 235 | TupleProj(Id, u64), 236 | } 237 | 238 | #[derive(Clone, Debug)] 239 | #[non_exhaustive] 240 | pub enum ExprDeferredError { 241 | Discarded(Id, TypeckCtxt), 242 | } 243 | 244 | impl ExprDeferredError { 245 | #[allow(irrefutable_let_patterns)] 246 | pub fn build(self, gcx: &GlobalCtxt, span: Span) -> Report<'static, Span> { 247 | if let ExprDeferredError::Discarded(t, tcx) = self { 248 | let mut w = Vec::new(); 249 | let t = nf_ty_force(gcx, tcx.lvl, tcx.env.clone(), t); 250 | let doc = super::pretty::pp_ty(0, gcx, tcx.lvl, tcx.env, t); 251 | doc.render(80, &mut w).unwrap(); 252 | let t = String::from_utf8(w).unwrap(); 253 | 254 | let report = Report::build(ReportKind::Error, (), span.lo() as usize) 255 | .with_message("invalid identifier `_`") 256 | .with_label( 257 | Label::new(span) 258 | .with_message("invalid identifier here") 259 | .with_color(Color::Blue), 260 | ) 261 | .with_help(format!("this value was expected to be of type {t}")) 262 | .with_note( 263 | "`_` is only valid on the left-hand side of a variable definition, e.g. `λ_.x`", 264 | ); 265 | 266 | report.finish() 267 | } else { 268 | todo!() 269 | } 270 | } 271 | } 272 | 273 | impl Expr { 274 | pub fn report_deferred(e: Id, gcx: &GlobalCtxt) { 275 | match gcx.arenas.core.expr(e).kind { 276 | ExprKind::Lam(_, _, x) => Self::report_deferred(x, gcx), 277 | ExprKind::App(f, x) => { 278 | Self::report_deferred(f, gcx); 279 | Self::report_deferred(x, gcx); 280 | } 281 | ExprKind::TyApp(f, _) => Self::report_deferred(f, gcx), 282 | ExprKind::Let(_, _, _, _, e1, e2) => { 283 | Self::report_deferred(e1, gcx); 284 | Self::report_deferred(e2, gcx); 285 | } 286 | ExprKind::TyAbs(_, _, x) => Self::report_deferred(x, gcx), 287 | ExprKind::Err(err) => { 288 | gcx.drcx 289 | .borrow_mut() 290 | .report_syncd(err.build(gcx, gcx.arenas.core.expr(e).span)); 291 | } 292 | _ => {} 293 | } 294 | } 295 | } 296 | 297 | #[derive(Clone, Debug)] 298 | pub struct MetaVar(pub Rc>); 299 | 300 | #[derive(Debug)] 301 | pub enum MetaEntry { 302 | Solved(Id), 303 | Unsolved, 304 | } 305 | 306 | #[derive(Debug)] 307 | pub struct MetaInfo { 308 | pub level: DeBruijnLvl, 309 | pub name: Symbol, 310 | pub span: Span, 311 | } 312 | 313 | #[derive(Debug)] 314 | pub struct CoreAstArenas { 315 | pub expr: RefCell>, 316 | pub ty: RefCell>, 317 | next_ast_id: Cell, 318 | core_id_to_node: RefCell>, 319 | surf_to_core: RefCell>, 320 | ty_map: RefCell>>, 321 | } 322 | 323 | impl Default for CoreAstArenas { 324 | fn default() -> Self { 325 | Self { 326 | expr: Default::default(), 327 | ty: Default::default(), 328 | next_ast_id: Cell::new(1), 329 | core_id_to_node: Default::default(), 330 | surf_to_core: Default::default(), 331 | ty_map: Default::default(), 332 | } 333 | } 334 | } 335 | 336 | impl CoreAstArenas { 337 | pub fn ty_of_expr(&self, id: CoreAstId) -> Id { 338 | *self.ty_map.borrow().get(&id).unwrap() 339 | } 
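A `MetaVar` above is shared, mutable state (an `Rc` around a `RefCell`d entry): solving a metavariable writes a `Solved` entry into the cell, and `force` in `norm.rs` further down chases solved entries so the rest of the checker only ever sees the current head of a type. Below is a self-contained miniature of that mechanism, deliberately simplified: no spines, no `MetaInfo`, no arena.

```rust
// Miniature of the metavariable machinery: shared mutable cells plus a
// `force` that follows solved entries.
use std::cell::RefCell;
use std::rc::Rc;

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Unit,
    Meta(MetaVar),
}

#[derive(Clone, Debug, PartialEq)]
struct MetaVar(Rc<RefCell<MetaEntry>>);

#[derive(Debug, PartialEq)]
enum MetaEntry {
    Solved(Ty),
    Unsolved,
}

impl MetaVar {
    fn fresh() -> Self {
        MetaVar(Rc::new(RefCell::new(MetaEntry::Unsolved)))
    }
    fn solve(&self, ty: Ty) {
        *self.0.borrow_mut() = MetaEntry::Solved(ty);
    }
}

/// Follow solved metavariables until the head is rigid or an unsolved hole.
fn force(ty: Ty) -> Ty {
    match ty {
        Ty::Meta(m) => {
            // Copy the solution out before recursing so the borrow is short-lived.
            let solved = match &*m.0.borrow() {
                MetaEntry::Solved(t) => Some(t.clone()),
                MetaEntry::Unsolved => None,
            };
            match solved {
                Some(t) => force(t),
                None => Ty::Meta(m),
            }
        }
        t => t,
    }
}

fn main() {
    let m = MetaVar::fresh();
    let ty = Ty::Meta(m.clone());

    // Before solving, `force` leaves the metavariable in place.
    assert_eq!(force(ty.clone()), Ty::Meta(m.clone()));

    // Solving mutates the shared cell; every clone of `m` observes it.
    m.solve(Ty::Unit);
    assert_eq!(force(ty), Ty::Unit);
}
```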
340 | 341 | pub fn clear(&self) { 342 | self.next_ast_id.set(1); 343 | self.core_id_to_node.borrow_mut().clear(); 344 | self.surf_to_core.borrow_mut().clear(); 345 | } 346 | 347 | pub fn expr(&self, id: Id) -> Expr { 348 | self.expr.borrow()[id].clone() 349 | } 350 | 351 | pub fn ty(&self, id: Id) -> Ty { 352 | self.ty.borrow()[id].clone() 353 | } 354 | 355 | pub fn next_id(&self) -> CoreAstId { 356 | let id = self.next_ast_id.get(); 357 | assert!(id < u32::MAX); 358 | self.next_ast_id.replace(id + 1); 359 | CoreAstId::from_raw(id) 360 | } 361 | 362 | pub fn lower_id(&self, id: AstId) -> CoreAstId { 363 | match self.surf_to_core.borrow_mut().entry(id) { 364 | Entry::Occupied(entry) => *entry.get(), 365 | Entry::Vacant(entry) => { 366 | let id = self.next_id(); 367 | entry.insert(id); 368 | id 369 | } 370 | } 371 | } 372 | 373 | pub fn raise_id(&self, id: CoreAstId) -> Option { 374 | self.surf_to_core 375 | .borrow() 376 | .iter() 377 | .find_map(|(&surf, &core)| (core == id).then_some(surf)) 378 | } 379 | 380 | pub fn get_node_by_id(&self, id: CoreAstId) -> Option { 381 | self.core_id_to_node.borrow().get(&id).copied() 382 | } 383 | } 384 | 385 | // TODO: Add VTy to Node? 386 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 387 | pub enum Node { 388 | Expr(Id), 389 | Ty(Id), 390 | } 391 | 392 | impl Node { 393 | pub fn span(self, gcx: &GlobalCtxt) -> Span { 394 | match self { 395 | Self::Expr(expr) => gcx.arenas.core.expr(expr).span, 396 | Self::Ty(ty) => gcx.arenas.core.ty(ty).span, 397 | } 398 | } 399 | 400 | pub fn ident(self, gcx: &GlobalCtxt) -> Option { 401 | match self { 402 | Self::Expr(expr) => match gcx.arenas.core.expr(expr).kind { 403 | ExprKind::Unit 404 | | ExprKind::Var(_) 405 | | ExprKind::App(_, _) 406 | | ExprKind::TyApp(_, _) 407 | | ExprKind::Free(_) 408 | | ExprKind::EnumConstructor(_, _) 409 | | ExprKind::EnumRecursor(_) 410 | | ExprKind::Err(_) 411 | | ExprKind::Number(_) 412 | | ExprKind::BinaryOp { .. } 413 | | ExprKind::Boolean(_) 414 | | ExprKind::If(..) 415 | | ExprKind::Tuple(..) 416 | | ExprKind::TupleProj(..) => None, 417 | ExprKind::LiftedLam(..) 418 | | ExprKind::LiftedVar(..) 419 | | ExprKind::LiftedFree(..) 420 | | ExprKind::LiftedApp(..) 421 | | ExprKind::LiftedLamRef(..) 
=> { 422 | unimplemented!() 423 | } 424 | ExprKind::Lam(_, id, _) 425 | | ExprKind::Let(_, id, _, _, _, _) 426 | | ExprKind::TyAbs(_, id, _) => Some(id), 427 | }, 428 | Self::Ty(ty) => match gcx.arenas.core.ty(ty).kind { 429 | TyKind::Unit 430 | | TyKind::Var(_, _) 431 | | TyKind::Arrow(_, _) 432 | | TyKind::Meta(_, _) 433 | | TyKind::InsertedMeta(_) 434 | | TyKind::Free(_) 435 | | TyKind::Enum(_, _) 436 | | TyKind::Primitive(_) 437 | | TyKind::Tuple(_) 438 | | TyKind::TupleFlex(_) => None, 439 | TyKind::Forall(_, id, _) => Some(id), 440 | }, 441 | } 442 | } 443 | 444 | pub fn id(self, gcx: &GlobalCtxt) -> CoreAstId { 445 | match self { 446 | Self::Expr(id) => gcx.arenas.core.expr(id).id, 447 | Self::Ty(id) => gcx.arenas.core.ty(id).id, 448 | } 449 | } 450 | } 451 | -------------------------------------------------------------------------------- /src/typeck/norm.rs: -------------------------------------------------------------------------------- 1 | use std::{cell::RefCell, rc::Rc}; 2 | 3 | use calypso_base::symbol::Ident; 4 | use id_arena::{Arena, Id}; 5 | 6 | use crate::{ 7 | ast::{AstId, PrimTy}, 8 | ctxt::GlobalCtxt, 9 | parse::Span, 10 | }; 11 | 12 | use super::ast::{CoreAstId, DeBruijnIdx, DeBruijnLvl, MetaEntry, MetaVar, Ty, TyKind}; 13 | 14 | pub type Env = im::Vector>; 15 | pub type VSpine = im::Vector>; 16 | #[derive(Clone, Debug)] 17 | pub struct Closure(pub Env, pub Id); 18 | 19 | #[derive(Clone, Debug)] 20 | pub struct VTy { 21 | pub id: CoreAstId, 22 | pub kind: VTyKind, 23 | pub span: Span, 24 | } 25 | 26 | impl VTy { 27 | pub fn new(gcx: &GlobalCtxt, id: CoreAstId, kind: VTyKind, span: Span) -> Id { 28 | gcx.arenas 29 | .tyck 30 | .vty 31 | .borrow_mut() 32 | .alloc(VTy { id, kind, span }) 33 | } 34 | 35 | pub fn rigid(gcx: &GlobalCtxt, id: CoreAstId, astid: CoreAstId, lvl: DeBruijnLvl) -> Id { 36 | VTy::new( 37 | gcx, 38 | id, 39 | VTyKind::Rigid(astid, lvl), 40 | Span((u32::MAX..u32::MAX).into()), 41 | ) 42 | } 43 | } 44 | 45 | #[derive(Clone, Debug)] 46 | pub enum VTyKind { 47 | Flex(MetaVar, VSpine), 48 | // TODO: make applyTyClosure somehow take the var span from TVar 49 | Rigid(CoreAstId, DeBruijnLvl), 50 | Unit, 51 | Arrow(Id, Id), 52 | Forall(CoreAstId, Ident, Closure), 53 | Free(AstId), 54 | Enum(AstId, VSpine), 55 | Primitive(PrimTy), 56 | Tuple(VSpine), 57 | TupleFlex(Rc>), 58 | } 59 | 60 | #[derive(Debug)] 61 | pub enum FlexTuple { 62 | Rigid(VSpine), 63 | Flex(VSpine), 64 | } 65 | 66 | pub fn apply_ty_closure(gcx: &GlobalCtxt, Closure(mut env, t): Closure, u: Id) -> Id { 67 | env.push_back(u); 68 | eval_ty(gcx, env, t) 69 | } 70 | 71 | pub fn apply_meta(gcx: &GlobalCtxt, a: Id, sp: VSpine) -> Id { 72 | eval_ty(gcx, sp, a) 73 | } 74 | 75 | pub fn eval_meta(gcx: &GlobalCtxt, i: CoreAstId, p: Span, m: MetaVar, sp: VSpine) -> Id { 76 | let m1 = m.clone(); 77 | match &*m.0.borrow() { 78 | (MetaEntry::Solved(v), _) => apply_meta(gcx, *v, sp), 79 | (MetaEntry::Unsolved, _) => VTy::new(gcx, i, VTyKind::Flex(m1, sp), p), 80 | } 81 | } 82 | 83 | pub fn eval_ty(gcx: &GlobalCtxt, env: Env, ty: Id) -> Id { 84 | let ty = gcx.arenas.core.ty(ty); 85 | match ty.kind { 86 | TyKind::Var(_, i) => env[i.index()], 87 | TyKind::Unit => VTy::new(gcx, ty.id, VTyKind::Unit, ty.span), 88 | TyKind::Arrow(a, b) => VTy::new( 89 | gcx, 90 | ty.id, 91 | VTyKind::Arrow(eval_ty(gcx, env.clone(), a), eval_ty(gcx, env, b)), 92 | ty.span, 93 | ), 94 | TyKind::Free(id) => VTy::new(gcx, ty.id, VTyKind::Free(id), ty.span), 95 | TyKind::Meta(m, sp) => eval_meta(gcx, ty.id, ty.span, m, eval_spine(gcx, 
env, sp)), 96 | TyKind::InsertedMeta(m) => eval_meta(gcx, ty.id, ty.span, m, env), 97 | TyKind::Forall(x, i, t) => { 98 | VTy::new(gcx, ty.id, VTyKind::Forall(x, i, Closure(env, t)), ty.span) 99 | } 100 | TyKind::Enum(id, spine) => VTy::new( 101 | gcx, 102 | ty.id, 103 | VTyKind::Enum(id, eval_spine(gcx, env, spine)), 104 | ty.span, 105 | ), 106 | TyKind::Primitive(prim) => VTy::new(gcx, ty.id, VTyKind::Primitive(prim), ty.span), 107 | TyKind::Tuple(sp) => VTy::new( 108 | gcx, 109 | ty.id, 110 | VTyKind::Tuple(eval_spine(gcx, env, sp)), 111 | ty.span, 112 | ), 113 | TyKind::TupleFlex(sp) => VTy::new( 114 | gcx, 115 | ty.id, 116 | VTyKind::TupleFlex(Rc::new(RefCell::new(FlexTuple::Flex(eval_spine( 117 | gcx, env, sp, 118 | ))))), 119 | ty.span, 120 | ), 121 | } 122 | } 123 | 124 | pub fn eval_spine(gcx: &GlobalCtxt, env: Env, spine: im::Vector>) -> VSpine { 125 | spine 126 | .into_iter() 127 | .map(move |t| eval_ty(gcx, env.clone(), t)) 128 | .collect() 129 | } 130 | 131 | pub fn force(gcx: &GlobalCtxt, ty: Id) -> Id { 132 | let vty = gcx.arenas.tyck.vty(ty); 133 | match vty.kind { 134 | VTyKind::Flex(m, sp) => match m.clone().0.borrow().0 { 135 | MetaEntry::Solved(t) => force(gcx, apply_meta(gcx, t, sp)), 136 | MetaEntry::Unsolved => ty, 137 | }, 138 | VTyKind::TupleFlex(m) => match &*m.borrow() { 139 | FlexTuple::Rigid(spine) => force( 140 | gcx, 141 | VTy::new( 142 | gcx, 143 | gcx.arenas.core.next_id(), 144 | VTyKind::Tuple(spine.clone()), 145 | Span((u32::MAX..u32::MAX).into()), 146 | ), 147 | ), 148 | FlexTuple::Flex(_) => ty, 149 | }, 150 | _ => ty, 151 | } 152 | } 153 | 154 | pub fn lvl2ix(l: DeBruijnLvl, x: DeBruijnLvl) -> DeBruijnIdx { 155 | DeBruijnIdx::from(l.index() - x.index() - 1) 156 | } 157 | 158 | pub fn quote_ty(gcx: &GlobalCtxt, l: DeBruijnLvl, t: Id) -> Id { 159 | let t = force(gcx, t); 160 | let t = gcx.arenas.tyck.vty(t); 161 | match t.kind { 162 | VTyKind::Rigid(id, l1) => Ty::new( 163 | gcx, 164 | gcx.arenas.core.next_id(), 165 | TyKind::Var(id, lvl2ix(l, l1)), 166 | t.span, 167 | ), 168 | VTyKind::Flex(m, sp) => Ty::new( 169 | gcx, 170 | gcx.arenas.core.next_id(), 171 | TyKind::Meta(m, quote_ty_spine(gcx, l, sp)), 172 | t.span, 173 | ), 174 | VTyKind::Unit => Ty::new(gcx, gcx.arenas.core.next_id(), TyKind::Unit, t.span), 175 | VTyKind::Arrow(a, b) => Ty::new( 176 | gcx, 177 | gcx.arenas.core.next_id(), 178 | TyKind::Arrow(quote_ty(gcx, l, a), quote_ty(gcx, l, b)), 179 | t.span, 180 | ), 181 | VTyKind::Forall(x, i, b) => Ty::new( 182 | gcx, 183 | gcx.arenas.core.next_id(), 184 | TyKind::Forall( 185 | x, 186 | i, 187 | quote_ty( 188 | gcx, 189 | l + 1, 190 | apply_ty_closure( 191 | gcx, 192 | b, 193 | VTy::new( 194 | gcx, 195 | gcx.arenas.core.next_id(), 196 | VTyKind::Rigid(x, l), 197 | Span((u32::MAX..u32::MAX).into()), 198 | ), 199 | ), 200 | ), 201 | ), 202 | t.span, 203 | ), 204 | VTyKind::Free(id) => Ty::new(gcx, gcx.arenas.core.next_id(), TyKind::Free(id), t.span), 205 | VTyKind::Enum(id, spine) => Ty::new( 206 | gcx, 207 | gcx.arenas.core.next_id(), 208 | TyKind::Enum(id, quote_ty_spine(gcx, l, spine)), 209 | t.span, 210 | ), 211 | VTyKind::Primitive(ty) => Ty::new( 212 | gcx, 213 | gcx.arenas.core.next_id(), 214 | TyKind::Primitive(ty), 215 | t.span, 216 | ), 217 | VTyKind::Tuple(sp) => Ty::new( 218 | gcx, 219 | gcx.arenas.core.next_id(), 220 | TyKind::Tuple(quote_ty_spine(gcx, l, sp)), 221 | t.span, 222 | ), 223 | VTyKind::TupleFlex(sp) => match &*sp.borrow() { 224 | FlexTuple::Flex(sp) => Ty::new( 225 | gcx, 226 | gcx.arenas.core.next_id(), 227 | 
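`lvl2ix` above converts a de Bruijn level (distance from the outermost binder, used in values) into a de Bruijn index (distance from the innermost binder, used in core syntax) via `l - x - 1`, where `l` is the number of binders currently in scope. A short worked example:

```rust
// Worked example of the level -> index conversion performed by `lvl2ix`.
// With `l` binders in scope, a variable bound at level `x` (0 = outermost)
// has index `l - x - 1` (0 = innermost).
fn lvl2ix(l: usize, x: usize) -> usize {
    l - x - 1
}

fn main() {
    // Under three binders  λa. λb. λc. ...  (levels a=0, b=1, c=2, so l = 3):
    let l = 3;
    assert_eq!(lvl2ix(l, 2), 0); // c, the innermost binder
    assert_eq!(lvl2ix(l, 1), 1); // b
    assert_eq!(lvl2ix(l, 0), 2); // a, the outermost binder

    // Levels stay stable as evaluation goes under more binders, which is why
    // values use them; indices are what the quoted core syntax stores.
    assert_eq!(lvl2ix(l + 1, 0), 3); // the same variable `a`, one binder deeper
}
```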
TyKind::TupleFlex(quote_ty_spine(gcx, l, sp.clone())), 228 | t.span, 229 | ), 230 | _ => unreachable!(), // force 231 | }, 232 | } 233 | } 234 | 235 | pub fn quote_ty_spine(gcx: &GlobalCtxt, l: DeBruijnLvl, spine: VSpine) -> im::Vector> { 236 | spine 237 | .into_iter() 238 | .map(move |a| quote_ty(gcx, l, a)) 239 | .collect() 240 | } 241 | 242 | pub fn nf_ty_force(gcx: &GlobalCtxt, l: DeBruijnLvl, e: Env, t: Id) -> Id { 243 | let vt = eval_ty(gcx, e, t); 244 | let vt = force(gcx, vt); 245 | quote_ty(gcx, l, vt) 246 | } 247 | 248 | #[derive(Debug, Default)] 249 | pub struct TyckArenas { 250 | pub vty: RefCell>, 251 | } 252 | 253 | impl TyckArenas { 254 | pub fn clear(&self) {} 255 | 256 | pub fn vty(&self, id: Id) -> VTy { 257 | self.vty.borrow()[id].clone() 258 | } 259 | } 260 | -------------------------------------------------------------------------------- /src/typeck/unify.rs: -------------------------------------------------------------------------------- 1 | use std::{cmp::Ordering, rc::Rc}; 2 | 3 | use id_arena::Id; 4 | 5 | use crate::ctxt::GlobalCtxt; 6 | 7 | use super::{ 8 | ast::{CoreAstId, DeBruijnLvl, MetaEntry, MetaVar, Ty, TyKind}, 9 | norm::{apply_ty_closure, force, lvl2ix, FlexTuple, VSpine, VTy, VTyKind}, 10 | }; 11 | 12 | #[derive(Clone, Debug)] 13 | struct PartialRenaming { 14 | dom: DeBruijnLvl, 15 | cod: DeBruijnLvl, 16 | ren: im::HashMap, 17 | } 18 | 19 | fn lift_ren(PartialRenaming { dom, cod, mut ren }: PartialRenaming) -> PartialRenaming { 20 | ren.insert(cod, dom); 21 | PartialRenaming { 22 | dom: dom + 1, 23 | cod: cod + 1, 24 | ren, 25 | } 26 | } 27 | 28 | /// Create a partial renaming to convert a spine such as `[4, 6, 5]` 29 | /// (where all are `Rigid`) into `[0, 1, 2]`. 30 | fn invert(gcx: &GlobalCtxt, gamma: DeBruijnLvl, sp: VSpine) -> PartialRenaming { 31 | let mut d = DeBruijnLvl::from(0usize); 32 | let mut r = im::HashMap::new(); 33 | 34 | // println!( 35 | // "{:#?}", 36 | // sp.iter() 37 | // .map(|x| gcx.arenas.tyck.vty(*x).kind) 38 | // .collect::>() 39 | // ); 40 | // TODO: why does `rev` make this work 41 | for t in sp.into_iter().rev() { 42 | match gcx.arenas.tyck.vty(force(gcx, t)).kind { 43 | VTyKind::Rigid(_, x) if !r.contains_key(&x) => { 44 | r.insert(x, d); 45 | d += 1; 46 | } 47 | _ => panic!("invert"), 48 | } 49 | } 50 | 51 | PartialRenaming { 52 | dom: d, 53 | cod: gamma, 54 | ren: r, 55 | } 56 | } 57 | 58 | fn rename_spine( 59 | gcx: &GlobalCtxt, 60 | m: MetaVar, 61 | pren: PartialRenaming, 62 | sp: VSpine, 63 | ) -> Result>, UnifyError> { 64 | sp.into_iter() 65 | .map(move |a| rename(gcx, m.clone(), pren.clone(), a)) 66 | .collect() 67 | } 68 | 69 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 70 | pub enum UnifyError { 71 | Occurs, 72 | Scope(CoreAstId, DeBruijnLvl), 73 | SpineMismatch, 74 | RigidMismatch, 75 | } 76 | 77 | fn rename( 78 | gcx: &GlobalCtxt, 79 | m: MetaVar, 80 | pren: PartialRenaming, 81 | t: Id, 82 | ) -> Result, UnifyError> { 83 | use VTyKind::*; 84 | let t = force(gcx, t); 85 | let t = gcx.arenas.tyck.vty(t); 86 | Ok(match t.kind { 87 | Flex(m1, _) if Rc::ptr_eq(&m1.0, &m.0) => return Err(UnifyError::Occurs), 88 | Flex(m1, sp) => Ty::new( 89 | gcx, 90 | gcx.arenas.core.next_id(), 91 | TyKind::Meta(m1, rename_spine(gcx, m, pren, sp)?), 92 | t.span, 93 | ), 94 | Rigid(x, l) => match pren.ren.get(&l) { 95 | None => return Err(UnifyError::Scope(x, l)), 96 | Some(l1) => Ty::new( 97 | gcx, 98 | gcx.arenas.core.next_id(), 99 | TyKind::Var(x, lvl2ix(pren.dom, *l1)), 100 | t.span, 101 | ), 102 | }, 103 | Unit => Ty::new(gcx, 
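`nf_ty_force` above is normalization by evaluation in miniature: evaluate the syntactic type into a value (where `forall` bodies become closures over their captured environment), force any solved metavariables, and quote back to syntax at the current level. The sketch below reproduces that eval/quote round trip for a tiny type language with its own simplified representation (no metas, no arenas); the indexing convention is the sketch's own and is not claimed to match the project's internals exactly.

```rust
// Self-contained miniature of eval/quote: syntax uses de Bruijn indices,
// values use levels, and a Closure pairs a forall body with its environment.
// Instantiation is "push onto the environment and evaluate", the shape of
// `apply_ty_closure` above.
#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Unit,
    Var(usize), // de Bruijn index
    Arrow(Box<Ty>, Box<Ty>),
    Forall(Box<Ty>),
}

#[derive(Clone, Debug)]
enum VTy {
    Unit,
    Rigid(usize), // de Bruijn level
    Arrow(Box<VTy>, Box<VTy>),
    Forall(Closure),
}

#[derive(Clone, Debug)]
struct Closure {
    env: Vec<VTy>,
    body: Ty,
}

fn eval(env: &[VTy], t: &Ty) -> VTy {
    match t {
        Ty::Unit => VTy::Unit,
        // In this sketch, an index counts binders from the newest end of the
        // environment.
        Ty::Var(i) => env[env.len() - 1 - i].clone(),
        Ty::Arrow(a, b) => VTy::Arrow(Box::new(eval(env, a)), Box::new(eval(env, b))),
        Ty::Forall(b) => VTy::Forall(Closure {
            env: env.to_vec(),
            body: (**b).clone(),
        }),
    }
}

fn apply_closure(c: &Closure, arg: VTy) -> VTy {
    // "Push and evaluate."
    let mut env = c.env.clone();
    env.push(arg);
    eval(&env, &c.body)
}

fn quote(l: usize, v: &VTy) -> Ty {
    match v {
        VTy::Unit => Ty::Unit,
        VTy::Rigid(x) => Ty::Var(l - x - 1), // lvl2ix
        VTy::Arrow(a, b) => Ty::Arrow(Box::new(quote(l, a)), Box::new(quote(l, b))),
        VTy::Forall(c) => {
            // Go under the binder with a fresh rigid variable at the current level.
            let body = apply_closure(c, VTy::Rigid(l));
            Ty::Forall(Box::new(quote(l + 1, &body)))
        }
    }
}

fn main() {
    // forall a. a -> ()
    let poly = Ty::Forall(Box::new(Ty::Arrow(
        Box::new(Ty::Var(0)),
        Box::new(Ty::Unit),
    )));

    // eval followed by quote is the identity on closed types.
    let v = eval(&[], &poly);
    assert_eq!(quote(0, &v), poly);

    // Instantiating the forall at () is just "push and evaluate".
    if let VTy::Forall(c) = v {
        let inst = apply_closure(&c, VTy::Unit);
        assert_eq!(
            quote(0, &inst),
            Ty::Arrow(Box::new(Ty::Unit), Box::new(Ty::Unit))
        );
    }
}
```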
t.id, TyKind::Unit, t.span), 104 | Arrow(a, b) => Ty::new( 105 | gcx, 106 | gcx.arenas.core.next_id(), 107 | TyKind::Arrow( 108 | rename(gcx, m.clone(), pren.clone(), a)?, 109 | rename(gcx, m, pren, b)?, 110 | ), 111 | t.span, 112 | ), 113 | Free(x) => Ty::new(gcx, gcx.arenas.core.next_id(), TyKind::Free(x), t.span), 114 | Forall(x, i, c) => { 115 | let vc = apply_ty_closure( 116 | gcx, 117 | c, 118 | VTy::rigid(gcx, gcx.arenas.core.next_id(), x, pren.cod), 119 | ); 120 | Ty::new( 121 | gcx, 122 | gcx.arenas.core.next_id(), 123 | TyKind::Forall(x, i, rename(gcx, m, lift_ren(pren), vc)?), 124 | t.span, 125 | ) 126 | } 127 | Enum(x, spine) => Ty::new( 128 | gcx, 129 | gcx.arenas.core.next_id(), 130 | TyKind::Enum(x, rename_spine(gcx, m, pren, spine)?), 131 | t.span, 132 | ), 133 | Primitive(prim) => Ty::new( 134 | gcx, 135 | gcx.arenas.core.next_id(), 136 | TyKind::Primitive(prim), 137 | t.span, 138 | ), 139 | Tuple(spine) => Ty::new( 140 | gcx, 141 | gcx.arenas.core.next_id(), 142 | TyKind::Tuple(rename_spine(gcx, m, pren, spine)?), 143 | t.span, 144 | ), 145 | TupleFlex(spine) => match &*spine.borrow() { 146 | FlexTuple::Flex(spine) => Ty::new( 147 | gcx, 148 | gcx.arenas.core.next_id(), 149 | TyKind::TupleFlex(rename_spine(gcx, m, pren, spine.clone())?), 150 | t.span, 151 | ), 152 | _ => unreachable!(), 153 | }, 154 | }) 155 | } 156 | 157 | fn solve( 158 | gcx: &GlobalCtxt, 159 | gamma: DeBruijnLvl, 160 | m: MetaVar, 161 | sp: VSpine, 162 | rhs: Id, 163 | ) -> Result<(), UnifyError> { 164 | let pren = invert(gcx, gamma, sp); 165 | // println!("gamma={gamma:?}, pren={pren:?}"); 166 | let sol = rename(gcx, m.clone(), pren, rhs)?; 167 | m.0.borrow_mut().0 = MetaEntry::Solved(sol); 168 | Ok(()) 169 | } 170 | 171 | fn unify_spine( 172 | gcx: &GlobalCtxt, 173 | l: DeBruijnLvl, 174 | sp1: VSpine, 175 | sp2: VSpine, 176 | ) -> Result<(), UnifyError> { 177 | if sp1.len() != sp2.len() { 178 | return Err(UnifyError::SpineMismatch); 179 | } 180 | for (t1, t2) in sp1.into_iter().zip(sp2.into_iter()) { 181 | unify(gcx, l, t1, t2)?; 182 | } 183 | Ok(()) 184 | } 185 | 186 | pub fn unify(gcx: &GlobalCtxt, l: DeBruijnLvl, t: Id, u: Id) -> Result<(), UnifyError> { 187 | // { 188 | // let mut w = Vec::new(); 189 | // let t = quote_ty(gcx, l, t); 190 | // let u = quote_ty(gcx, l, u); 191 | // let doc = crate::typeck::pretty::pp_ty(0, gcx, t); 192 | // doc.render(80, &mut w).unwrap(); 193 | // let mut w1 = Vec::new(); 194 | // let doc = crate::typeck::pretty::pp_ty(0, gcx, u); 195 | // doc.render(80, &mut w1).unwrap(); 196 | 197 | // println!( 198 | // "unify: {} vs {}", 199 | // String::from_utf8(w).unwrap(), 200 | // String::from_utf8(w1).unwrap() 201 | // ); 202 | // } 203 | use VTyKind::*; 204 | let t = force(gcx, t); 205 | let u = force(gcx, u); 206 | let vt = gcx.arenas.tyck.vty(t); 207 | let vu = gcx.arenas.tyck.vty(u); 208 | 209 | match (vt.kind.clone(), vu.kind.clone()) { 210 | (Flex(m1, sp1), Flex(m2, sp2)) if Rc::ptr_eq(&m1.0, &m2.0) => { 211 | unify_spine(gcx, l, sp1, sp2)?; 212 | } 213 | (Flex(m1, sp1), _) => solve(gcx, l, m1, sp1, u)?, 214 | (_, Flex(m2, sp2)) => solve(gcx, l, m2, sp2, t)?, 215 | (Rigid(_, x1), Rigid(_, x2)) if x1 == x2 => {} 216 | (Unit, Unit) => {} 217 | (Arrow(a1, b1), Arrow(a2, b2)) => { 218 | unify(gcx, l, a1, a2)?; 219 | unify(gcx, l, b1, b2)?; 220 | } 221 | (Forall(x1, _, t1), Forall(x2, _, t2)) => { 222 | let c1 = apply_ty_closure(gcx, t1, VTy::rigid(gcx, gcx.arenas.core.next_id(), x1, l)); 223 | let c2 = apply_ty_closure(gcx, t2, VTy::rigid(gcx, 
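`solve` above is the pattern-unification step: for a constraint `?m x y z ≈ rhs` whose spine consists of distinct rigid variables, `invert` turns the spine's levels into argument positions (the `[4, 6, 5]` to `[0, 1, 2]` example in its doc comment), and `rename` then rebuilds `rhs` under that renaming while performing the occurs check and the scope check. The sketch below illustrates only the inversion idea on bare levels; it is not the real `PartialRenaming` (which, among other things, walks the spine in reverse and panics on non-pattern spines).

```rust
use std::collections::HashMap;

// Sketch of spine inversion in pattern unification: a spine of distinct
// rigid variables, given by their de Bruijn levels, is mapped to the
// argument positions 0, 1, 2, ... of the metavariable's eventual solution.
fn invert(spine: &[usize]) -> Option<HashMap<usize, usize>> {
    let mut ren = HashMap::new();
    for (pos, &lvl) in spine.iter().enumerate() {
        // A variable appearing twice falls outside the pattern fragment,
        // so inversion fails.
        if ren.insert(lvl, pos).is_some() {
            return None;
        }
    }
    Some(ren)
}

fn main() {
    // The doc comment's example: rigid variables at levels 4, 6, 5 become
    // the renaming {4 -> 0, 6 -> 1, 5 -> 2}.
    let ren = invert(&[4, 6, 5]).unwrap();
    assert_eq!(ren[&4], 0);
    assert_eq!(ren[&6], 1);
    assert_eq!(ren[&5], 2);

    // A rigid variable *not* in the spine has no entry; using it in the
    // right-hand side is the scope-error case of `rename`.
    assert!(!ren.contains_key(&3));

    // A repeated variable is rejected.
    assert!(invert(&[4, 4]).is_none());
}
```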
gcx.arenas.core.next_id(), x2, l)); 224 | unify(gcx, l + 1, c1, c2)?; 225 | } 226 | (Free(n1), Free(n2)) if n1 == n2 => {} 227 | (Enum(x1, sp1), Enum(x2, sp2)) if x1 == x2 => { 228 | unify_spine(gcx, l, sp1, sp2)?; 229 | } 230 | (Primitive(p1), Primitive(p2)) if p1 == p2 => {} 231 | (Tuple(t1), Tuple(t2)) => { 232 | unify_spine(gcx, l, t1, t2)?; 233 | } 234 | (TupleFlex(t1), TupleFlex(t2)) => match (&*t1.borrow(), &*t2.borrow()) { 235 | (FlexTuple::Flex(sp1), FlexTuple::Flex(sp2)) => match sp1.len().cmp(&sp2.len()) { 236 | Ordering::Less => { 237 | // (A, B, ...) `unify` (X, Y, Z, ...) 238 | // LH side (t1) := RH side (sp2) 239 | // unify spines, truncated: 240 | // A `unify` X, B `unify` Y 241 | 242 | let sp1 = sp1.clone(); 243 | let mut sp2 = sp2.clone(); 244 | *t1.borrow_mut() = FlexTuple::Flex(sp2.clone()); 245 | 246 | sp2.truncate(sp1.len()); 247 | unify_spine(gcx, l, sp1, sp2)?; 248 | } 249 | Ordering::Equal => { 250 | // (A, B, ...) `unify` (X, Y, ...) 251 | // no need to change flex constraints 252 | // unify spines, no truncation necessary: 253 | // A `unify` X, B `unify` Y 254 | unify_spine(gcx, l, sp1.clone(), sp2.clone())?; 255 | } 256 | Ordering::Greater => { 257 | // (A, B, C, ...) `unify` (X, Y, ...) 258 | // RH side (t2) := LH side (sp1) 259 | // unify spines, truncated: 260 | // A `unify` X, B `unify` Y 261 | 262 | let mut sp1 = sp1.clone(); 263 | let sp2 = sp2.clone(); 264 | *t2.borrow_mut() = FlexTuple::Flex(sp1.clone()); 265 | 266 | sp1.truncate(sp2.len()); 267 | unify_spine(gcx, l, sp1, sp2)?; 268 | } 269 | }, 270 | _ => unreachable!(), // force 271 | }, 272 | (TupleFlex(t1), Tuple(sp2)) => { 273 | let borrow = t1.borrow(); 274 | match &*borrow { 275 | FlexTuple::Flex(sp1) => { 276 | // (A, B, ...) `unify` (X, Y, Z) 277 | // truncate and unify spines: 278 | // A `unify` X, B `unify` Z 279 | let mut sp2_trunc = sp2.clone(); 280 | sp2_trunc.truncate(sp1.len()); 281 | 282 | unify_spine(gcx, l, sp1.clone(), sp2_trunc)?; 283 | // If we succeeded (no rigid mismatch), update ourselves. 284 | // t1 := RH side (sp2) 285 | drop(borrow); 286 | *t1.borrow_mut() = FlexTuple::Rigid(sp2); 287 | } 288 | _ => unreachable!(), // force 289 | }; 290 | } 291 | (Tuple(sp1), TupleFlex(t2)) => { 292 | let borrow = t2.borrow(); 293 | match &*borrow { 294 | FlexTuple::Flex(sp2) => { 295 | // (A, B, C) `unify` (X, Y, ...) 296 | // truncate and unify spines: 297 | // A `unify` X, B `unify` Z 298 | let mut sp1_trunc = sp1.clone(); 299 | sp1_trunc.truncate(sp2.len()); 300 | 301 | unify_spine(gcx, l, sp1_trunc, sp2.clone())?; 302 | // If we succeeded (no rigid mismatch), update ourselves. 303 | // t2 := LH side (sp1) 304 | drop(borrow); 305 | *t2.borrow_mut() = FlexTuple::Rigid(sp1); 306 | } 307 | _ => unreachable!(), // force 308 | }; 309 | } 310 | _ => { 311 | return Err(UnifyError::RigidMismatch); 312 | } 313 | } 314 | Ok(()) 315 | } 316 | -------------------------------------------------------------------------------- /support/README.md: -------------------------------------------------------------------------------- 1 | # support/ 2 | 3 | This file contains UI tests, support files, and other miscellaneous things that are not really related to the main source. This way, the main repo directory isn't super cluttered. -------------------------------------------------------------------------------- /support/examples/fizzbuzz.tys: -------------------------------------------------------------------------------- 1 | def main 2 | : () -> () 3 | = \_. 
4 | each (fizzbuzz 100) println 5 | 6 | def fizzbuzz 7 | : Int -> List[String] 8 | = \max. 9 | map (rangeI 1 100) (\n. 10 | if (divides n 15) 11 | then "FizzBuzz" 12 | else if (divides n 3) 13 | then "Fizz" 14 | else if (divides n 5) 15 | then "Buzz" 16 | # Typeclasses soon(tm) 17 | else intToString n) -------------------------------------------------------------------------------- /support/highlight/.vscodeignore: -------------------------------------------------------------------------------- 1 | .vscode/** 2 | .vscode-test/** 3 | .gitignore 4 | -------------------------------------------------------------------------------- /support/highlight/LICENSE: -------------------------------------------------------------------------------- 1 | ../LICENSE -------------------------------------------------------------------------------- /support/highlight/language-configuration.json: -------------------------------------------------------------------------------- 1 | { 2 | "comments": { 3 | "lineComment": "#", 4 | }, 5 | "brackets": [ 6 | ["{", "}"], 7 | ["[", "]"], 8 | ["(", ")"] 9 | ], 10 | "autoClosingPairs": [ 11 | ["{", "}"], 12 | ["[", "]"], 13 | ["(", ")"], 14 | ["\"", "\""], 15 | ], 16 | "surroundingPairs": [ 17 | ["{", "}"], 18 | ["[", "]"], 19 | ["(", ")"], 20 | ["\"", "\""], 21 | ] 22 | } -------------------------------------------------------------------------------- /support/highlight/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tethys-lang", 3 | "displayName": "Tethys Syntax Highlighting", 4 | "description": "Syntax Highlighting for Tethys", 5 | "publisher": "calypso-lang", 6 | "version": "0.0.1", 7 | "engines": { 8 | "vscode": "^1.63.0" 9 | }, 10 | "categories": [ 11 | "Programming Languages" 12 | ], 13 | "author": "ThePuzzlemaker ", 14 | "bugs": "https://github.com/ThePuzzlemaker/Tethys/issues", 15 | "homepage": "https://github.com/ThePuzzlemaker/Tethys", 16 | "license": "MIT", 17 | "repository": "github:ThePuzzlemaker/Tethys", 18 | "contributes": { 19 | "languages": [{ 20 | "id": "tethys", 21 | "aliases": ["Tethys", "tethys"], 22 | "extensions": [".tys"], 23 | "configuration": "./language-configuration.json" 24 | }], 25 | "grammars": [{ 26 | "language": "tethys", 27 | "scopeName": "source.tethys", 28 | "path": "./syntaxes/tethys.tmLanguage.json" 29 | }] 30 | } 31 | } -------------------------------------------------------------------------------- /support/highlight/syntaxes/tethys.tmLanguage.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://raw.githubusercontent.com/martinring/tmlanguage/master/tmlanguage.json", 3 | "name": "Tethys", 4 | "patterns": [ 5 | { 6 | "include": "#definition" 7 | }, 8 | { 9 | "include": "#types" 10 | }, 11 | { 12 | "include": "#exprs" 13 | }, 14 | { 15 | "include": "#comments" 16 | }, 17 | { 18 | "name": "keyword.operator.tethys", 19 | "match": "\\.|:|=|\\(|\\)" 20 | } 21 | ], 22 | "repository": { 23 | "definition": { 24 | "match": "(\\bdef\\b)\\s*([A-Za-z_][A-Za-z0-9_]*)", 25 | "captures": { 26 | "1": { 27 | "name": "storage.type.tethys" 28 | }, 29 | "2": { 30 | "name": "entity.name.tethys" 31 | } 32 | } 33 | }, 34 | "types": { 35 | "patterns": [ 36 | { 37 | "name": "keyword.operator.tethys", 38 | "match": "(\\bforall\\b|->)" 39 | }, 40 | { 41 | "name": "storage.type.tethys", 42 | "match": "'[A-Za-z_][A-Za-z0-9_]*" 43 | }, 44 | { 45 | "name": "support.type.tethys", 46 | "match": "\\(\\)" 47 | } 48 | ] 49 | }, 50 | 
"exprs": { 51 | "patterns": [ 52 | { 53 | "name": "keyword.control.tethys", 54 | "match": "\\bin\\b" 55 | }, 56 | { 57 | "match": "(\\blet\\b)\\s*([A-Za-z_][A-Za-z0-9_]*)", 58 | "captures": { 59 | "1": { 60 | "name": "keyword.control.tethys" 61 | }, 62 | "2": { 63 | "name": "entity.name.tethys" 64 | } 65 | } 66 | }, 67 | { 68 | "begin": "(\\\\)\\s*", 69 | "beginCaptures": { 70 | "1": { 71 | "name": "keyword.control.tethys" 72 | } 73 | }, 74 | "end": "\\.", 75 | "endCaptures": { 76 | "0": { 77 | "name": "keyword.operator.tethys" 78 | } 79 | }, 80 | "patterns": [{ 81 | "match": "\\s*([A-Za-z_][A-Za-z0-9_]*)\\s*", 82 | "captures": { 83 | "1": { 84 | "name": "entity.name.tethys" 85 | } 86 | } 87 | }] 88 | } 89 | ] 90 | }, 91 | "comments": { 92 | "name": "comment.line.number-sign", 93 | "begin": "#", 94 | "end": "\n" 95 | } 96 | }, 97 | "scopeName": "source.tethys" 98 | } -------------------------------------------------------------------------------- /support/highlight/tethys-lang-0.0.1.vsix: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThePuzzlemaker/tethys/209079a9a9374e6aa85fb3ccd25a3f20809fdb84/support/highlight/tethys-lang-0.0.1.vsix -------------------------------------------------------------------------------- /support/prelude.tys: -------------------------------------------------------------------------------- 1 | #def id 2 | # : forall 'a. 'a -> 'a 3 | # = \x.x 4 | 5 | def const 6 | : forall 'a 'b. 'a -> 'b -> 'a 7 | = \x y.x 8 | 9 | # Commented out for now 10 | 11 | # type List['a] = Nil | Cons T List['a] 12 | # 13 | # def rangeI 14 | # : Int -> Int -> List[Int] 15 | # = \min max. 16 | # let rec helper = \n acc. 17 | # if (n == min - 1) 18 | # then acc 19 | # else helper (n - 1) (Cons n acc) 20 | # in helper max Nil 21 | # 22 | # def divides 23 | # : Int -> Int -> bool 24 | # = \n m.m % n == 0 25 | # 26 | # def rec concat 27 | # : forall 'a. List['a] -> List['a] -> List['a] 28 | # = \xs ys. 29 | # match xs with 30 | # | Nil -> ys 31 | # \ Cons x xs -> Cons x (concat xs ys) 32 | # 33 | # def map 34 | # : forall 'a 'b. List['a] -> ('a -> 'b) -> List['b] 35 | # = \xs f. 36 | # let rec helper = \xs acc. 37 | # match xs with 38 | # | Nil -> acc 39 | # \ Cons x xs -> helper xs (concat acc (Cons (f x) Nil)) 40 | # in helper xs Nilprelude required for this 41 | # 42 | # def each 43 | # : forall 'a. List['a] -> ('a -> ()) -> () 44 | # = \xs f. let _ = map xs f in () 45 | -------------------------------------------------------------------------------- /support/testing.tys: -------------------------------------------------------------------------------- 1 | def inc : Integer -> Integer 2 | = (\x.x)(\num.add num 1) 3 | 4 | #def foo : () -> () 5 | # = \_.() 6 | -------------------------------------------------------------------------------- /support/tests/resolve/tyvar-scope-1.tys: -------------------------------------------------------------------------------- 1 | # --- 2 | # desc: > 3 | # Scope of outer forall type variables is preserved within the `def` body 4 | # mode: check-pass 5 | def foo 6 | : forall 'a . 
'a -> 'a 7 | = let id : 'a -> 'a 8 | = \x.x 9 | in id -------------------------------------------------------------------------------- /support/tests/resolve/tyvar-scope-2.tys: -------------------------------------------------------------------------------- 1 | # --- 2 | # desc: > 3 | # Scope of outer forall type variables does not include higher-rank type 4 | # variables 5 | # mode: diag 6 | # diags: 7 | # - kind: error 8 | # message: cannot find type `'b` in this scope 9 | # labels: 10 | # - loc: 3:20 11 | # message: not found in this scope 12 | # - kind: error 13 | # message: cannot find type `'b` in this scope 14 | # labels: 15 | # - loc: 3:26 16 | # message: not found in this scope 17 | def foo 18 | : forall 'a . 'a -> (forall 'b . 'b -> 'a) -> 'a 19 | = \a f. let id : 'b -> 'b 20 | = \x.x in f a -------------------------------------------------------------------------------- /support/tests/resolve/tyvar-scope-3.tys: -------------------------------------------------------------------------------- 1 | # --- 2 | # desc: > 3 | # Scope of forall type variables does not escape let-bindings 4 | # mode: diag 5 | # diags: 6 | # - kind: error 7 | # message: cannot find type `'b` in this scope 8 | # labels: 9 | # - loc: 5:18 10 | # message: not found in this scope 11 | # - kind: error 12 | # message: cannot find type `'b` in this scope 13 | # labels: 14 | # - loc: 5:24 15 | # message: not found in this scope 16 | def foo 17 | : forall 'a . 'a -> 'a 18 | = let id0 : forall 'b . 'b -> 'b 19 | = \x.x 20 | in let id1 : 'b -> 'b = id0 in id1 -------------------------------------------------------------------------------- /support/tests/resolve/tyvar-scope-4.tys: -------------------------------------------------------------------------------- 1 | # --- 2 | # desc: > 3 | # Scope of forall type variables does not escape multiple let-bindings 4 | # mode: diag 5 | # diags: 6 | # - kind: error 7 | # message: cannot find type `'b` in this scope 8 | # labels: 9 | # - loc: 7:18 10 | # message: not found in this scope 11 | # - kind: error 12 | # message: cannot find type `'c` in this scope 13 | # labels: 14 | # - loc: 7:24 15 | # message: not found in this scope 16 | def foo 17 | : forall 'a . 'a -> 'a 18 | = let id0 : forall 'b . 'b -> 'b 19 | = \x.x 20 | in let id1 : forall 'c . 'c -> 'c 21 | = \x.x 22 | in let id2 : 'b -> 'c = id0 in id2 -------------------------------------------------------------------------------- /support/tests/resolve/tyvar-scope-5.tys: -------------------------------------------------------------------------------- 1 | # --- 2 | # desc: > 3 | # Outer forall type variables are properly resolved within nested `let`-bindings. 4 | # mode: check-pass 5 | def foo 6 | : forall 'a . 'a -> 'a 7 | = let id0 : 'a -> 'a 8 | = let id1 : forall 'b . 'b -> 'b 9 | = let id2 : 'b -> 'b = \x.x 10 | in let id12 : 'a -> 'a 11 | = id2 12 | in id12 13 | in id1 14 | in id0 15 | -------------------------------------------------------------------------------- /support/tests/resolve/tyvar-scope-6.tys: -------------------------------------------------------------------------------- 1 | # --- 2 | # desc: > 3 | # Outer forall type variables do not escape nested `let`-bindings. 
4 | # mode: diag 5 | # diags: 6 | # - kind: error 7 | # message: cannot find type `'b` in this scope 8 | # labels: 9 | # - loc: 7:18 10 | # message: not found in this scope 11 | # - kind: error 12 | # message: cannot find type `'b` in this scope 13 | # labels: 14 | # - loc: 7:24 15 | # message: not found in this scope 16 | def foo 17 | : forall 'a . 'a -> 'a 18 | = let id0 : 'a -> 'a 19 | = let id1 : forall 'b . 'b -> 'b 20 | = let id2 : 'b -> 'b = \x.x 21 | in id2 22 | in let id3 : 'b -> 'b 23 | = id1 24 | in id3 25 | in id0 -------------------------------------------------------------------------------- /support/tethys-mode.el: -------------------------------------------------------------------------------- 1 | ;;; tethys-mode --- Major mode for editing Tethys files. 2 | ;;; 3 | ;;; Commentary: 4 | ;;; 5 | ;;; Code: 6 | 7 | (defvar tethys-builtins 8 | '("true" "false")) 9 | 10 | (defvar tethys-keywords 11 | '("def" "forall" "let" "in" "rec" "λ" "type" "enum" "if" "then" "else")) 12 | 13 | (defvar tethys-tab-width nil 14 | "Tab width for `tethys-mode'.") 15 | 16 | (defvar tethys-font-lock-defaults 17 | `(((":\\|->\\|\\.\\|\\\\\\|=" . font-lock-keyword-face) 18 | (,(regexp-opt tethys-builtins 'words) . font-lock-builtin-face) 19 | (,(regexp-opt tethys-keywords 'words) . font-lock-constant-face) 20 | ("\\<_?[A-Z][A-Za-z0-9_]*\\>" . font-lock-type-face) 21 | ("\\<_?[a-z][A-Za-z0-9_]*\\>" . font-lock-variable-name-face) 22 | ("_" . font-lock-constant-face) 23 | ("'[A-Za-z_][A-Za-z0-9_]*" . font-lock-type-face) 24 | ("[0-9]+" . font-lock-constant-face)))) 25 | 26 | (define-derived-mode tethys-mode prog-mode "Tethys" 27 | "Major mode for editing Tethys files." 28 | (setq font-lock-defaults tethys-font-lock-defaults) 29 | 30 | (when tethys-tab-width 31 | (setq tab-width tethys-tab-width)) 32 | 33 | (setq comment-start "#") 34 | (setq comment-end "") 35 | 36 | (modify-syntax-entry ?# "< b" tethys-mode-syntax-table) 37 | (modify-syntax-entry ?\n "> b" tethys-mode-syntax-table)) 38 | 39 | (add-to-list 'auto-mode-alist '("\\.tys\\'" . tethys-mode)) 40 | 41 | (with-eval-after-load 'eglot 42 | (add-to-list 'eglot-server-programs 43 | '(tethys-mode . ("tethys-debug-lsp")))) 44 | 45 | (defgroup lsp-tethys nil 46 | "Customization group for `tethys-mode' lsp integration." 47 | :group 'lsp-mode) 48 | 49 | (defcustom lsp-tethys-server-path 50 | "tethys-debug-lsp" 51 | "The language server executable." 52 | :group 'lsp-tethys 53 | :type 'string) 54 | 55 | (defcustom lsp-tethys-server-args 56 | `() 57 | "The arguments for starting the language server." 58 | :group 'lsp-tethys 59 | :type '(repeat (string :Tag "Argument"))) 60 | 61 | (defun lsp-tethys--server-command () 62 | "Command with arguments for starting the language server." 63 | (append (list lsp-tethys-server-path) lsp-tethys-server-args)) 64 | 65 | (add-to-list 'lsp-language-id-configuration '(tethys-mode . 
"tethys")) 66 | 67 | (lsp-register-client 68 | (make-lsp-client 69 | :new-connection (lsp-stdio-connection (lambda () (lsp-tethys--server-command))) 70 | :major-modes '(tethys-mode) 71 | :server-id 'lsp-tethys 72 | ;; Fix workspace/configuration issues 73 | :initialized-fn (lambda (workspace) 74 | (with-lsp-workspace workspace 75 | (lsp--set-configuration (lsp-configuration-section "tethys")))) 76 | :synchronize-sections '("tethys") 77 | :language-id "tethys")) 78 | 79 | (provide 'tethys-mode) 80 | 81 | ;;; tethys-mode.el ends here 82 | 83 | -------------------------------------------------------------------------------- /support/tychk_nbe.ml: -------------------------------------------------------------------------------- 1 | (* Source: https://gist.github.com/mb64/f49ccb1bbf2349c8026d8ccf29bd158e#file-tychk_nbe-ml courtesy of MBones, 2 | with some modifications *) 3 | (* Build with: ocamlfind ocamlc -package angstrom,stdio -linkpkg tychk_nbe.ml -o tychk *) 4 | module AST = struct 5 | type ty = 6 | | TNamed of string 7 | | TFun of ty * ty 8 | | TForall of string * ty 9 | type exp = 10 | | Var of string 11 | | App of exp * exp 12 | | Annot of exp * ty 13 | | Lam of string * exp 14 | | Let of string * exp * exp 15 | end 16 | 17 | let elem_index a = (* wish OCaml had this in the stdlib *) 18 | let rec go i = function 19 | | [] -> None 20 | | x :: xs -> if x = a then Some i else go (i+1) xs in 21 | go 0 22 | 23 | module Infer = struct 24 | type idx = int 25 | type lvl = int 26 | type ty = 27 | | TVar of idx 28 | | TFun of ty * ty 29 | | TForall of string * ty 30 | and vty = 31 | | VVar of lvl 32 | | VFun of vty * vty 33 | | VForall of string * (vty -> vty) 34 | | VHole of hole ref 35 | and hole = 36 | | Empty of { scope: lvl } 37 | | Filled of vty 38 | 39 | type ctx = { type_names: string list; lvl: lvl; env: (string * vty) list } 40 | 41 | let initial_ctx: ctx = { type_names = []; lvl = 0; env = [] } 42 | 43 | exception TypeError of string 44 | 45 | let add_ty_to_ctx (name: string) (ctx: ctx): ctx = 46 | { type_names = name :: ctx.type_names 47 | ; lvl = ctx.lvl + 1 48 | ; env = ctx.env } 49 | 50 | let add_var_to_ctx (name: string) (ty: vty) (ctx: ctx): ctx = 51 | { type_names = ctx.type_names 52 | ; lvl = ctx.lvl 53 | ; env = (name, ty) :: ctx.env } 54 | 55 | let lookup_var (name: string) (ctx: ctx) = 56 | match List.assoc_opt name ctx.env with 57 | | Some ty -> ty 58 | | None -> raise (TypeError ("variable " ^ name ^ " not in scope")) 59 | 60 | let ast_ty_to_ty (ast_ty: AST.ty) = 61 | let rec helper (env: string list) (ast_ty: AST.ty) = match ast_ty with 62 | | TNamed n -> (match elem_index n env with 63 | | Some idx -> TVar idx 64 | | None -> raise (TypeError ("type variable " ^ n ^ " not in scope"))) 65 | | TFun(a, b) -> TFun (helper env a, helper env b) 66 | | TForall(n, a) -> TForall (n, helper (n::env) a) in 67 | helper [] ast_ty 68 | 69 | let rec eval (env: vty list) = function 70 | | TVar idx -> List.nth env idx 71 | | TFun(a, b) -> VFun(eval env a, eval env b) 72 | | TForall(name, ty) -> VForall(name, fun x -> eval (x::env) ty) 73 | 74 | let deref = function 75 | | VHole hole -> 76 | let rec helper h = match !h with 77 | | Filled (VHole h') -> 78 | (* path compression *) 79 | let a = helper h' in h := Filled a; a 80 | | Filled a -> a 81 | | _ -> VHole h in 82 | helper hole 83 | | a -> a 84 | 85 | let print_ty (ctx: ctx) ty = 86 | let parens p s = if p then "(" ^ s ^ ")" else s in 87 | let rec helper ctx p t = match deref t with 88 | | VVar lvl -> List.nth ctx.type_names (ctx.lvl - 
lvl - 1) 89 | | VFun(a, b) -> parens p (helper ctx true a ^ " -> " ^ helper ctx false b) 90 | | VForall(n, a) -> 91 | let rec freshen_name n = 92 | if List.mem n ctx.type_names then freshen_name (n ^ "'") else n in 93 | let n' = freshen_name n in 94 | let pr_a = helper (add_ty_to_ctx n' ctx) false (a (VVar ctx.lvl)) in 95 | parens p ("forall " ^ n' ^ ". " ^ pr_a) 96 | | VHole { contents = Empty { scope = lvl } } -> 97 | Printf.sprintf "?[at lvl %d]" lvl 98 | | VHole _ -> raise (invalid_arg "this should've been handled by deref") in 99 | helper ctx false ty 100 | 101 | (* when filling in a hole, a few things need to be checked: 102 | - occurs check: check that you aren't making recursive types 103 | - scope check: check that you aren't using bound vars outside its scope 104 | *) 105 | let unify_hole_prechecks (ctx: ctx) (hole: hole ref) (scope: lvl) ty = 106 | let initial_lvl = ctx.lvl in 107 | let rec helper ctx t = match deref t with 108 | | VVar lvl -> 109 | if lvl >= scope && lvl < initial_lvl 110 | then raise (TypeError ("type variable " ^ print_ty ctx (VVar lvl) ^ " escaping its scope")) 111 | | VFun(a, b) -> helper ctx a; helper ctx b; 112 | | VForall(n, a) -> 113 | helper (add_ty_to_ctx n ctx) (a (VVar ctx.lvl)) 114 | | VHole ({ contents = Empty { scope = l } } as h) -> 115 | if h = hole 116 | then raise (TypeError "occurs check: can't make infinite type") 117 | else if l > scope then h := Empty { scope } 118 | | _ -> raise (invalid_arg "unify_hole_prechecks") 119 | in helper ctx ty 120 | 121 | let rec unify (ctx: ctx) a b = match deref a, deref b with 122 | | VHole hole_a, _ -> unify_hole_ty ctx hole_a b 123 | | _, VHole hole_b -> unify_hole_ty ctx hole_b a 124 | | VVar lvl_a, VVar lvl_b when lvl_a = lvl_b -> () 125 | | VFun(a1, a2), VFun(b1, b2) -> unify ctx a1 b1; unify ctx a2 b2 126 | | VForall(n, a_fun), VForall(_, b_fun) -> 127 | let new_ctx = add_ty_to_ctx n ctx in 128 | unify new_ctx (a_fun (VVar ctx.lvl)) (b_fun (VVar ctx.lvl)) 129 | | _ -> 130 | let a', b' = print_ty ctx a, print_ty ctx b in 131 | raise (TypeError ("mismatch between " ^ a' ^ " and " ^ b')) 132 | 133 | and unify_hole_ty (ctx: ctx) hole ty = 134 | match !hole with 135 | | Empty { scope } -> 136 | if ty <> VHole hole 137 | then (unify_hole_prechecks ctx hole scope ty; hole := Filled ty) 138 | | Filled _ -> raise (invalid_arg "unify_hole_ty") 139 | 140 | let rec eagerly_instantiate (ctx: ctx) = function 141 | | VForall(n, a) -> 142 | let new_hole = ref (Empty { scope = ctx.lvl }) in 143 | eagerly_instantiate ctx (a (VHole new_hole)) 144 | | a -> a 145 | 146 | (* The mutually-recursive typechecking functions *) 147 | 148 | let rec check (ctx: ctx) (term: AST.exp) (ty: vty) = match term, deref ty with 149 | | _, VForall(n, a) -> 150 | check (add_ty_to_ctx n ctx) term (a (VVar ctx.lvl)) 151 | | Lam(var, body), VFun(a, b) -> 152 | check (add_var_to_ctx var a ctx) body b 153 | | Let(var, exp, body), a -> 154 | let exp_ty = infer ctx exp in 155 | check (add_var_to_ctx var exp_ty ctx) body a 156 | | _, a -> 157 | let inferred_ty = infer_and_inst ctx term in 158 | unify ctx inferred_ty a 159 | 160 | and infer (ctx: ctx) (term: AST.exp) = match term with 161 | | Var var -> lookup_var var ctx 162 | | Annot(e, ast_ty) -> 163 | let ty = eval [] (ast_ty_to_ty ast_ty) in 164 | check ctx e ty; ty 165 | | App(f, arg) -> 166 | let f_ty = infer_and_inst ctx f in 167 | begin match deref f_ty with 168 | | VFun(a, b) -> check ctx arg a; b 169 | | VHole ({ contents = Empty { scope } } as hole) -> 170 | let a = VHole (ref (Empty { 
scope })) in 171 | let b = VHole (ref (Empty { scope })) in 172 | hole := Filled (VFun(a, b)); 173 | check ctx arg a; 174 | b 175 | | _ -> raise (TypeError "not a function type") 176 | end 177 | | Lam(var, body) -> 178 | let arg_ty = VHole (ref (Empty { scope = ctx.lvl })) in 179 | let res_ty = infer_and_inst (add_var_to_ctx var arg_ty ctx) body in 180 | VFun(arg_ty, res_ty) 181 | | Let(var, exp, body) -> 182 | let exp_ty = infer ctx exp in 183 | infer (add_var_to_ctx var exp_ty ctx) body 184 | 185 | and infer_and_inst (ctx: ctx) (term: AST.exp) = 186 | let ty = infer ctx term in eagerly_instantiate ctx ty 187 | 188 | end 189 | 190 | (* module Parser = struct 191 | open AST 192 | open Angstrom (* parser combinators library *) 193 | 194 | let keywords = ["forall"; "let"; "in"; "fun"] 195 | 196 | let whitespace = take_while (String.contains " \n\t") 197 | let lexeme a = a <* whitespace 198 | let ident = lexeme ( 199 | let is_ident_char c = 200 | c = '_' || ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') in 201 | let* i = take_while is_ident_char in 202 | if String.length i > 0 then return i else fail "expected ident") 203 | 204 | let str s = lexeme (string s) *> return () 205 | let name = 206 | let* i = ident in 207 | if List.mem i keywords then fail (i ^ " is a keyword") else return i 208 | let keyword k = 209 | let* i = ident in 210 | if i = k then return () else fail ("expected " ^ k) 211 | let parens p = str "(" *> p <* str ")" 212 | 213 | let ty = fix (fun ty -> 214 | let simple_ty = parens ty <|> lift (fun n -> TNamed n) name in 215 | let forall_ty = 216 | let+ () = keyword "forall" 217 | and+ names = many1 name 218 | and+ () = str "." 219 | and+ a = ty in 220 | List.fold_right (fun n a -> TForall(n, a)) names a in 221 | let fun_ty = 222 | let+ arg_ty = simple_ty 223 | and+ () = str "->" 224 | and+ res_ty = ty in 225 | TFun(arg_ty, res_ty) in 226 | forall_ty <|> fun_ty <|> simple_ty "type") 227 | 228 | let exp = fix (fun exp -> 229 | let atomic_exp = parens exp <|> lift (fun n -> Var n) name in 230 | let make_app (f::args) = 231 | List.fold_left (fun f arg -> App(f,arg)) f args in 232 | let simple_exp = lift make_app (many1 atomic_exp) in 233 | let annot_exp = 234 | let+ e = simple_exp 235 | and+ annot = option (fun e -> e) 236 | (lift (fun t e -> Annot(e,t)) (str ":" *> ty)) in 237 | annot e in 238 | let let_exp = 239 | let+ () = keyword "let" 240 | and+ n = name 241 | and+ () = str "=" 242 | and+ e = exp 243 | and+ () = keyword "in" 244 | and+ body = exp in 245 | Let(n, e, body) in 246 | let fun_exp = 247 | let+ () = keyword "fun" 248 | and+ args = many1 name 249 | and+ () = str "->" 250 | and+ body = exp in 251 | List.fold_right (fun arg body -> Lam(arg, body)) args body in 252 | let_exp <|> fun_exp <|> annot_exp "expression") 253 | 254 | let parse (s: string) = 255 | match parse_string ~consume:All (whitespace *> exp) s with 256 | | Ok e -> e 257 | | Error msg -> failwith msg 258 | end 259 | 260 | let main () = 261 | let stdin = Stdio.In_channel.(input_all stdin) in 262 | let exp = Parser.parse stdin in 263 | let () = print_endline "parsed" in 264 | let open Infer in 265 | let ctx = initial_ctx in 266 | let ty = infer ctx exp in 267 | print_endline ("input : " ^ print_ty ctx ty) 268 | 269 | let () = main () *) -------------------------------------------------------------------------------- /support/typeck.ml: -------------------------------------------------------------------------------- 1 | (* Based on tychk_nbe.ml and Andras Kovacs's elab-zoo `03_holes` *) 2 | module IntMap = 
Map.Make (Int) 3 | 4 | module Dependent = struct 5 | type meta_var = MetaVar of int 6 | type name = string 7 | 8 | type raw = 9 | | RVar of name 10 | | RLam of name * raw 11 | | RApp of raw * raw 12 | | RU 13 | | RPi of name * raw * raw 14 | | RLet of name * raw * raw * raw 15 | | RHole 16 | 17 | type ix = Ix of int 18 | type bd = Bound | Defined 19 | 20 | type ty = tm 21 | 22 | and tm = 23 | | Var of ix 24 | | Lam of name * tm 25 | | App of tm * tm 26 | | U 27 | | Pi of name * ty * ty 28 | | Let of name * ty * tm * tm 29 | | Meta of meta_var 30 | | InsertedMeta of meta_var * bd list 31 | 32 | type lvl = Lvl of int 33 | 34 | type env = value list 35 | and spine = value list 36 | and closure = Closure of env * tm 37 | and vty = value 38 | 39 | and value = 40 | | VFlex of meta_var * spine 41 | | VRigid of lvl * spine 42 | | VLam of name * closure 43 | | VPi of name * vty * closure 44 | | VU 45 | 46 | type meta_entry = Solved of value | Unsolved 47 | 48 | let next_meta = ref 0 49 | let mcxt : meta_entry IntMap.t ref = ref IntMap.empty 50 | 51 | let lookup_meta (MetaVar m) = 52 | match IntMap.find_opt m !mcxt with 53 | | Some x -> x 54 | | None -> raise (invalid_arg "impossible") 55 | 56 | let reset () = 57 | mcxt := IntMap.empty; 58 | next_meta := 0; 59 | () 60 | 61 | let snoc xs y = y :: xs 62 | 63 | let rec closure_apply (Closure (env, t)) u = eval (snoc env u) t 64 | 65 | and v_app t u = 66 | match t with 67 | | VLam (_, t) -> closure_apply t u 68 | | VFlex (m, sp) -> VFlex (m, snoc sp u) 69 | | VRigid (x, sp) -> VRigid (x, snoc sp u) 70 | | _ -> raise (invalid_arg "impossible") 71 | 72 | and v_app_sp t = function [] -> t | u :: sp -> v_app (v_app_sp t sp) u 73 | 74 | and v_meta m = 75 | match lookup_meta m with Solved v -> v | Unsolved -> VFlex (m, []) 76 | 77 | and v_app_bds env v bds = 78 | match (env, bds) with 79 | | [], [] -> v 80 | | t :: env, Bound :: bds -> v_app (v_app_bds env v bds) t 81 | | t :: env, Defined :: bds -> v_app_bds env v bds 82 | | _ -> raise (invalid_arg "impossible") 83 | 84 | and eval env = function 85 | | Var (Ix x) -> List.nth env x 86 | | App (t, u) -> v_app (eval env t) (eval env u) 87 | | Lam (x, t) -> VLam (x, Closure (env, t)) 88 | | Pi (x, a, b) -> VPi (x, eval env a, Closure (env, b)) 89 | | Let (_, _, t, u) -> eval (snoc env (eval env t)) u 90 | | U -> VU 91 | | Meta m -> v_meta m 92 | | InsertedMeta (m, bds) -> v_app_bds env (v_meta m) bds 93 | 94 | let rec force t = 95 | match t with 96 | | VFlex (m, sp) -> ( 97 | match lookup_meta m with Solved t -> force (v_app_sp t sp) | _ -> t) 98 | | t -> t 99 | 100 | let lvl_2_ix (Lvl l) (Lvl x) = Ix (l - x - 1) 101 | let inc_lvl (Lvl l) = Lvl (l + 1) 102 | 103 | let rec quote_sp l t = function 104 | | [] -> t 105 | | u :: sp -> App (quote_sp l t sp, quote l u) 106 | 107 | and quote l t = 108 | match force t with 109 | | VFlex (m, sp) -> quote_sp l (Meta m) sp 110 | | VRigid (x, sp) -> quote_sp l (Var (lvl_2_ix l x)) sp 111 | | VLam (x, t) -> 112 | Lam (x, quote (inc_lvl l) (closure_apply t (VRigid (l, [])))) 113 | | VPi (x, a, b) -> 114 | Pi (x, quote l a, quote (inc_lvl l) (closure_apply b (VRigid (l, [])))) 115 | | VU -> U 116 | 117 | let nf env t = quote (Lvl (List.length env)) (eval env t) 118 | 119 | type types = (string * vty) list 120 | type cxt = { env : env; lvl : lvl; types : types; bds : bd list } 121 | 122 | let fresh_meta cxt = 123 | let m = !next_meta in 124 | next_meta := m + 1; 125 | mcxt := IntMap.add m Unsolved !mcxt; 126 | InsertedMeta (MetaVar m, cxt.bds) 127 | 128 | type p_ren = { dom : 
lvl; cod : lvl; ren : lvl IntMap.t } 129 | 130 | exception Unify_error of unit 131 | 132 | let unify_error = Unify_error () 133 | let unlvl (Lvl x) = x 134 | 135 | let lift { dom; cod; ren } = 136 | { 137 | dom = inc_lvl dom; 138 | cod = inc_lvl cod; 139 | ren = IntMap.add (unlvl cod) dom ren; 140 | } 141 | 142 | let invert gamma sp = 143 | let rec go = function 144 | | [] -> (0, IntMap.empty) 145 | | t :: sp -> ( 146 | let dom, ren = go sp in 147 | match force t with 148 | | VRigid (Lvl x, []) when not (IntMap.mem x ren) -> 149 | (dom + 1, IntMap.add x (Lvl dom) ren) 150 | | _ -> raise unify_error) 151 | in 152 | let dom, ren = go sp in 153 | { dom = Lvl dom; cod = gamma; ren } 154 | 155 | let rename m pren v = 156 | let rec go_sp pren t = function 157 | | [] -> t 158 | | u :: sp -> App (go_sp pren t sp, go pren u) 159 | and go pren t = 160 | match force t with 161 | | VFlex (m', sp) when m == m' -> raise unify_error 162 | | VFlex (m', sp) -> go_sp pren (Meta m') sp 163 | | VRigid (Lvl x, sp) -> ( 164 | match IntMap.find_opt x pren.ren with 165 | | None -> raise unify_error 166 | | Some x' -> go_sp pren (Var (lvl_2_ix pren.dom x')) sp) 167 | | VLam (x, t) -> 168 | Lam (x, go (lift pren) (closure_apply t (VRigid (pren.cod, [])))) 169 | | VPi (x, a, b) -> 170 | Pi 171 | ( x, 172 | go pren a, 173 | go (lift pren) (closure_apply b (VRigid (pren.cod, []))) ) 174 | | VU -> U 175 | in 176 | go pren v 177 | 178 | let lams l = 179 | let rec go x t = 180 | if x == l then t else Lam ("x" ^ Int.to_string (x + 1), go (x + 1) t) 181 | in 182 | go 0 183 | 184 | let solve gamma (MetaVar m) sp rhs = 185 | let pren = invert gamma sp in 186 | let rhs = rename (MetaVar m) pren rhs in 187 | let solution = eval [] (lams (unlvl pren.dom) rhs) in 188 | mcxt := IntMap.add m (Solved solution) !mcxt; 189 | () 190 | 191 | let rec unify_sp l sp sp' = 192 | match (sp, sp') with 193 | | [], [] -> () 194 | | t :: sp, t' :: sp' -> 195 | unify_sp l sp sp'; 196 | unify l t t' 197 | | _ -> raise unify_error 198 | 199 | and unify l t u = 200 | match (force t, force u) with 201 | | VLam (_, t), VLam (_, t') -> 202 | unify (inc_lvl l) 203 | (closure_apply t (VRigid (l, []))) 204 | (closure_apply t' (VRigid (l, []))) 205 | | t, VLam (_, t') -> 206 | unify (inc_lvl l) 207 | (v_app t (VRigid (l, []))) 208 | (closure_apply t' (VRigid (l, []))) 209 | | VLam (_, t), t' -> 210 | unify (inc_lvl l) 211 | (closure_apply t (VRigid (l, []))) 212 | (v_app t' (VRigid (l, []))) 213 | | VU, VU -> () 214 | | VPi (x, a, b), VPi (x', a', b') -> 215 | unify l a a'; 216 | unify (inc_lvl l) 217 | (closure_apply b (VRigid (l, []))) 218 | (closure_apply b' (VRigid (l, []))) 219 | | VRigid (x, sp), VRigid (x', sp') when x == x' -> unify_sp l sp sp' 220 | | VFlex (m, sp), VFlex (m', sp') when m == m' -> unify_sp l sp sp' 221 | | VFlex (m, sp), t' -> solve l m sp t' 222 | | t, VFlex (m', sp') -> solve l m' sp' t 223 | | _ -> raise unify_error 224 | 225 | let empty_cxt = { env = []; lvl = Lvl 0; types = []; bds = [] } 226 | 227 | let bind { env; lvl; types; bds } x a = 228 | { 229 | env = snoc env (VRigid (lvl, [])); 230 | lvl = inc_lvl lvl; 231 | types = snoc types (x, a); 232 | bds = snoc bds Bound; 233 | } 234 | 235 | let define { env; lvl; types; bds } x t a = 236 | { 237 | env = snoc env t; 238 | lvl = inc_lvl lvl; 239 | types = snoc types (x, a); 240 | bds = snoc bds Defined; 241 | } 242 | 243 | let close_val cxt t = Closure (cxt.env, quote (inc_lvl cxt.lvl) t) 244 | 245 | exception Real_unify_error of ty * ty 246 | 247 | let unify_catch cxt t t' = 
248 | try unify cxt.lvl t t' 249 | with Unify_error () -> 250 | raise (Real_unify_error (quote cxt.lvl t, quote cxt.lvl t')) 251 | 252 | exception Name_not_in_scope of name 253 | 254 | let rec check cxt t a = 255 | match (t, force a) with 256 | | RLam (x, t), VPi (_, a, b) -> 257 | Lam (x, check (bind cxt x a) t (closure_apply b (VRigid (cxt.lvl, [])))) 258 | | RLet (x, a, t, u), a' -> 259 | let a = check cxt a VU in 260 | let va = eval cxt.env a in 261 | let t = check cxt t va in 262 | let vt = eval cxt.env t in 263 | let u = check (define cxt x vt va) u a' in 264 | Let (x, a, t, u) 265 | | RHole, a -> fresh_meta cxt 266 | | t, expected -> 267 | let t, inferred = infer cxt t in 268 | unify_catch cxt expected inferred; 269 | t 270 | 271 | and infer cxt = function 272 | | RVar x -> 273 | let rec go ix = function 274 | | (x', a) :: types -> 275 | if String.equal x x' then (Var (Ix ix), a) else go (ix + 1) types 276 | | [] -> raise (invalid_arg (Printf.sprintf "Name not in scope: %s" x)) 277 | in 278 | go 0 cxt.types 279 | | RLam (x, t) -> 280 | let a = eval cxt.env (fresh_meta cxt) in 281 | let t, b = infer (bind cxt x a) t in 282 | (Lam (x, t), VPi (x, a, close_val cxt b)) 283 | | RApp (t, u) -> 284 | let t, tty = infer cxt t in 285 | (* ensure that tty is Pi *) 286 | let a, b = 287 | match force tty with 288 | | VPi (x, a, b) -> (a, b) 289 | | tty -> 290 | let a = eval cxt.env (fresh_meta cxt) in 291 | let b = Closure (cxt.env, fresh_meta (bind cxt "x" a)) in 292 | unify_catch cxt (VPi ("x", a, b)) tty; 293 | (a, b) 294 | in 295 | let u = check cxt u a in 296 | (App (t, u), closure_apply b (eval cxt.env u)) 297 | | RU -> (U, VU) 298 | | RPi (x, a, b) -> 299 | let a = check cxt a VU in 300 | let b = check (bind cxt x (eval cxt.env a)) b VU in 301 | (Pi (x, a, b), VU) 302 | | RLet (x, a, t, u) -> 303 | let a = check cxt a VU in 304 | let va = eval cxt.env a in 305 | let t = check cxt t va in 306 | let vt = eval cxt.env t in 307 | let u, b = infer (define cxt x vt va) u in 308 | (Let (x, a, t, u), b) 309 | | RHole -> 310 | let a = eval cxt.env (fresh_meta cxt) in 311 | let t = fresh_meta cxt in 312 | (t, a) 313 | 314 | (* 315 | let id : (A : U) -> A -> A = λ A x. x; 316 | let id1 : (A : U) -> A -> A = λ A x. 
id _ x; 317 | U 318 | *) 319 | 320 | (* 321 | 322 | *) 323 | let tm = 324 | RLet 325 | ( "id", 326 | RPi ("A", RU, RPi ("_", RVar "A", RVar "A")), 327 | RLam ("A", RLam ("x", RVar "x")), 328 | RLet 329 | ( "id1", 330 | RPi ("A", RU, RPi ("_", RVar "A", RVar "A")), 331 | RLam ("A", RLam ("x", RApp (RApp (RVar "id", RHole), RVar "x"))), 332 | RU ) ) 333 | 334 | let inferred = infer empty_cxt tm 335 | end 336 | 337 | -------------------------------------------------------------------------------- /support/typeck/.gitignore: -------------------------------------------------------------------------------- 1 | _build -------------------------------------------------------------------------------- /support/typeck/.ocamlformat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThePuzzlemaker/tethys/209079a9a9374e6aa85fb3ccd25a3f20809fdb84/support/typeck/.ocamlformat -------------------------------------------------------------------------------- /support/typeck/bin/dune: -------------------------------------------------------------------------------- 1 | (executable 2 | (public_name typeck) 3 | (name main) 4 | (libraries typeck)) 5 | -------------------------------------------------------------------------------- /support/typeck/bin/main.ml: -------------------------------------------------------------------------------- 1 | let () = print_endline "Hello, World!" 2 | -------------------------------------------------------------------------------- /support/typeck/dune-project: -------------------------------------------------------------------------------- 1 | (lang dune 3.11) 2 | 3 | (name typeck) 4 | 5 | (generate_opam_files true) 6 | 7 | (source 8 | (github username/reponame)) 9 | 10 | (authors "Author Name") 11 | 12 | (maintainers "Maintainer Name") 13 | 14 | (license LICENSE) 15 | 16 | (documentation https://url/to/documentation) 17 | 18 | (package 19 | (name typeck) 20 | (synopsis "A short synopsis") 21 | (description "A longer description") 22 | (depends ocaml dune core) 23 | (tags 24 | (topics "to describe" your project))) 25 | 26 | ; See the complete stanza docs at https://dune.readthedocs.io/en/stable/dune-files.html#dune-project 27 | -------------------------------------------------------------------------------- /support/typeck/lib/dune: -------------------------------------------------------------------------------- 1 | (library 2 | (name typeck) 3 | (libraries core ppx_sexp_conv) 4 | (preprocess (pps ppx_sexp_conv))) 5 | (env (dev (flags (:standard -w -32-38)))) 6 | -------------------------------------------------------------------------------- /support/typeck/test/dune: -------------------------------------------------------------------------------- 1 | (test 2 | (name typeck)) 3 | -------------------------------------------------------------------------------- /support/typeck/test/typeck.ml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThePuzzlemaker/tethys/209079a9a9374e6aa85fb3ccd25a3f20809fdb84/support/typeck/test/typeck.ml -------------------------------------------------------------------------------- /support/typeck/typeck.opam: -------------------------------------------------------------------------------- 1 | # This file is generated by dune, edit dune-project instead 2 | opam-version: "2.0" 3 | synopsis: "A short synopsis" 4 | description: "A longer description" 5 | maintainer: ["Maintainer Name"] 6 | authors: ["Author Name"] 7 | license: 
"LICENSE" 8 | tags: ["topics" "to describe" "your" "project"] 9 | homepage: "https://github.com/username/reponame" 10 | doc: "https://url/to/documentation" 11 | bug-reports: "https://github.com/username/reponame/issues" 12 | depends: [ 13 | "ocaml" 14 | "dune" {>= "3.11"} 15 | "core" 16 | "odoc" {with-doc} 17 | ] 18 | build: [ 19 | ["dune" "subst"] {dev} 20 | [ 21 | "dune" 22 | "build" 23 | "-p" 24 | name 25 | "-j" 26 | jobs 27 | "@install" 28 | "@runtest" {with-test} 29 | "@doc" {with-doc} 30 | ] 31 | ] 32 | dev-repo: "git+https://github.com/username/reponame.git" 33 | -------------------------------------------------------------------------------- /support/vim-tethys/README.md: -------------------------------------------------------------------------------- 1 | vim-tethys 2 | ========== 3 | 4 | ## Description 5 | 6 | This plugin provides Tethys language support for vim. Currently it only does 7 | file detection and syntax highlighting. 8 | 9 | ## Installation 10 | 11 | [vim-plug](https://github.com/junegunn/vim-plug): 12 | 13 | ```vim 14 | " in init.lua 15 | Plug("ThePuzzlemaker/tethys", { rtp = "support/vim-tethys" }) 16 | " or init.vim 17 | Plug "ThePuzzlemaker/tethys", { "rtp": "support/vim-tethys" } 18 | ``` 19 | -------------------------------------------------------------------------------- /support/vim-tethys/ftdetect/tethys.vim: -------------------------------------------------------------------------------- 1 | au BufNewFile,BufRead *.tys setfiletype tethys 2 | -------------------------------------------------------------------------------- /support/vim-tethys/syntax/tethys.vim: -------------------------------------------------------------------------------- 1 | " Vim syntax file 2 | " Language: Tethys 3 | " Maintainer: tbmreza 4 | " Last Change: 2022 May 21 5 | 6 | if exists("b:current_syntax") 7 | finish 8 | endif 9 | 10 | syn match tethysComment "\v#.*$" 11 | 12 | syn region tethysString start=+"+ end=+"+ skip=+\\\\\|\\"+ contains=@Spell 13 | syn match tethysNumber "\<\d\+\>" " integer 14 | syn match tethysNumber "\<\d\+\.\d*\%([eE][-+]\=\d\+\)\=\>" " float, with dot, optional exp 15 | syn match tethysNumber "\.\d\+\%([eE][-+]\=\d\+\)\=\>" " float, starts with a dot, optional exp 16 | syn match tethysNumber "\<\d\+[eE][-+]\=\d\+\>" " float, without dot, with exp 17 | 18 | syn keyword tethysFunction println map rangeI divides intToString 19 | syn match tethysFunction "\h\w*" display contained 20 | 21 | syn keyword tethysStatement def nextgroup=tethysFunction skipwhite 22 | syn keyword tethysConditional if then else 23 | syn keyword tethysRepeat each 24 | syn match tethysOperator "\\" 25 | syn match tethysOperator "\." 
26 | syn match tethysOperator ":" 27 | syn match tethysOperator "=" 28 | syn match tethysOperator "\(forall\>\|->\)" 29 | 30 | syn match tethysType "()" 31 | syn keyword tethysType Int List String 32 | 33 | hi link tethysComment Comment 34 | hi link tethysString String 35 | hi link tethysNumber Number 36 | hi link tethysFunction Function 37 | hi link tethysStatement Statement 38 | hi link tethysConditional Conditional 39 | hi link tethysRepeat Repeat 40 | hi link tethysOperator Operator 41 | hi link tethysKeyword Keyword 42 | hi link tethysType Type 43 | 44 | let b:current_syntax = "tethys" 45 | -------------------------------------------------------------------------------- /tmp.tys: -------------------------------------------------------------------------------- 1 | # enum List['T] = Nil 2 | # | Cons 'T List['T] 3 | 4 | # enum Option['T] = None 5 | # | Some 'T 6 | 7 | # def optMap : forall 'T 'U.Option['T] -> ('T -> 'U) -> Option['U] 8 | # = λo f.Option o None (λt.Some (f t)) 9 | # def optBind : forall 'T 'U.Option['T] -> ('T -> Option['U]) -> Option['U] 10 | # = λo f.Option o None (λt.f t) 11 | 12 | # def hd : forall 'T. List['T] -> Option['T] 13 | # = λl. List l None (λh _.Some h) 14 | # def tl : forall 'T. List['T] -> Option[List['T]] 15 | # = λl. List l None (λ_ t.Some t) 16 | # def length : forall 'T. List['T] -> Integer 17 | # = λl. List l 0 (λ_ t.1 + (length t)) 18 | # def map : forall 'T 'U. List['T] -> ('T -> 'U) -> List['U] 19 | # = λl f. List l Nil (λh t.Cons (f h) (map t f)) 20 | 21 | # def id : forall 'a.'a -> 'a = λx.x 22 | # def const : forall 'a.'a -> 'a -> 'a = λx y.x 23 | 24 | # def foo : Integer -> Boolean = λx. if x == 0 then false else true 25 | 26 | # def fact : Integer -> Integer 27 | # = λn.if n == 0 then 1 else n * fact (n - 1) 28 | 29 | # def list : List[Integer] = (Cons 0 (Cons 1 (Cons 2 (Cons 3 (Cons 4 Nil))))) 30 | #def main : _ = map list fact 31 | 32 | # def baz : (Integer, Boolean) -> Integer = \x. x.0 33 | # def bar : (Integer, Boolean) = (baz (0, true), false) 34 | 35 | # def main : _ = bar 36 | 37 | # def main : Integer -> Integer -> List[Integer] -> List[Integer] 38 | # = \a b xs.let foo = \x.x + ((\_. a + b) ()) in 39 | # map xs foo 40 | # def main : Boolean -> Integer -> Integer = \x y.(if x then 5 + 6 + 7 + y else 0) + 1 41 | 42 | 43 | # def f : (forall 'a. 'a -> ('a, Integer)) -> Integer 44 | # = \x.(g 42).0 45 | # def g : forall 'a. 'a -> ('a, Integer) 46 | # = \x.(x, f g) 47 | 48 | #def main : Integer -> () -> () -> () -> () = \_ _ _ x.x 49 | #def main : Integer -> Integer = \x.1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + x 50 | #def main : Boolean -> Integer = \x.(if x then 1 + 1 else 1 * 2) 51 | #def main : Boolean -> () = \x.if x then () else () 52 | #def main : Boolean -> Integer -> Integer -> Integer = \x y z.(if x then y+1 else z+1) 53 | #def main : Integer -> Integer = λn.if n == 0 then 1 else n * main (n - 1) 54 | def main : _ = 55 | λn. let rec go = 56 | λn acc.if n == 1 57 | then acc 58 | else go (n - 1) (acc * n) 59 | in go n 1 60 | # def main : _ = 61 | # λn. let rec go = 62 | # λn acc.go n acc 63 | # in go n n 64 | # def main : Integer -> Integer = λn. let f = (\y.n + y) in f n 65 | 66 | #def main : Boolean -> Integer -> Integer = \x y.1 + (if x then y else 4) + 5 67 | #def main : Boolean -> Boolean -> Boolean = \x y. 
x && y 68 | # def infrec : Boolean -> Boolean = \x.infrec x 69 | # def main : Boolean = false && infrec true 70 | #def foo : _ = 0 71 | #def fact : Integer -> Integer = λn.if n == 0 then 1 else n * fact (n - 1) 72 | #def main : _ = (\x y.(if x then (\z.z + y + 5) else (\z.z)) (fact 5) + 1) true 0 73 | 74 | # def foo : forall 'T 'U. List['T] -> ('T -> 'U) -> List['U] 75 | # = λl f. match l with 76 | # | Nil -> Nil 77 | # | Cons h t -> Cons (f h) (foo t f) 78 | # end 79 | 80 | --------------------------------------------------------------------------------
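
Closing note (not a repository file): `support/typeck.ml` ends by elaborating its bundled example term (`let inferred = infer empty_cxt tm`) but never prints the result. The snippet below is a minimal driver sketch, assuming the definitions from that file are in scope (e.g. the snippet is appended after the `Dependent` module, or the file is loaded with `#use` in utop). The `show_tm` printer is a hypothetical helper added only for this illustration; it is not part of the repository.

```ocaml
(* Hypothetical driver sketch: assumes support/typeck.ml's Dependent module is
   in scope. `show_tm` is an ad-hoc core-term printer for this demo only. *)
let () =
  let open Dependent in
  let rec show_tm = function
    | Var (Ix i) -> Printf.sprintf "@%d" i
    | Lam (x, t) -> Printf.sprintf "(\\%s. %s)" x (show_tm t)
    | App (t, u) -> Printf.sprintf "(%s %s)" (show_tm t) (show_tm u)
    | U -> "U"
    | Pi (x, a, b) -> Printf.sprintf "((%s : %s) -> %s)" x (show_tm a) (show_tm b)
    | Let (x, a, t, u) ->
        Printf.sprintf "(let %s : %s = %s in %s)" x (show_tm a) (show_tm t)
          (show_tm u)
    | Meta (MetaVar m) -> Printf.sprintf "?%d" m
    | InsertedMeta (MetaVar m, _) -> Printf.sprintf "?%d[..]" m
  in
  let term, vty = inferred in
  (* Quote the inferred value-type back to a term at level 0 so it can be
     displayed; for the bundled `id`/`id1` example this should just be `U`. *)
  print_endline ("term : " ^ show_tm term);
  print_endline ("type : " ^ show_tm (quote (Lvl 0) vty))
```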