├── .editorconfig ├── .github └── workflows │ └── ci.yml ├── .gitignore ├── .vscode └── extensions.json ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Cargo.toml ├── LICENSE ├── README.md ├── book ├── .gitignore ├── README.md ├── assets │ └── pikelet.png ├── book.toml ├── index.js ├── package.json └── src │ ├── SUMMARY.md │ ├── development.md │ ├── development │ ├── bibliography.md │ ├── code-of-conduct.md │ ├── contributing.md │ ├── design.md │ ├── influences.md │ └── roadmap.md │ ├── guide.md │ ├── guide │ ├── installation.md │ └── using-the-repl.md │ ├── index.md │ ├── reference.md │ ├── reference │ ├── builtins.md │ ├── comments.md │ ├── functions.md │ ├── keywords.md │ ├── literals.md │ ├── names.md │ ├── records.md │ └── universes.md │ ├── specification.md │ └── specification │ ├── inspiration.md │ ├── textual-representation.md │ └── textual-representation │ ├── concrete-syntax.md │ └── lexical-syntax.md ├── examples ├── README.md ├── hello-world.pi ├── meta.pi ├── prelude.pi ├── record-mesh.pi └── window-settings.pi ├── package.json ├── pikelet-cli ├── Cargo.toml ├── README.md ├── src │ ├── check.rs │ ├── lib.rs │ ├── main.rs │ └── repl.rs └── tests │ └── source_tests.rs ├── pikelet-editor ├── Cargo.toml ├── README.md └── src │ └── lib.rs ├── pikelet-language-server ├── Cargo.toml ├── README.md └── src │ └── lib.rs ├── pikelet-test ├── Cargo.toml └── src │ └── lib.rs ├── pikelet ├── Cargo.toml ├── README.md ├── build.rs └── src │ ├── lang.rs │ ├── lang │ ├── anf.rs │ ├── cc.rs │ ├── core.rs │ ├── core │ │ ├── marshall.rs │ │ ├── semantics.rs │ │ └── typing.rs │ ├── surface.rs │ └── surface │ │ ├── grammar.lalrpop │ │ └── lexer.rs │ ├── lib.rs │ ├── literal.rs │ ├── pass.rs │ ├── pass │ ├── core_to_pretty.rs │ ├── core_to_surface.rs │ ├── surface_to_core.rs │ └── surface_to_pretty.rs │ └── reporting.rs ├── tests ├── comments.pi ├── functions.pi ├── literals.pi ├── record-term-deps.pi └── record-type-deps.pi └── yarn.lock /.editorconfig: 
-------------------------------------------------------------------------------- 1 | # https://editorconfig.org 2 | root = true 3 | 4 | [*] 5 | charset = utf-8 6 | end_of_line = lf 7 | indent_style = space 8 | insert_final_newline = true 9 | trim_trailing_whitespace = true 10 | 11 | [*.{md,yml}] 12 | indent_size = 2 13 | 14 | [*.{pi,rs,toml}] 15 | indent_size = 4 16 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | pull_request: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | check: 14 | runs-on: ubuntu-20.04 15 | strategy: 16 | matrix: 17 | rust: ["1.51.0", "stable", "beta", "nightly"] 18 | name: Check (${{ matrix.rust }}) 19 | steps: 20 | - uses: actions/checkout@v2 21 | - name: Install minimal ${{ matrix.rust }} 22 | uses: actions-rs/toolchain@v1 23 | with: 24 | profile: minimal 25 | toolchain: ${{ matrix.rust }} 26 | override: true 27 | - name: Run cargo check 28 | uses: actions-rs/cargo@v1 29 | with: 30 | command: check 31 | 32 | test: 33 | runs-on: ubuntu-20.04 34 | strategy: 35 | matrix: 36 | rust: ["1.51.0", "stable", "beta", "nightly"] 37 | name: Test Suite (${{ matrix.rust }}) 38 | steps: 39 | - uses: actions/checkout@v2 40 | - name: Install minimal ${{ matrix.rust }} 41 | uses: actions-rs/toolchain@v1 42 | with: 43 | profile: minimal 44 | toolchain: ${{ matrix.rust }} 45 | override: true 46 | - name: Run cargo test 47 | uses: actions-rs/cargo@v1 48 | with: 49 | command: test 50 | 51 | fmt: 52 | runs-on: ubuntu-20.04 53 | strategy: 54 | matrix: 55 | rust: ["1.51.0", "stable", "beta", "nightly"] 56 | name: Rustfmt (${{ matrix.rust }}) 57 | steps: 58 | - uses: actions/checkout@v2 59 | - name: Install minimal ${{ matrix.rust }} with rustfmt 60 | uses: actions-rs/toolchain@v1 61 | with: 62 | profile: minimal 63 | toolchain: ${{ matrix.rust }} 64 | 
override: true 65 | components: rustfmt 66 | - name: Run cargo fmt 67 | uses: actions-rs/cargo@v1 68 | with: 69 | command: fmt 70 | args: --all -- --check 71 | 72 | clippy: 73 | runs-on: ubuntu-20.04 74 | strategy: 75 | matrix: 76 | rust: ["stable"] 77 | # rust: ["1.51.0", "stable", "beta", "nightly"] 78 | name: Clippy (${{ matrix.rust }}) 79 | steps: 80 | - uses: actions/checkout@v2 81 | - name: Install minimal ${{ matrix.rust }} with clippy 82 | uses: actions-rs/toolchain@v1 83 | with: 84 | profile: minimal 85 | toolchain: ${{ matrix.rust }} 86 | override: true 87 | components: clippy 88 | - name: Run cargo clippy 89 | uses: actions-rs/cargo@v1 90 | with: 91 | command: clippy 92 | args: -- -D warnings 93 | 94 | book: 95 | runs-on: ubuntu-20.04 96 | name: Build and Test Book 97 | env: 98 | MDBOOK_VERSION: '0.4.4' 99 | MDBOOK_LINKCHECK_VERSION: '0.7.0' 100 | steps: 101 | - uses: actions/checkout@v2 102 | - name: Install mdBook 103 | # Install prebuilt binaries where possible to improve CI performance 104 | run: | 105 | mkdir -p "$HOME/mdbook" 106 | curl -L "https://github.com/rust-lang/mdBook/releases/download/v$MDBOOK_VERSION/mdbook-v$MDBOOK_VERSION-x86_64-unknown-linux-gnu.tar.gz" | tar xz -C "$HOME/mdbook" 107 | echo "${HOME}/mdbook/" >> $GITHUB_PATH 108 | mkdir -p "$HOME/mdbook-linkcheck" 109 | curl -L "https://github.com/Michael-F-Bryan/mdbook-linkcheck/releases/download/v$MDBOOK_LINKCHECK_VERSION/mdbook-linkcheck-v$MDBOOK_LINKCHECK_VERSION-x86_64-unknown-linux-gnu.tar.gz" | tar xz -C "$HOME/mdbook-linkcheck" 110 | echo "${HOME}/mdbook-linkcheck/" >> $GITHUB_PATH 111 | - name: Install Javascript dependencies 112 | run: yarn install 113 | working-directory: book 114 | - name: Build additional Javascript 115 | run: yarn build 116 | working-directory: book 117 | - name: Build book 118 | run: mdbook build 119 | working-directory: book 120 | - name: Test book 121 | run: mdbook test 122 | working-directory: book 123 | # TODO: Deploy to Github Pages on crate release 
124 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Rust 2 | /target/ 3 | **/*.rs.bk 4 | Cargo.lock 5 | 6 | # Yarn 7 | /node_modules 8 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "editorconfig.editorconfig" 4 | ] 5 | } 6 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 
6 | 7 | ## Our Standards 8 | 9 | Examples of behavior that contributes to creating a positive environment include: 10 | 11 | * Using welcoming and inclusive language 12 | * Being respectful of differing viewpoints and experiences 13 | * Gracefully accepting constructive criticism 14 | * Focusing on what is best for the community 15 | * Showing empathy towards other community members 16 | 17 | Examples of unacceptable behavior by participants include: 18 | 19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances 20 | * Trolling, insulting/derogatory comments, and personal or political attacks 21 | * Public or private harassment 22 | * Publishing others' private information, such as a physical or electronic address, without explicit permission 23 | * Other conduct which could reasonably be considered inappropriate in a professional setting 24 | 25 | ## Our Responsibilities 26 | 27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 28 | 29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 30 | 31 | ## Scope 32 | 33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 
34 | 35 | ## Enforcement 36 | 37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at bjzaba@yahoo.com.au. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 38 | 39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 40 | 41 | ## Attribution 42 | 43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] 44 | 45 | [homepage]: http://contributor-covenant.org 46 | [version]: http://contributor-covenant.org/version/1/4/ 47 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Code of Conduct 4 | 5 | Please note that this project is released with a [Code of Conduct](./CODE_OF_CONDUCT.md). 6 | By participating in this project you agree to abide by its terms. 7 | 8 | [code_of_conduct]: https://github.com/pikelet-lang/pikelet/blob/main/CODE_OF_CONDUCT.md 9 | 10 | ## Matrix room 11 | 12 | Joining the matrix room at [#pikelet:matrix.org][pikelet-matrix] is a good way to get in touch with the developers and community. 13 | 14 | [pikelet-matrix]: https://app.element.io/#/room/#pikelet:matrix.org 15 | 16 | ## Prerequisites 17 | 18 | We use [Rust][rust] as our implementation language, which can be installed using the [rustup] tool. 
19 | 20 | For the best experience in working with Rust we also recommend installing IDE support for your editor of choice: 21 | 22 | - [Rust Analyzer][rust-analyzer] (for VS Code, Vim, Emacs, etc.) 23 | - [IntelliJ Rust][intellij-rust] (for IntelliJ-based IDEs) 24 | 25 | You can learn more about programming in Rust by reading [The Rust Programming Language][rust-book]. 26 | 27 | [rust]: https://www.rust-lang.org/ 28 | [rustup]: https://rustup.rs/ 29 | [rust-analyzer]: https://rust-analyzer.github.io/ 30 | [intellij-rust]: https://intellij-rust.github.io/ 31 | [rust-book]: https://doc.rust-lang.org/book/ 32 | 33 | ## Workflow 34 | 35 | Follow these steps to contribute to the project: 36 | 37 | 1. Make a fork of the [Pikelet repository][pikelet-repo]. 38 | 1. Within your fork, create a branch for your contribution. Use a meaningful name. 39 | 1. Create your contribution, meeting all [contribution quality standards](#quality-standards). 40 | 1. Ensure all the tests pass (`cargo test`). 41 | 1. [Create a pull request][create-a-pr] against the `main` branch of the repository. 42 | 1. Once the pull request is reviewed and CI passes, it will be merged. 43 | 44 | [pikelet-repo]: https://github.com/pikelet-lang/pikelet/ 45 | [create-a-pr]: https://help.github.com/articles/creating-a-pull-request-from-a-fork/ 46 | 47 | ## Quality Standards 48 | 49 | Most quality and style standards are checked automatically by the CI build. 50 | Contributions should: 51 | 52 | - Separate each **logical change** into its own commit. 53 | - Include tests for any new functionality in your pull request. 54 | - Document public functions. 55 | - Format code with `cargo fmt`. 56 | - Avoid adding `unsafe` code. 57 | If it is necessary, provide an explanatory comment on any `unsafe` block explaining its rationale and why it's safe. 58 | - Add a descriptive message for each commit. Follow [these commit message guidelines][commit-messages]. 59 | - Document your pull requests. 
Include the reasoning behind each change, and the testing done. 60 | 61 | [commit-messages]: https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html 62 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "./pikelet", 4 | "./pikelet-cli", 5 | "./pikelet-editor", 6 | "./pikelet-language-server", 7 | "./pikelet-test", 8 | ] 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Pikelet! 2 | 3 | ![Pikelet Mascot][pikelet-mascot] 4 | 5 | [pikelet-mascot]: ./book/assets/pikelet.png 6 | 7 | [![Actions Status][actions-badge]][actions-url] 8 | [![Matrix][matrix-badge]][matrix-lobby] 9 | [![License][license-badge]][license-url] 10 | 11 | [actions-badge]: https://github.com/pikelet-lang/pikelet/workflows/ci/badge.svg 12 | [actions-url]: https://github.com/pikelet-lang/pikelet/actions 13 | [matrix-badge]: https://img.shields.io/matrix/pikelet:matrix.org?label=%23pikelet%3Amatrix.org 14 | [matrix-lobby]: https://app.element.io/#/room/#pikelet:matrix.org 15 | [license-badge]: https://img.shields.io/github/license/pikelet-lang/pikelet 16 | [license-url]: ./LICENSE 17 | 18 | Pikelet is a small, functional, dependently typed programming language. 
19 | 20 | Dependent types allow us to do a bunch of really interesting things, like using 21 | records for modules, declaring the length of arrays at the type level, and 22 | much more — many of great utility for low-level and high-level code alike! 23 | 24 | We hope to one day grow Pikelet into a fully-fledged systems programming language, 25 | with support for unboxed data types, control over memory layout and allocation 26 | strategy, linear types, and a flexible phase distinction and support for calling 27 | other languages. At the moment however we've only implemented a type checker and 28 | very slow interpreter, so don't get your hopes up too much yet! There's still a 29 | whole lot to do before it is even remotely useful to anyone! 😅 30 | 31 | ## Roadmap 32 | 33 | Check out our plans in [the roadmap](./book/src/development/roadmap.md). 34 | 35 | ## Code of Conduct 36 | 37 | Please note that this project is released with a [Code of Conduct](./CODE_OF_CONDUCT.md). 38 | By participating in this project you agree to abide by its terms. 39 | -------------------------------------------------------------------------------- /book/.gitignore: -------------------------------------------------------------------------------- 1 | # mdBook 2 | /build 3 | 4 | # Yarn 5 | /node_modules 6 | 7 | # Parcel 8 | /.cache 9 | /dist 10 | -------------------------------------------------------------------------------- /book/README.md: -------------------------------------------------------------------------------- 1 | # Pikelet Book 2 | 3 | To build the book, you will first need to [install mdBook][install-mdbook] and [mdbook-linkcheck]: 4 | 5 | ```sh 6 | cargo install mdbook mdbook-linkcheck 7 | ``` 8 | 9 | Note that for consistency we use specific versions of these tools on CI, 10 | so the one you install might be newer than the one used to build and deploy the book. 11 | To check the versions we currently assume, look at the [workflows directory](../.github/workflows). 
12 | 13 | ## Building additional JavaScript 14 | 15 | In order to highlight the Pikelet code examples in the book we override mdBook's built-in [highlight.js] with our own. 16 | To build the highlighting code, run the following commands using [Yarn]: 17 | 18 | ```sh 19 | yarn workspace book install 20 | yarn workspace book build 21 | ``` 22 | 23 | You will need to rebuild the book or restart the mdBook server for changes to take effect. 24 | 25 | [highlight.js]: https://highlightjs.org/ 26 | [Yarn]: https://yarnpkg.com/ 27 | 28 | ## Running the mdBook server 29 | 30 | You can then serve the documentation locally by calling the [`serve` command][mdbook-serve] 31 | from the `book` directory: 32 | 33 | ```sh 34 | mdbook serve 35 | ``` 36 | 37 | Alternatively it can be called from the root of the repository: 38 | 39 | ```sh 40 | mdbook serve book 41 | ``` 42 | 43 | [install-mdbook]: https://rust-lang.github.io/mdBook/cli/index.html#install-cratesio-version 44 | [mdbook-serve]: https://rust-lang.github.io/mdBook/cli/serve.html 45 | [mdbook-linkcheck]: https://github.com/Michael-F-Bryan/mdbook-linkcheck#getting-started 46 | -------------------------------------------------------------------------------- /book/assets/pikelet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pikelet-lang/pikelet/8752087d4b8d4dafae4ac3c051df49b993a28ed2/book/assets/pikelet.png -------------------------------------------------------------------------------- /book/book.toml: -------------------------------------------------------------------------------- 1 | [book] 2 | title = "Pikelet Book" 3 | authors = ["YesLogic Pty. Ltd. 
"] 4 | description = "Documentation for the Pikelet programming language" 5 | language = "en" 6 | multilingual = false 7 | src = "src" 8 | 9 | [build] 10 | build-dir = "build" 11 | 12 | [output.html] 13 | additional-js = [ 14 | "dist/index.js", 15 | ] 16 | 17 | [output.html.redirect] 18 | "/installation/index.html" = "./guide/installation.html" 19 | "/language/index.html" = "./reference.html" 20 | # "/language/conditionals.html" = TODO 21 | "/language/functions.html" = "./reference/functions.html" 22 | "/language/records.html" = "./reference/records.html" 23 | # "/language/bindings.html" = TODO 24 | # "/language/type-inference.html" = TODO 25 | "/language/universes.html" = "./reference/universes.html" 26 | "/appendix/index.html" = "./specification.html" 27 | "/appendix/design.html" = "./development/design-goals.html" 28 | "/appendix/theory.html" = "./specification.html" 29 | # "/appendix/influences.html" = TODO 30 | # "/appendix/references.html" = TODO 31 | 32 | # [output.linkcheck] 33 | # exclude = [ 34 | # '\./contributing\.md', # Bypass `traverse-parent-directories` for this symlink 35 | # '\./code-of-conduct\.md', # Bypass `traverse-parent-directories` for this symlink 36 | # ] 37 | -------------------------------------------------------------------------------- /book/index.js: -------------------------------------------------------------------------------- 1 | import hljs from "highlight.js/lib/core"; 2 | 3 | hljs.registerLanguage("pikelet", (hljs) => { 4 | const KEYWORDS = { 5 | keyword: "as fun Fun record Record", 6 | built_in: "Type Bool true false U8 U16 U32 U64 S8 S16 S32 S64 F32 F64 String Char Array List", 7 | }; 8 | 9 | const CHARACTER = { 10 | className: "string", 11 | begin: /'([^'\\]|\\.)*'/, 12 | }; 13 | const STRING = { 14 | className: "string", 15 | begin: /"([^"\\]|\\.)*"/, 16 | }; 17 | const NUMBER = { 18 | className: "number", 19 | begin: /\b[-+]?[0-9][a-zA-Z0-9_\.]*\b/, 20 | relevance: 0, 21 | }; 22 | 23 | const COMMENT = { 24 | variants: [ 
25 | hljs.COMMENT("--", "$"), 26 | hljs.COMMENT("|||", "$"), 27 | ], 28 | }; 29 | 30 | return { 31 | name: "Pikelet", 32 | keywords: KEYWORDS, 33 | contains: [ 34 | STRING, 35 | CHARACTER, 36 | NUMBER, 37 | 38 | COMMENT, 39 | 40 | { begin: "->|<-" }, // No markup, relevance booster 41 | ], 42 | }; 43 | }); 44 | 45 | window.addEventListener("load", (event) => { 46 | document 47 | .querySelectorAll("code.language-pikelet") 48 | .forEach((block) => hljs.highlightBlock(block)); 49 | }); 50 | -------------------------------------------------------------------------------- /book/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "book", 3 | "version": "0.0.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "parcel index.js --no-source-maps", 7 | "build": "parcel build index.js --no-source-maps" 8 | }, 9 | "devDependencies": { 10 | "parcel-bundler": "^1.12.4" 11 | }, 12 | "dependencies": { 13 | "highlight.js": "^10.4.1" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /book/src/SUMMARY.md: -------------------------------------------------------------------------------- 1 | # Summary 2 | 3 | [Pikelet](index.md) 4 | 5 | - [Guide](./guide.md) 6 | - [Installation](./guide/installation.md) 7 | - [Using the REPL](./guide/using-the-repl.md) 8 | - [Compiling Standalone Programs]() 9 | - [Pikelet as a Configuration Language]() 10 | - [Pikelet as a Scripting Language]() 11 | 12 | - [Reference](./reference.md) 13 | - [Comments](./reference/comments.md) 14 | - [Keywords](./reference/keywords.md) 15 | - [Names](./reference/names.md) 16 | - [Builtins](./reference/builtins.md) 17 | - [Literals](./reference/literals.md) 18 | - [Universes](./reference/universes.md) 19 | - [Functions](./reference/functions.md) 20 | - [Records](./reference/records.md) 21 | 22 | - [Specification](./specification.md) 23 | - [Core Language]() 24 | - [Semantics]() 25 | - [Typing]() 26 | - [Surface 
Language]() 27 | - [Elaboration]() 28 | - [Textual Representation](./specification/textual-representation.md) 29 | - [Lexical Syntax](./specification/textual-representation/lexical-syntax.md) 30 | - [Concrete Syntax](./specification/textual-representation/concrete-syntax.md) 31 | - [Inspiration](./specification/inspiration.md) 32 | 33 | - [Development](./development.md) 34 | - [Contributing](./development/contributing.md) 35 | - [Code of Conduct](./development/code-of-conduct.md) 36 | - [Roadmap](./development/roadmap.md) 37 | - [Design](./development/design.md) 38 | - [Influences](./development/influences.md) 39 | - [Bibliography](./development/bibliography.md) 40 | -------------------------------------------------------------------------------- /book/src/development.md: -------------------------------------------------------------------------------- 1 | # Development 2 | 3 | Development documentation for contributors to the Pikelet programming language. 4 | 5 | ## Summary 6 | 7 | - [Contributing](./development/contributing.md) 8 | - [Code of Conduct](./development/code-of-conduct.md) 9 | - [Roadmap](./development/roadmap.md) 10 | - [Design](./development/design.md) 11 | - [Influences](./development/influences.md) 12 | - [Bibliography](./development/bibliography.md) 13 | -------------------------------------------------------------------------------- /book/src/development/bibliography.md: -------------------------------------------------------------------------------- 1 | # Bibliography 2 | 3 | The following resources where helpful when designing and building Pikelet. 4 | 5 | ### How to implement dependent type theory I 6 | 7 | Andrej Bauer
8 | Blog post, 2012.
9 | [blog post](http://math.andrej.com/2012/11/08/how-to-implement-dependent-type-theory-i/) 10 | 11 | ### How to implement dependent type theory II 12 | 13 | Andrej Bauer
14 | Blog post, 2012.
15 | [blog post](http://math.andrej.com/2012/11/11/how-to-implement-dependent-type-theory-ii/) 16 | 17 | ### How to implement dependent type theory III 18 | 19 | Andrej Bauer
20 | Blog post, 2012.
21 | [blog post](http://math.andrej.com/2012/11/29/how-to-implement-dependent-type-theory-iii/) 22 | 23 | ### A simple type-theoretic language: Mini-TT 24 | 25 | Thierry Coquand, Yoshiki Kinoshita, Bengt Nordström, and Makoto Takeyama
26 | Essays in Honour of Gilles Kahn, 2009.
27 | [paper](http://www.cse.chalmers.se/~bengt/papers/GKminiTT.pdf) 28 | 29 | ### Bidirectional Typing Rules: A Tutorial 30 | 31 | David Raymond Christiansen
32 | Tutorial, 2013.
33 | [paper](http://www.davidchristiansen.dk/tutorials/bidirectional.pdf) 34 | 35 | ### Checking Dependent Types with Normalization by Evaluation: A Tutorial 36 | 37 | David Thrane Christiansen
38 | Web page, 2018 (Last accessed 2020).
39 | [web page](http://www.davidchristiansen.dk/tutorials/nbe/) 40 | 41 | ### A tutorial implementation of a dependently typed lambda calculus 42 | 43 | Andres Löh, Conor McBride, and Wouter Swierstra
44 | Fundamenta Informaticae XXI, 2001 (Revised 2009).
45 | [paper](https://www.andres-loeh.de/LambdaPi/LambdaPi.pdf) - 46 | [abstract](https://www.andres-loeh.de/LambdaPi/) 47 | 48 | 61 | 62 | ### Lecture Notes on Bidirectional Type Checking 63 | 64 | Frank Pfenning
65 | Lecture Notes, 2004.
66 | [paper](https://www.cs.cmu.edu/~fp/courses/15312-f04/handouts/15-bidirectional.pdf) 67 | 68 | 75 | -------------------------------------------------------------------------------- /book/src/development/code-of-conduct.md: -------------------------------------------------------------------------------- 1 | ../../../CODE_OF_CONDUCT.md -------------------------------------------------------------------------------- /book/src/development/contributing.md: -------------------------------------------------------------------------------- 1 | ../../../CONTRIBUTING.md -------------------------------------------------------------------------------- /book/src/development/design.md: -------------------------------------------------------------------------------- 1 | # Design 2 | 3 | ## Design Principles 4 | 5 | - Empower our users through careful design, rather than being driven by familiarity. 6 | - Surface level features should decompose into a simple, typed core. 7 | - The top-level should not be [hopeless](https://gist.github.com/samth/3083053). 8 | - Programs should not have to pay for things that they do not use. 9 | - Pikelet should work well for high level and low level, resource constrained applications. 10 | - It should be easy to bootstrap the language fro a new platform and cross-compile programs. 11 | - Diagnostics should be clear and easy to understand. 12 | - Features should behave predictably, without relying on complicated, hard to understand heuristics. 13 | - Features should have a high power-to-weight ratio. 14 | 15 | ## Research to Watch 16 | 17 | There are a number of exciting areas of research that are worth keeping an eye on: 18 | 19 | - Dependent types 20 | - High performance elaboration 21 | - Effects and Coeffects 22 | - Algebraic Effects and Handlers 23 | - Effect Runners 24 | - Graded Modal Type Theory 25 | - Quantitative Type Theory 26 | - Multistage Programming 27 | - Call by Push Value 28 | - Codata vs. 
Data 29 | - Modular Programming with Dependent Records 30 | - Fancy Dependencies 31 | - Self Types 32 | - Dependent Intersection 33 | - Very Dependent Types 34 | - Datatype Generic Programming 35 | - Levitated data descriptions 36 | - Data Layout Interpretations 37 | - Ornamented Data Types 38 | - Projectional Editors 39 | -------------------------------------------------------------------------------- /book/src/development/influences.md: -------------------------------------------------------------------------------- 1 | # Influences 2 | 3 | Some languages have been inspiring when building Pikelet. 4 | We list some of them here, and the contributions they have made in our thinking. 5 | These ideas may or may not be included in the final Pikelet language, but they are worth mentioning! 6 | 7 | ## 1ML 8 | 9 | Links: 10 | 11 | - [Website](https://people.mpi-sws.org/~rossberg/1ml/) 12 | 13 | Things we love: 14 | 15 | - focus on simplicity 16 | - combines module language of ML with dependent records 17 | - formalized foundation 18 | 19 | ## Agda 20 | 21 | Links: 22 | 23 | - [Wiki](http://wiki.portal.chalmers.se/agda/pmwiki.php) 24 | 25 | Things we love: 26 | 27 | - interactive editing 28 | - dependent types 29 | - dependent records 30 | - implicit arguments 31 | - instance arguments for emulating type classes 32 | - codata for unbounded data 33 | - totality checking 34 | - inductive data types 35 | 36 | ## ATS 37 | 38 | Links: 39 | 40 | - [Website](http://www.ats-lang.org/) 41 | 42 | Things we love: 43 | 44 | - dependent types 45 | - proofs can be generated by SMT solvers 46 | - high level of performance, low level interoperability 47 | - unboxed data types 48 | 49 | ## D 50 | 51 | Links: 52 | 53 | - [Website](http://dlang.org) 54 | 55 | Things we love: 56 | 57 | - strong support for static metaprogramming 58 | - [design by introspection](https://dconf.org/2017/talks/alexandrescu.pdf) 59 | (ie. 
breaking parametricity for performance optimization and metaprogramming purposes) 60 | - low-level control and high-level abstraction 61 | - fast compiler, short iteration times 62 | 63 | ## Dhall 64 | 65 | Links: 66 | 67 | - [Website](https://github.com/dhall-lang/) 68 | 69 | Things we love: 70 | 71 | - simple core language 72 | - dependent types 73 | - total language 74 | - structural records 75 | 76 | ## Discus (formerly DDC) 77 | 78 | Links: 79 | 80 | - [Website](http://www.discus-lang.org/) 81 | 82 | ## Elm 83 | 84 | Links: 85 | 86 | - [Website](http://elm-lang.org/) 87 | 88 | Things we love: 89 | 90 | - focus on usability, and adoption 91 | - friendly marketing 92 | - welcoming community 93 | - best-in-class error messages 94 | - row polymorphism 95 | 96 | ## F* 97 | 98 | Links: 99 | 100 | - [Website](https://www.fstar-lang.org/) 101 | 102 | Things we love: 103 | 104 | - combining SMT solvers with explicit proofs 105 | - combining effects with dependent types 106 | 107 | ## Gluon 108 | 109 | Links: 110 | 111 | - [Repository](https://github.com/gluon-lang/gluon) 112 | 113 | Things we love: 114 | 115 | - strict evaluation 116 | - focus on simplicity 117 | - embeddable in Rust programs 118 | - using records as a basis for the module system 119 | 120 | ## Granule 121 | 122 | Links: 123 | 124 | - [Repository](https://github.com/dorchard/granule/) 125 | 126 | Things we love: 127 | 128 | - combining coeffects with effects in one language 129 | 130 | ## Idris 131 | 132 | Links: 133 | 134 | - [Website](https://www.idris-lang.org/) 135 | - [Documentation](http://docs.idris-lang.org) 136 | 137 | Things we love: 138 | 139 | - focus on making dependently typed programming practical 140 | - interactive editing 141 | - simple core type theory 142 | - opt-out totality checking 143 | - nice, accessible documentation, focused on real-world examples 144 | - universe checking 145 | - aggressive erasure 146 | - linear types 147 | - compilation to native code 148 | - elaborator 
reflection 149 | - ad-hoc interfaces desugar into records 150 | - effects system as a library 151 | - state machine library 152 | 153 | ## Koka 154 | 155 | Links: 156 | 157 | - [Website](https://www.microsoft.com/en-us/research/project/koka/) 158 | 159 | Things we love: 160 | 161 | - algebraic effects and handlers 162 | - nice library documentation, with clickable links, etc. 163 | 164 | ## Lean 165 | 166 | Links: 167 | 168 | - [Website](http://leanprover.github.io) 169 | 170 | Things we love: 171 | 172 | - focus on responsive interactive development 173 | - metaprogramming support using Lean 174 | - simple kernel language 175 | 176 | ## OCaml 177 | 178 | Links: 179 | 180 | - [Website](https://ocaml.org/) 181 | - [Repository (Multicore)](https://github.com/ocamllabs/ocaml-multicore) 182 | - [Repository (Modular implicits)](https://github.com/ocamllabs/ocaml-modular-implicits) 183 | 184 | Things we love: 185 | 186 | - module system 187 | - algebraic effects 188 | - modular implicits 189 | - row polymorphism 190 | - structural records 191 | - efficient (and _fast_) code generation 192 | 193 | ## Rust 194 | 195 | Links: 196 | 197 | - [Website](http://rust-lang.org/) 198 | 199 | Things we love: 200 | 201 | - friendly community 202 | - non-uniform, unboxed data layout 203 | - predictable optimizations 204 | - focus on systems programming 205 | - having safety and low level code under one roof 206 | - infectious 'unsafe' keyword 207 | - strict evaluation 208 | - opt-in garbage collection 209 | - minimal runtime 210 | - bootstrapped itself off a foundation of C libraries 211 | 212 | ## Sixten 213 | 214 | Links: 215 | 216 | - [Repository](https://github.com/ollef/sixten) 217 | 218 | Things we love: 219 | 220 | - non-uniform, unboxed data layout 221 | - dependent types 222 | 223 | ## Ur 224 | 225 | Links: 226 | 227 | - [Website](http://www.impredicative.com/ur/) 228 | 229 | Things we love: 230 | 231 | - Statically typed metaprogramming with type-level records 232 | 
-------------------------------------------------------------------------------- /book/src/development/roadmap.md: -------------------------------------------------------------------------------- 1 | # Roadmap 2 | 3 | Our main aim is to start off with a simple configuration language, like [Dhall][dhall]. 4 | From there we will progressively add features to gain more flexibility. 5 | We want to provide a textual syntax up-front, 6 | but we should aim to keep the core language reasonably decoupled from this, 7 | allowing us to provide support for [projectional editing][structure-editor-wikipedia] in the future. 8 | 9 | [dhall]: https://dhall-lang.org/ 10 | [structure-editor-wikipedia]: https://en.wikipedia.org/wiki/Structure_editor 11 | 12 | You can read more about what we hope to achieve in [_Pondering the next version of Pikelet_][next-pikelet]. 13 | 14 | [next-pikelet]: https://gist.github.com/brendanzab/eba7015e6345abe79a57a704091820bb/. 15 | 16 | ### Language 17 | 18 | - Basic config language 19 | - [x] Comments 20 | - [x] Boolean literals/constants 21 | - [x] Integer literals/constants 22 | - [x] Float literals/constants 23 | - [x] Character literals/constants 24 | - [x] String literals/constants 25 | - [x] Record terms 26 | - [x] Non-dependent record types 27 | - [x] Dynamically sized arrays 28 | - [x] Fixed sized arrays 29 | - Basic programming language 30 | - [x] Improved literal parsing 31 | - [x] Annotated terms 32 | - [ ] Let expressions 33 | - [x] Record field lookups 34 | - [ ] Import expressions 35 | - [x] Function terms 36 | - [x] Non-dependent function types 37 | - [ ] Enumeration sets 38 | - [ ] Pattern matching 39 | - [ ] Recursive terms 40 | - Dependently typed language 41 | - [x] Dependent record types 42 | - [x] Dependent function types 43 | - [ ] Equality (identity) types 44 | - [ ] Universe levels 45 | - [ ] Multi-stage programming 46 | - [ ] Quantitative type theory 47 | 48 | ### Projections 49 | 50 | - [x] Surface → Pretty 51 | - [x] Surface 
→ Core 52 | - [x] Core → Pretty 53 | - [x] Core → Value 54 | - [ ] Core → Binary 55 | - [ ] Core → Documentation 56 | - [ ] Core → Cranelift 57 | - [ ] Core → LLVM 58 | - [ ] Value → JSON/YAML/TOML 59 | 60 | ### Tooling 61 | 62 | - [x] REPL 63 | - [ ] Package manager 64 | - [ ] Auto-formatter for surface language 65 | - [ ] Structured editor 66 | 67 | ### Testing 68 | 69 | - [x] Language samples 70 | - [ ] Feature tests 71 | - [ ] Property based tests 72 | 73 | ### Diagnostics 74 | 75 | - [x] Basic error enum 76 | - [x] Error recovery 77 | - [x] Pretty diagnostic reporting 78 | 79 | ### Rust marshalling 80 | 81 | - [x] Marshalling traits 82 | - [ ] Improved error messages 83 | - [ ] Nicer marshalling API 84 | - [ ] Derive macro for generating marshalling trait implementations 85 | - [ ] More efficient, visitor based marshalling 86 | -------------------------------------------------------------------------------- /book/src/guide.md: 1 | # Language Guide 2 | 3 | Welcome to the Pikelet Language Guide! 4 | This part of the documentation will guide you through the installation of Pikelet, 5 | and show you how to start writing your own programs. 6 | 7 | The aim for this guide is to be as accessible as possible! 8 | Once you feel comfortable with the basics more detailed descriptions of Pikelet's features can be found in the [language reference]. 
9 | 10 | [language reference]: ./reference.md 11 | 12 | ## Summary 13 | 14 | - [Installation](./guide/installation.md) 15 | - [Using the REPL](./guide/using-the-repl.md) 16 | - [Compiling Standalone Programs]() 17 | - [Pikelet as a Configuration Language]() 18 | - [Pikelet as a Scripting Language]() 19 | -------------------------------------------------------------------------------- /book/src/guide/installation.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | Pikelet is written in [Rust][rust-site] and therefore needs to be compiled with 4 | Cargo, because we don't yet offer prebuilt binaries. If you haven't already 5 | installed Rust, please [install it][rust-install] now! 6 | 7 | [rust-site]: https://www.rust-lang.org/ 8 | [rust-install]: https://www.rust-lang.org/downloads.html 9 | 10 | ## Cloning the source from Github 11 | 12 | We've not yet published Pikelet on [crates.io][crates-io], so you'll first need 13 | to clone [the repository][pikelet-repository] using git: 14 | 15 | ```sh 16 | git clone https://github.com/pikelet-lang/pikelet.git 17 | cd pikelet 18 | ``` 19 | 20 | [crates-io]: https://crates.io/ 21 | [pikelet-repository]: https://github.com/pikelet-lang/pikelet 22 | 23 | ## Running the REPL 24 | 25 | After cloning, you can now run the [REPL][repl-wikipedia] using Cargo: 26 | 27 | ```sh 28 | cargo run repl 29 | ``` 30 | 31 | You will now need to wait for Cargo to download and build the dependencies, but 32 | sooner or later the REPL will be ready for you to interact with! 
33 | 34 | [repl-wikipedia]: https://en.wikipedia.org/wiki/Read%E2%80%93eval%E2%80%93print_loop 35 | -------------------------------------------------------------------------------- /book/src/guide/using-the-repl.md: -------------------------------------------------------------------------------- 1 | # Using the REPL 2 | 3 | If you have [installed Pikelet][installation], you can run the REPL by running this command in the terminal: 4 | 5 | ```sh 6 | pikelet repl 7 | ``` 8 | 9 | [installation]: ./installation 10 | 11 | The REPL should appear in the terminal like so: 12 | 13 | ```text 14 | $ pikelet repl 15 | ____ _ __ __ __ 16 | / __ \(_) /_____ / /__ / /_ 17 | / /_/ / / //_/ _ \/ / _ \/ __/ Version 0.1.0 18 | / ____/ / ,< / __/ / __/ /_ https://github.com/pikelet-lang/pikelet 19 | /_/ /_/_/|_|\___/_/\___/\__/ :? for help 20 | 21 | > 22 | ``` 23 | 24 | "REPL" stands for "Read-eval-print-loop" and is a nice way to experiment with Pikelet in an interactive way. 25 | You can enter Pikelet terms into the REPL after the `>`. For example: 26 | 27 | ```pikelet 28 | > "Hello world!" 29 | ``` 30 | 31 | By pressing Enter, you can 'normalize' the term, and see its type: 32 | 33 | ```pikelet 34 | > "Hello world!" 35 | "Hello world!" : String 36 | ``` 37 | -------------------------------------------------------------------------------- /book/src/index.md: -------------------------------------------------------------------------------- 1 | # Pikelet! 
2 | 3 | [![Actions Status][actions-badge]][actions-url] 4 | [![Matrix][matrix-badge]][matrix-lobby] 5 | [![License][license-badge]][license-url] 6 | [![GitHub stars][stars-badge]][github-url] 7 | 8 | [actions-badge]: https://github.com/pikelet-lang/pikelet/workflows/ci/badge.svg 9 | [actions-url]: https://github.com/pikelet-lang/pikelet/actions 10 | [matrix-badge]: https://img.shields.io/matrix/pikelet:matrix.org?label=%23pikelet%3Amatrix.org 11 | [matrix-lobby]: https://app.element.io/#/room/#pikelet:matrix.org 12 | [license-badge]: https://img.shields.io/github/license/pikelet-lang/pikelet 13 | [license-url]: https://github.com/pikelet-lang/pikelet/blob/main/LICENSE 14 | [stars-badge]: https://img.shields.io/github/stars/pikelet-lang/pikelet?style=social 15 | [github-url]: https://github.com/pikelet-lang/pikelet 16 | 17 | ![Pikelet Mascot][pikelet-mascot] 18 | 19 | [pikelet-mascot]: ../assets/pikelet.png 20 | 21 | Pikelet is a small [dependently typed][dependent-type-wikipedia] language. It 22 | doesn't do many interesting things yet, but hopefully that will change in the future! 23 | 24 | [dependent-type-wikipedia]: https://en.wikipedia.org/wiki/Dependent_type 25 | 26 | > **Note:** 27 | > 28 | > Pikelet is still a work in progress! Many features are not implemented yet! 29 | > 30 | > If you'd like to see what we hope to work on next, have a look at [the roadmap](./development/roadmap). 31 | 32 | ## A small taste 33 | 34 | Definitions: 35 | 36 | ```pikelet 37 | record { 38 | id : Fun (A : Type) -> A -> A, 39 | id A a = a, 40 | 41 | always : Fun (A B : Type) -> A -> B -> A, 42 | always A B a b = a, 43 | } 44 | ``` 45 | 46 | Interactive REPL: 47 | 48 | ```text 49 | $ pikelet repl 50 | ____ _ __ __ __ 51 | / __ \(_) /_____ / /__ / /_ 52 | / /_/ / / //_/ _ \/ / _ \/ __/ Version 0.1.0 53 | / ____/ / ,< / __/ / __/ /_ https://github.com/pikelet-lang/pikelet 54 | /_/ /_/_/|_|\___/_/\___/\__/ :? 
for help 55 | 56 | > (fun A a => a : Fun (A : Type) -> A -> A) String "hello" 57 | "hello" : String 58 | ``` 59 | 60 | ## Summary 61 | 62 | - [Guide](./guide.md): For people new to Pikelet 63 | - [Reference](./reference.md): For people who need detailed descriptions of individual language features 64 | - [Development](./development.md): For people wanting to contribute to the language 65 | - [Specification](./specification.md): For developers and researchers 66 | -------------------------------------------------------------------------------- /book/src/reference.md: 1 | # Language Reference 2 | 3 | This part of the documentation is a reference-level description of Pikelet, 4 | intended for Pikelet users who want a comprehensive description of Pikelet's surface-level features. 5 | 6 | This is _not_ a precise description of the Pikelet language. 7 | A more precise description of the concrete syntax, elaboration, 8 | and core language of Pikelet can be found in the [language specification]. 
9 | 10 | [language specification]: ./specification.md 11 | 12 | ## Summary 13 | 14 | - [Comments](./reference/comments.md) 15 | - [Keywords](./reference/keywords.md) 16 | - [Names](./reference/names.md) 17 | - [Builtins](./reference/builtins.md) 18 | - [Literals]() 19 | - [Universes](./reference/universes.md) 20 | - [Functions](./reference/functions.md) 21 | - [Records](./reference/records.md) 22 | -------------------------------------------------------------------------------- /book/src/reference/builtins.md: 1 | # Builtins 2 | 3 | Pikelet has a number of builtin types, which we now describe here: 4 | 5 | ## Booleans 6 | 7 | ```pikelet 8 | Bool : Type 9 | ``` 10 | 11 | Booleans have two constructors, `true` and `false`: 12 | 13 | ```pikelet 14 | true : Bool 15 | false : Bool 16 | ``` 17 | 18 | ## Unsigned integers 19 | 20 | Unsigned integers are defined via the following built-ins: 21 | 22 | ```pikelet 23 | U8 : Type 24 | U16 : Type 25 | U32 : Type 26 | U64 : Type 27 | ``` 28 | 29 | Unsigned integers can be constructed using numeric literals: 30 | 31 | ```pikelet 32 | 0 : U8 33 | 42 : U32 34 | 0x2F : U16 35 | ``` 36 | 37 | ## Signed integers 38 | 39 | Two's complement, signed integers are defined via the following built-ins: 40 | 41 | ```pikelet 42 | S8 : Type 43 | S16 : Type 44 | S32 : Type 45 | S64 : Type 46 | ``` 47 | 48 | Signed integers can be constructed using numeric literals: 49 | 50 | ```pikelet 51 | 0 : S8 52 | +42 : S32 53 | -42 : S32 54 | 0x2F : S16 55 | ``` 56 | 57 | ## Floating point numbers 58 | 59 | ```pikelet 60 | F32 : Type 61 | F64 : Type 62 | ``` 63 | 64 | ## Strings 65 | 66 | ```pikelet 67 | String : Type 68 | ``` 69 | 70 | Strings can be constructed using string literals. 
For example: 71 | 72 | ```pikelet 73 | "hello" : String 74 | ``` 75 | 76 | ## Characters 77 | 78 | ```pikelet 79 | Char : Type 80 | ``` 81 | 82 | Characters can be constructed using character literals. For example: 83 | 84 | ```pikelet 85 | 'A' : Char 86 | '가' : Char 87 | '🥞' : Char 88 | ``` 89 | 90 | ## Lists 91 | 92 | Lists are ordered sequences of terms. 93 | 94 | ```pikelet 95 | List : Type -> Type 96 | ``` 97 | 98 | Lists can be constructed using sequences. For example: 99 | 100 | ```pikelet 101 | [] : List F32 102 | [1, 2, 3] : List F32 103 | ``` 104 | 105 | ## Arrays 106 | 107 | Arrays are ordered sequences of terms, with a length specified in the type. 108 | 109 | ```pikelet 110 | Array : U32 -> Type -> Type 111 | ``` 112 | 113 | Arrays can be constructed using sequences. For example: 114 | 115 | ```pikelet 116 | [] : Array 0 F32 117 | [1, 2, 3] : Array 3 F32 118 | ``` 119 | -------------------------------------------------------------------------------- /book/src/reference/comments.md: 1 | # Comments 2 | 3 | ## Line comments 4 | 5 | Line comments are preceded by a double dash (`--`): 6 | 7 | ```pikelet 8 | -- This is a comment! 9 | ``` 10 | 11 | ## Doc comments 12 | 13 | Documentation comments are preceded by three pipes (`|||`): 14 | 15 | ```pikelet 16 | ||| A doc comment! 17 | ``` 18 | 19 | Multi-line doc comments can be created by 'stacking'. For example: 20 | 21 | ```pikelet 22 | ||| The unit type 23 | ||| 24 | ||| This is a synonym for the empty record, 25 | ||| and can be constructed using the `unit` function. 26 | ``` 27 | -------------------------------------------------------------------------------- /book/src/reference/functions.md: 1 | # Functions 2 | 3 | A function relates a number of unknown inputs to an output term. 4 | 5 | ## Types 6 | 7 | Function types are written as `A -> B`. 
8 | Functions are [_curried_][currying-wikipedia], meaning that they take a single input, and return a single output. 9 | Multi-input functions can be created by creating functions that output other functions. 10 | 11 | For example, the function type for adding two 32-bit signed integers together is: 12 | 13 | ```pikelet 14 | S32 -> S32 -> S32 15 | ``` 16 | 17 | ### Dependency 18 | 19 | Function output types can also depend on their inputs. 20 | For example this is the type of the identity function: 21 | 22 | ```pikelet 23 | Fun (A : Type) -> A -> A 24 | ``` 25 | 26 | > **Note:** 27 | > 28 | > These are sometimes called _pi types_ or [_dependent product types_][dependent-product-types-nlab] in type theory. 29 | 30 | ### Universes 31 | 32 | Function types are also types: 33 | 34 | ```pikelet 35 | U32 -> U32 : Type 36 | ``` 37 | 38 | In order to find the universe level of a function type, 39 | we use the universe level of the largest input or output: 40 | 41 | ```pikelet 42 | U32 -> Type^2 : Type^3 43 | ``` 44 | 45 | ## Terms 46 | 47 | Functions are constructed by specifying a list of one-or-more input names after a `fun` token, 48 | and then an output term after a `=>` token. 49 | The inputs can then be referred to in the output term of the function. 50 | 51 | ```pikelet 52 | fun input-1 input-2 => output 53 | ``` 54 | 55 | Functions must always be constructed in a position where they can find a type annotation. 56 | For example, the following function is ambiguous: 57 | 58 | ```pikelet 59 | fun x y => x 60 | ``` 61 | 62 | The following function passes the type checker, 63 | because the function type is pulled from the record annotation: 64 | 65 | ```pikelet 66 | record { 67 | const = fun x y => x, 68 | } : Record { 69 | const : S32 -> String -> S32, 70 | } 71 | ``` 72 | 73 | > **Note:** 74 | > 75 | > These are sometimes called [_lambda abstractions_][lambda-abstraction-nlab] in type theory, 76 | > or _anonymous functions_ in programming languages. 
77 | 78 | ## Eliminations 79 | 80 | Functions can be applied to arguments via [_juxtaposition_][juxtaposition-wikipedia]. 81 | 82 | For example, this is how the identity function might be applied: 83 | 84 | ```pikelet 85 | id String "hello!" 86 | ``` 87 | 88 | ```pikelet 89 | Array 3 String 90 | ``` 91 | 92 | ### Computation 93 | 94 | > **Note:** 95 | > 96 | > This section is a work in progress. 97 | > 98 | > We should describe beta-reduction here. 99 | 100 | [currying-wikipedia]: https://en.wikipedia.org/wiki/Currying 101 | [dependent-product-types-nlab]: https://ncatlab.org/nlab/show/dependent+product+type 102 | [lambda-abstraction-nlab]: https://ncatlab.org/nlab/show/lambda-abstraction 103 | [juxtaposition-wikipedia]: https://en.wikipedia.org/wiki/Juxtaposition#Mathematics 104 | -------------------------------------------------------------------------------- /book/src/reference/keywords.md: -------------------------------------------------------------------------------- 1 | # Keywords 2 | 3 | Keywords use the same lexical syntax as [names](./names.md), but are reserved by Pikelet. 
4 | 5 | The following keywords are reserved by Pikelet: 6 | 7 | | Keyword | Purpose | 8 | | ------- | ------- | 9 | | `as` | [Explicit binding names](./records#Explicit-binding-names) | 10 | | `Fun` | [Function formation](./functions#Formation) | 11 | | `fun` | [Function terms](./functions#Terms) | 12 | | `Record` | [Record types](./records#Types) | 13 | | `record` | [Record terms](./records#Terms) | 14 | -------------------------------------------------------------------------------- /book/src/reference/literals.md: -------------------------------------------------------------------------------- 1 | # Literals 2 | 3 | ## Numbers 4 | 5 | ```pikelet 6 | 0.0 7 | +1 8 | -25 9 | 0xAB342 10 | 1_000_000 11 | ``` 12 | 13 | ### Supported types 14 | 15 | - Unsigned integers: [`U8`][unsigned-integers], [`U16`][unsigned-integers], [`U32`][unsigned-integers], [`U64`][unsigned-integers] 16 | - Signed integers: [`S8`][signed-integers], [`S16`][signed-integers], [`S32`][signed-integers], [`S64`][signed-integers] 17 | - Floating point numbers: [`F32`][floating-point-numbers], [`F64`][floating-point-numbers] 18 | 19 | [unsigned-integers]: ./builtins#unsigned-integers 20 | [signed-integers]: ./builtins#signed-integers 21 | [floating-point-numbers]: ./builtins#floating-point-numbers 22 | 23 | ### Overloading 24 | 25 | Overloaded number literals are not yet supported, but _are_ planned. 26 | 27 | ## Characters 28 | 29 | ```pikelet 30 | 'A' 31 | '가' 32 | '🥞' 33 | ``` 34 | 35 | ### Supported types 36 | 37 | - [`Char`][characters] 38 | 39 | [characters]: ./builtins#characters 40 | 41 | ### Overloading 42 | 43 | Overloaded character literals are not yet supported, but _are_ planned. 44 | 45 | ## Strings 46 | 47 | ```pikelet 48 | "hello" 49 | ``` 50 | 51 | ### Supported types 52 | 53 | - [`String`][strings] 54 | 55 | [strings]: ./builtins#strings 56 | 57 | ### Overloading 58 | 59 | Overloaded string literals are not yet supported, but _are_ planned. 
60 | -------------------------------------------------------------------------------- /book/src/reference/names.md: -------------------------------------------------------------------------------- 1 | # Names 2 | 3 | Names refer to bindings that are currently in scope. 4 | 5 | These could either be _global_, or _local_. 6 | 7 | ```pikelet 8 | make-string 9 | Foo-23 10 | Unicode-String 11 | ``` 12 | 13 | ## Conventions 14 | 15 | 'Small' bindings should use `lower-kebab-case`, for example: 16 | 17 | ```pikelet 18 | my-string 19 | ``` 20 | 21 | 'Large' bindings should use `Title-Kebab-Case`, for example: 22 | 23 | ```pikelet 24 | My-String 25 | My-Universe 26 | ``` 27 | -------------------------------------------------------------------------------- /book/src/reference/records.md: -------------------------------------------------------------------------------- 1 | # Records 2 | 3 | Records provide a way of grouping together data into [composite data types][composite-data-types-wikipedia]. 4 | 5 | [composite-data-types-wikipedia]: https://en.wikipedia.org/wiki/Composite_data_type 6 | 7 | ## Types 8 | 9 | A record type is a list of entries, consisting of an entry label, and an entry type. 10 | For example, this is a record that defines `width` and `height` extents: 11 | 12 | ```pikelet 13 | Record { 14 | width : U32, 15 | height : U32, 16 | } 17 | ``` 18 | 19 | ### Entry dependencies 20 | 21 | Entries can be used to constrain the types of later entries. 22 | For example: 23 | 24 | ```pikelet 25 | Record { 26 | A : Type, 27 | a : A, 28 | } 29 | ``` 30 | 31 | Here the type of the entry with the label `a` _depends_ on the type given to 32 | the entry with label `A`. 33 | 34 | ### Explicit binding names 35 | 36 | By default, the binding name of an entry is the same as the label. 37 | In rare cases, however the label name might shadow a binding from a higher scope. 
38 | In this case we can give the field a new, internal name using the `as` keyword: 39 | 40 | ```pikelet 41 | Record { 42 | -- label 43 | -- │ 44 | -- │ explicit name binding 45 | -- │ │ 46 | -- v v 47 | String as String-1 : Type, 48 | 49 | -- refers to the built-in `String` type 50 | -- │ 51 | -- v 52 | x : String, 53 | 54 | -- refers to the local `String` entry 55 | -- │ 56 | -- v 57 | y : String-1, 58 | } 59 | ``` 60 | 61 | ### Universes 62 | 63 | Record types are also types: 64 | 65 | ```pikelet 66 | Record { 67 | first : U32, 68 | } : Type 69 | ``` 70 | 71 | In order to find the universe level of a record type, 72 | we use the universe level the largest entry type: 73 | 74 | ```pikelet 75 | Record { 76 | first : U32, 77 | second : Type^2, 78 | third : Type, 79 | } : Type^3 80 | ``` 81 | 82 | ### Entry order 83 | 84 | The order of entries in a record type are significant, 85 | so the following record type is not the same as the one shown above: 86 | 87 | ```pikelet 88 | Record { 89 | height : U32, 90 | width : U32, 91 | } 92 | ``` 93 | 94 | Dependencies must be supplied from the roots to the leaves. 95 | For example the following record would not type check because `A : Type` is not yet defined when `a : A` is declared: 96 | 97 | ```pikelet 98 | Record { 99 | a : A, 100 | A : Type, 101 | } 102 | ``` 103 | 104 | > **Note:** 105 | > 106 | > The entry order seems annoying! 107 | > It would be nice not to require this in the future, but dependencies make this a challenge! 108 | 109 | ## Terms 110 | 111 | > **Note:** 112 | > 113 | > This section is a work in progress. 
114 | 115 | ```pikelet 116 | record {} 117 | ``` 118 | 119 | ```pikelet 120 | record { 121 | width = 24, 122 | height = 33, 123 | } : Record { 124 | width : U32, 125 | height : U32, 126 | } 127 | ``` 128 | 129 | ### Entry dependencies 130 | 131 | The entries of record terms can depend on one another: 132 | 133 | ```pikelet 134 | record { x = 1, y = x } 135 | : Record { x : S32, y : S32 } 136 | ``` 137 | 138 | ### Entry order 139 | 140 | The entries of record terms must be supplied in the order that was specified in the type. 141 | For example this is a type error: 142 | 143 | ```pikelet 144 | record { y = 2, x = 1 } 145 | : Record { x : S32, y : S32 } 146 | ``` 147 | 148 | > **Note:** 149 | > 150 | > The entry order seems annoying! 151 | > It would be nice not to require this in the future. 152 | 153 | ### Explicit binding names 154 | 155 | By default, the binding name of an entry is the same as the label. 156 | In rare cases, however the label name might shadow a binding from a higher scope. 157 | In this case we can give the field a new, internal name using the `as` keyword: 158 | 159 | ```pikelet 160 | record { 161 | -- label 162 | -- │ 163 | -- │ explicit name binding 164 | -- │ │ 165 | -- v v 166 | String as String-1 = MyString, 167 | 168 | -- refers to the built-in `String` type 169 | -- │ 170 | -- │ refers to the local `String` entry 171 | -- | | 172 | -- v v 173 | Types = [ String, String-1 ], 174 | } : Record { 175 | String : Type, 176 | Types : Array 2 Type, 177 | } 178 | ``` 179 | 180 | ## Eliminations 181 | 182 | > **Note:** 183 | > 184 | > This section is a work in progress. 185 | 186 | ```pikelet 187 | extents.width 188 | ``` 189 | 190 | ### Computation 191 | 192 | > **Note:** 193 | > 194 | > This section is a work in progress. 
195 | -------------------------------------------------------------------------------- /book/src/reference/universes.md: -------------------------------------------------------------------------------- 1 | # Universes 2 | 3 | The type of types is `Type`. For example: 4 | 5 | ```pikelet 6 | Bool : Type 7 | S32 : Type 8 | Array : U32 -> Type -> Type 9 | ``` 10 | 11 | The type of `Type` is `Type`: 12 | 13 | ```pikelet 14 | Type : Type 15 | ``` 16 | 17 | Note that for [subtle reasons][type-in-type-liamoc] this is [_inconsistent_][consistency-wikipedia], 18 | as seen in [Girard's Paradox][girards-paradox-wikipedia]. 19 | This means that we might allow for paradoxical cycles in types. 20 | In the future we might add a hierarchy of universes to address this problem. 21 | 22 | [consistency-wikipedia]: https://en.wikipedia.org/wiki/Consistency 23 | [type-in-type-liamoc]: http://liamoc.net/posts/2015-09-10-girards-paradox/index.html 24 | [girards-paradox-wikipedia]: https://en.wikipedia.org/wiki/System_U#Girard's_paradox 25 | 26 | 102 | -------------------------------------------------------------------------------- /book/src/specification.md: -------------------------------------------------------------------------------- 1 | # Language Specification 2 | 3 | This part of the documentation is where we describe the syntax and semantics of Pikelet in a more precise way than the [language reference]. 4 | It is intended for language developers and programming languages researchers who want to understand the underlying type system of the language. 5 | 6 | [language reference]: ./reference.md 7 | 8 | ## Limitations 9 | 10 | It is important to note that we do not claim that these semantics are sound, 11 | but at the very least this specification could form the building blocks of a formally verified specification in the future. 
12 | 13 | ## Summary 14 | 15 | - [Core Language]() 16 | - [Surface Language]() 17 | - [Textual Representation](./specification/textual-representation.md) 18 | - [Inspiration](./specification/inspiration.md) 19 | -------------------------------------------------------------------------------- /book/src/specification/inspiration.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: inspiration 3 | title: Inspiration 4 | sidebar_label: Inspiration 5 | keywords: 6 | - docs 7 | - specification 8 | - pikelet 9 | --- 10 | 11 | Some inspiring language specifications/references: 12 | 13 | - [WebAssembly Specification](https://webassembly.github.io/spec/core/) 14 | - [The Definition of Standard ML](http://sml-family.org/sml97-defn.pdf) 15 | - [Dhall Specification](https://github.com/dhall-lang/dhall-lang/blob/master/standard/README.md) 16 | - [The Ur/Web Manual](http://www.impredicative.com/ur/manual.pdf) 17 | - [Coq Reference Manual: The Gallina specification language](https://coq.inria.fr/refman/language/gallina-specification-language.html) 18 | - [Coq Reference Manual: Calculus of Inductive Constructions](https://coq.inria.fr/refman/language/cic.html) 19 | - [The Isabelle/Isar Reference Manual](http://isabelle.in.tum.de/dist/Isabelle2019/doc/isar-ref.pdf) 20 | - [The Isabelle/Isar Implementation](http://isabelle.in.tum.de/dist/Isabelle2019/doc/implementation.pdf) 21 | - [Specification of Core Agda](https://agda.github.io/agda-spec/core-agda.pdf) 22 | - [Swift Reference](https://docs.swift.org/swift-book/ReferenceManual/AboutTheLanguageReference.html) 23 | - [D Specification](https://dlang.org/spec/spec.html) 24 | - [Rust Reference](https://doc.rust-lang.org/reference/) 25 | -------------------------------------------------------------------------------- /book/src/specification/textual-representation.md: -------------------------------------------------------------------------------- 1 | # Textual Representation 2 | 3 | This 
section describes the textual representation of Pikelet. 4 | 5 | ## Summary 6 | 7 | - [Lexical Syntax](./textual-representation/lexical-syntax.md) 8 | - [Concrete Syntax](./textual-representation/concrete-syntax.md) 9 | -------------------------------------------------------------------------------- /book/src/specification/textual-representation/concrete-syntax.md: -------------------------------------------------------------------------------- 1 | # Concrete Syntax 2 | 3 | This section defines the concrete syntax of the surface language. 4 | 5 | ## Terms 6 | 7 | ```text 8 | term ::= 9 | | expr-term 10 | | expr-term ":" term 11 | 12 | expr-term ::= 13 | | arrow-term 14 | | "fun" name+ "=>" expr-term 15 | 16 | arrow-term ::= 17 | | app-term 18 | | "Fun" ("(" name+ ":" arrow-term ")")+ "->" arrow-term 19 | | app-term "->" arrow-term 20 | 21 | app-term ::= 22 | | atomic-term 23 | | atomic-term atomic-term+ 24 | 25 | atomic-term ::= 26 | | "(" term ")" 27 | | name 28 | | "Record" "{" (type-entry ",")* type-entry? "}" 29 | | "record" "{" (term-entry ",")* term-entry? "}" 30 | | atomic-term "." name 31 | | "[" (term ",")* term? "]" 32 | | numeric-literal 33 | | character-literal 34 | | string-literal 35 | ``` 36 | 37 | ## Entries 38 | 39 | ```text 40 | type-entry ::= 41 | | doc-comment* name ("as" name)? ":" term 42 | 43 | term-entry ::= 44 | | doc-comment* name ("as" name)? 
"=" term 45 | ``` 46 | -------------------------------------------------------------------------------- /book/src/specification/textual-representation/lexical-syntax.md: -------------------------------------------------------------------------------- 1 | # Lexical Syntax 2 | 3 | ## Input format 4 | 5 | The textual surface language assigns meaning to a source string, 6 | which consists of a sequence of _Unicode scalar values_ (as defined in Section 3.4 of [the Unicode Standard](www.unicode.org/versions/latest/)), 7 | terminated with a virtual end-of-file symbol, `"\0"`: 8 | 9 | ```text 10 | unicode-scalar-value ::= 11 | | "\u{00}" ... "\u{D7FF}" 12 | | "\u{E000}" ... "\u{10FFF}" 13 | 14 | source ::= 15 | | unicode-scalar-value* "\0" 16 | ``` 17 | 18 | For convenience, we define a number of special values within the above `unicode-scalar-value` definition: 19 | 20 | ```text 21 | horizontal-tab ::= "\u{0009}" 22 | line-feed ::= "\u{000A}" 23 | vertical-tab ::= "\u{000B}" 24 | form-feed ::= "\u{000C}" 25 | carriage-return ::= "\u{000D}" 26 | next-line ::= "\u{0085}" 27 | left-to-right-mark ::= "\u{200E}" 28 | right-to-left-mark ::= "\u{200F}" 29 | line-separator ::= "\u{2028}" 30 | paragraph-separator ::= "\u{2029}" 31 | ``` 32 | 33 | ## Whitespace 34 | 35 | ```text 36 | line-break ::= 37 | | line-feed 38 | | carriage-return 39 | | carriage-return line-feed 40 | | "\0" 41 | 42 | white-space ::= 43 | | horizontal-tab 44 | | vertical-tab 45 | | form-feed 46 | | line-break 47 | | next-line 48 | | left-to-right-mark 49 | | right-to-left-mark 50 | | line-separator 51 | | paragraph-separator 52 | ``` 53 | 54 | ## Comments 55 | 56 | ```text 57 | comment-data ::= unicode-scalar-value - (line-feed | carriage-return) 58 | 59 | comment ::= "--" comment-data* line-break 60 | doc-comment ::= "|||" comment-data* line-break 61 | ``` 62 | 63 | ## Keywords 64 | 65 | ```text 66 | keyword ::= 67 | | "as" 68 | | "fun" 69 | | "Fun" 70 | | "Record" 71 | | "record" 72 | ``` 73 | 74 | ## 
Names 75 | 76 | ```text 77 | name-start ::= "a" ... "z" | "A" ... "Z" 78 | name-continue ::= "a" ... "z" | "A" ... "Z" | "0" ... "9" | "-" 79 | 80 | name ::= 81 | | (name-start name-continue*) - keyword 82 | ``` 83 | 84 | ### Punctuation 85 | 86 | ```text 87 | delimiter ::= 88 | | "{" 89 | | "}" 90 | | "[" 91 | | "]" 92 | | "(" 93 | | ")" 94 | 95 | symbol ::= 96 | | "." 97 | | ":" 98 | | "," 99 | | "=" 100 | | "=>" 101 | | "->" 102 | 103 | punctuation ::= 104 | | delimiter 105 | | symbol 106 | ``` 107 | 108 | ### Numeric Literals 109 | 110 | ```text 111 | sign ::= "+" | "-" 112 | digit-start ::= "0" ... "9" 113 | digit-continue ::= "a" ... "z" | "A" ... "Z" | "0" ... "9" | "." 114 | 115 | numeric-literal ::= 116 | | sign? digit-start digit-continue* 117 | ``` 118 | 119 | ### Quoted Literals 120 | 121 | ```text 122 | quoted-data(quote-end) ::= 123 | | unicode-scalar-value - (quote-end | "\\") 124 | | "\\" unicode-scalar-value 125 | 126 | character-literal ::= "\'" quoted-data("\'")* "\'" 127 | string-literal ::= "\"" quoted-data("\"")* "\"" 128 | ``` 129 | 130 | ### Tokens 131 | 132 | ```text 133 | ignored := 134 | | white-space 135 | | comment 136 | 137 | token ::= 138 | | doc-comment 139 | | keyword 140 | | name 141 | | punctuation 142 | | numeric-literal 143 | | character-literal 144 | | string-literal 145 | 146 | tokens := 147 | | (token | ignored)* 148 | ``` 149 | -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | # Pikelet examples 2 | 3 | This directory includes a bunch of different example Pikelet programs. 4 | 5 | We make sure to ensure these are kept up-to-date in [`../pikelet/tests/examples.rs`]. 
6 | 7 | [`../pikelet/tests/examples.rs`]: ../pikelet/tests/examples.rs 8 | -------------------------------------------------------------------------------- /examples/hello-world.pi: -------------------------------------------------------------------------------- 1 | "Hello beautiful world!" 2 | -------------------------------------------------------------------------------- /examples/meta.pi: -------------------------------------------------------------------------------- 1 | record { 2 | Constant = 3 | Fun (Constant : Type) 4 | (constant : Record { 5 | u8 : U8 -> Constant, 6 | u16 : U16 -> Constant, 7 | u32 : U32 -> Constant, 8 | u64 : U64 -> Constant, 9 | s8 : S8 -> Constant, 10 | s16 : S16 -> Constant, 11 | s32 : S32 -> Constant, 12 | s64 : S64 -> Constant, 13 | f32 : F32 -> Constant, 14 | f64 : F64 -> Constant, 15 | char : Char -> Constant, 16 | string : String -> Constant, 17 | }) 18 | -> Constant, 19 | 20 | Term = 21 | Fun (Term : Type) 22 | (term : Record { 23 | global : String -> Term, 24 | local : U32 -> Term, 25 | ann : Term -> Term -> Term, 26 | type-type : Term, 27 | function-type : Term -> Term -> Term, 28 | function-term : Term -> Term, 29 | function-elim : Term -> Term -> Term, 30 | record-type : List (Record { label : String, type : Term }) -> Term, 31 | record-term : List (Record { label : String, term : Term }) -> Term, 32 | record-elim : Term -> Term, 33 | list-term : List Term -> Term, 34 | array-term : List Term -> Term, 35 | constant : Constant -> Term, 36 | }) 37 | -> Term, 38 | } : Record { 39 | Constant : Type, 40 | Term : Type, 41 | } 42 | -------------------------------------------------------------------------------- /examples/prelude.pi: -------------------------------------------------------------------------------- 1 | -- TODO: type annotations for record terms 2 | record { 3 | id = fun A a => a, 4 | always = fun A B a b => a, 5 | 6 | dep-compose = fun A B C a-b a-b-c a => 7 | a-b-c a (a-b a), 8 | compose = fun A B C a-b b-c => 9 | 
dep-compose A (fun a => B) (fun a b => C) a-b (fun a b => b-c b), 10 | 11 | dep-flip = fun A B C a-b-c b a => 12 | a-b-c a b, 13 | flip = fun A B C => 14 | dep-flip A B (fun a b => C), 15 | 16 | dep-subst = fun A B C a-b-c a-b a => 17 | a-b-c a (a-b a), 18 | subst = fun A B C => 19 | dep-subst A (fun a => B) (fun a b => C), 20 | 21 | Unit = Record {}, 22 | unit = record {}, 23 | 24 | Prod = fun A B => Fun (a : A) -> B a, 25 | Sum = fun A B => Record { val : A, proof : B val }, 26 | 27 | Semigroup = Record { 28 | ||| The carrier type 29 | Carrier : Type, 30 | ||| The associative operation 31 | append : Carrier -> Carrier -> Carrier, 32 | }, 33 | 34 | Category = Record { 35 | ||| An object in the category 36 | Object : Type, 37 | ||| Arrows between the objects in the category 38 | Arrow : Object -> Object -> Type, 39 | ||| The identity arrow 40 | id : Fun (A : Object) -> Arrow A A, 41 | ||| The sequencing of two arrows 42 | seq : Fun (A B C : Object) -> Arrow A B -> Arrow B C -> Arrow A C, 43 | }, 44 | 45 | category-pikelet = record { 46 | Object = Type, 47 | Arrow = fun A B => A -> B, 48 | id = id, 49 | seq = compose, 50 | }, 51 | 52 | category-opposite = fun c => record { 53 | Object = c.Object, 54 | Arrow = fun A B => c.Arrow B A, 55 | id = c.id, 56 | seq = fun A B C a-b b-c => c.seq C B A b-c a-b, 57 | }, 58 | } : Record { 59 | ||| The polymorphic identity function. 60 | id : Fun (A : Type) -> A -> A, 61 | 62 | ||| Creates a function that always returns the same value. 63 | always : Fun (A B : Type) -> A -> B -> A, 64 | 65 | 66 | ||| Dependent function composition. 67 | dep-compose : 68 | Fun (A : Type) 69 | (B : A -> Type) 70 | (C : Fun (a : A) -> B a -> Type) 71 | (a-b : Fun (a : A) -> B a) 72 | (a-b-c : Fun (a : A) (b : B a) -> C a b) 73 | -> (Fun (a : A) -> C a (a-b a)), 74 | 75 | ||| Function composition. 76 | compose : Fun (A B C : Type) -> (A -> B) -> (B -> C) -> (A -> C), 77 | 78 | 79 | ||| Flip the order of the first two inputs to a dependent function. 
80 | dep-flip : 81 | Fun (A B : Type) 82 | (C : A -> B -> Type) 83 | (a-b-c : Fun (a : A) (b : B) -> C a b) 84 | -> (Fun (b : B) (a : A) -> C a b), 85 | 86 | ||| Flip the order of the first two inputs to a function 87 | flip : Fun (A B C : Type) -> (A -> B -> C) -> (B -> A -> C), 88 | 89 | 90 | ||| Dependent substitution. 91 | ||| 92 | ||| Takes three inputs and then returns the first input applied to the third, 93 | ||| which is then applied to the result of the second input applied to the third. 94 | ||| 95 | ||| Also known as the 'S Combinator' in the [SKI combinator calculus][ski-wiki]. 96 | ||| 97 | ||| # References 98 | ||| 99 | ||| - [Outrageous but Meaningful Coincidences: Dependent type-safe syntax and evaluation][dep-rep] 100 | ||| (Described in Section 5 as an infix `_ˢ_` operator) 101 | ||| 102 | ||| [ski-wiki]: https://en.wikipedia.org/wiki/SKI_combinator_calculus 103 | ||| [dep-rep]: https://personal.cis.strath.ac.uk/conor.mcbride/pub/DepRep/DepRep.pdf 104 | dep-subst : 105 | Fun (A : Type) 106 | (B : A -> Type) 107 | (C : Fun (a : A) -> B a -> Type) 108 | (a-b-c : Fun (a : A) (b : B a) -> C a b) 109 | (a-b : Fun (a : A) -> B a) 110 | -> (Fun (a : A) -> C a (a-b a)), 111 | 112 | ||| Substitution. 113 | subst : Fun (A B C : Type) -> (A -> B -> C) -> (A -> B) -> (A -> C), 114 | 115 | 116 | ||| The unit type 117 | ||| 118 | ||| This is a synonym for the empty record, and can be constructed using the 119 | ||| `unit` function. 
120 | Unit : Type, 121 | 122 | ||| Create an element of the `Unit` type 123 | unit : Unit, 124 | 125 | 126 | ||| Dependent products 127 | Prod : Fun (A : Type) (B : A -> Type) -> Type, 128 | 129 | ||| Dependent sums (subtypes) 130 | Sum : Fun (A : Type) (B : A -> Type) -> Type, 131 | 132 | 133 | ||| A carrier equipped with an associative operation 134 | Semigroup : Type, 135 | 136 | ||| A category is a very general structure that provides a common way of 137 | ||| composing units of functionality 138 | ||| 139 | ||| The most common category programmers would be familiar with would be `Type`s 140 | ||| are the objects, and the functions between those types are the arrows. Many 141 | ||| other categories exist though, for example: 142 | ||| 143 | ||| - nodes in a directed graph, and the edges between those nodes. 144 | ||| - etc. 145 | Category : Type, 146 | 147 | ||| Category of Pikelet functions and types. 148 | category-pikelet : Category, 149 | 150 | ||| Opposite categories. 151 | category-opposite : Category -> Category, 152 | } 153 | -------------------------------------------------------------------------------- /examples/record-mesh.pi: -------------------------------------------------------------------------------- 1 | record { 2 | Vertex = Record { 3 | point : Array 3 F32, 4 | normal : Array 3 F32, 5 | color : Array 4 F32, 6 | }, 7 | Mesh = Record { 8 | ||| The index data to use. 9 | index-data : List U16, 10 | ||| The vertex data. 
11 | vertex-data : List Vertex, 12 | }, 13 | 14 | examples = record { 15 | cube = record { 16 | index-data = [ 17 | 0, 1, 3, 3, 1, 2, 18 | 1, 4, 2, 2, 4, 7, 19 | 4, 5, 7, 7, 5, 6, 20 | 5, 0, 6, 6, 0, 3, 21 | 3, 2, 6, 6, 2, 7, 22 | 5, 4, 0, 0, 4, 1, 23 | ], 24 | vertex-data = [ 25 | record { point = [0, 0, 0], normal = [0, 0, 1], color = [1, 0, 0, 1] }, 26 | record { point = [1, 0, 0], normal = [0, 0, 1], color = [0, 1, 0, 1] }, 27 | record { point = [1, 1, 0], normal = [0, 0, 1], color = [0, 0, 1, 1] }, 28 | record { point = [0, 1, 0], normal = [0, 0, 1], color = [1, 0, 1, 1] }, 29 | record { point = [1, 0, -1], normal = [0, 0, 1], color = [1, 0, 0, 1] }, 30 | record { point = [0, 0, -1], normal = [0, 0, 1], color = [0, 1, 0, 1] }, 31 | record { point = [0, 1, -1], normal = [0, 0, 1], color = [0, 0, 1, 1] }, 32 | record { point = [1, 1, -1], normal = [0, 0, 1], color = [1, 0, 1, 1] }, 33 | ], 34 | }, 35 | }, 36 | } : Record { 37 | ||| The type of vertex in a mesh. 38 | Vertex : Type, 39 | ||| Mesh data, made up of an index buffer and a vertex buffer. 40 | Mesh : Type, 41 | 42 | ||| Example meshes. 43 | examples : Record { 44 | ||| Cube mesh. 
45 | cube : Mesh, 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /examples/window-settings.pi: -------------------------------------------------------------------------------- 1 | record { 2 | window = record { 3 | title = "Voyager", 4 | default-size = record { 5 | width = 1280.0, 6 | height = 720.0, 7 | }, 8 | fullscreen = false, 9 | }, 10 | controls = record { 11 | move-speed = 2.0, 12 | look-speed = 15.0, 13 | }, 14 | } : Record { 15 | window : Record { 16 | title : String, 17 | default-size : Record { 18 | width : F32, 19 | height : F32, 20 | }, 21 | fullscreen : Bool, 22 | }, 23 | controls : Record { 24 | move-speed : F32, 25 | look-speed : F32, 26 | }, 27 | } 28 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "private": true, 3 | "workspaces": [ 4 | "book" 5 | ] 6 | } 7 | -------------------------------------------------------------------------------- /pikelet-cli/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pikelet-cli" 3 | version = "0.1.0" 4 | authors = ["Brendan Zabarauskas "] 5 | edition = "2018" 6 | publish = false 7 | description = "Command line interface for interacting with the Pikelet programming language" 8 | homepage = "https://github.com/pikelet-lang/pikelet" 9 | repository = "https://github.com/pikelet-lang/pikelet" 10 | readme = "README.md" 11 | keywords = ["pikelet", "cli"] 12 | categories = ["command-line-utilities"] 13 | license = "Apache-2.0" 14 | 15 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 16 | 17 | [[bin]] 18 | name = "pikelet" 19 | path = "src/main.rs" 20 | 21 | [[test]] 22 | name = "source_tests" 23 | harness = false 24 | 25 | [features] 26 | default = ["editor", "language-server"] 27 | editor = ["pikelet-editor"] 28 
| language-server = ["pikelet-language-server"] 29 | 30 | [dependencies] 31 | anyhow = "1.0" 32 | codespan-reporting = "0.11" 33 | crossbeam-channel = "0.5" 34 | pikelet = { path = "../pikelet" } 35 | pikelet-editor = { path = "../pikelet-editor", optional = true } 36 | pikelet-language-server = { path = "../pikelet-language-server", optional = true } 37 | pretty = "0.10" 38 | rustyline = "8.0" 39 | structopt = "0.3" 40 | term_size = "0.3" 41 | xdg = "2.2" 42 | 43 | [dev-dependencies] 44 | libtest-mimic = "0.3.0" 45 | pikelet-test = { path = "../pikelet-test" } 46 | -------------------------------------------------------------------------------- /pikelet-cli/README.md: -------------------------------------------------------------------------------- 1 | # pikelet-cli 2 | 3 | Command line interface for interacting with the Pikelet programming language 4 | -------------------------------------------------------------------------------- /pikelet-cli/src/check.rs: -------------------------------------------------------------------------------- 1 | use codespan_reporting::diagnostic::Severity; 2 | use codespan_reporting::files::SimpleFiles; 3 | use codespan_reporting::term::termcolor::{BufferedStandardStream, ColorChoice}; 4 | use pikelet::lang::{core, surface}; 5 | use pikelet::pass::surface_to_core; 6 | use std::io::Write; 7 | use std::path::PathBuf; 8 | 9 | /// Check some Pikelet source files. 10 | #[derive(structopt::StructOpt)] 11 | pub struct Options { 12 | /// Validate the elaborated core language. 13 | #[structopt(long = "validate-core")] 14 | validate_core: bool, 15 | /// The Pikelet source files to be checked. 
16 | #[structopt(name = "FILE")] 17 | file_names: Vec, 18 | } 19 | 20 | pub fn run(options: Options) -> anyhow::Result<()> { 21 | let pretty_alloc = pretty::BoxAllocator; 22 | let mut writer = BufferedStandardStream::stderr(ColorChoice::Always); 23 | let reporting_config = codespan_reporting::term::Config::default(); 24 | 25 | let globals = core::Globals::default(); 26 | let (messages_tx, messages_rx) = crossbeam_channel::unbounded(); 27 | let mut files = SimpleFiles::new(); 28 | let mut surface_to_core = surface_to_core::Context::new(&globals, messages_tx.clone()); 29 | let mut core_typing = match options.validate_core { 30 | true => Some(core::typing::Context::new(&globals, messages_tx.clone())), 31 | false => None, 32 | }; 33 | 34 | let mut is_ok = true; 35 | 36 | for file_name in &options.file_names { 37 | let source = std::fs::read_to_string(file_name)?; 38 | let file_id = files.add(file_name.display().to_string(), source); 39 | let file = files.get(file_id).unwrap(); 40 | 41 | let surface_term = surface::Term::from_str(file_id, file.source(), &messages_tx); 42 | 43 | let (core_term, _) = surface_to_core.synth_type(&surface_term); 44 | if let Some(core_typing) = &mut core_typing { 45 | let _ = core_typing.synth_type(&core_term); 46 | } 47 | 48 | for message in messages_rx.try_iter() { 49 | let diagnostic = message.to_diagnostic(&pretty_alloc); 50 | is_ok &= diagnostic.severity < Severity::Error; 51 | 52 | codespan_reporting::term::emit(&mut writer, &reporting_config, &files, &diagnostic)?; 53 | writer.flush()?; 54 | } 55 | } 56 | 57 | match is_ok { 58 | true => Ok(()), 59 | false => Err(anyhow::anyhow!("errors found in supplied source files")), 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /pikelet-cli/src/lib.rs: -------------------------------------------------------------------------------- 1 | use anyhow::anyhow; 2 | 3 | pub mod check; 4 | pub mod repl; 5 | 6 | /// The Pikelet command line interface. 
7 | #[derive(structopt::StructOpt)] 8 | pub enum Options { 9 | /// Check some Pikelet source files. 10 | #[structopt(name = "check")] 11 | Check(check::Options), 12 | /// Runs the structured editor. 13 | #[cfg(feature = "editor")] 14 | #[structopt(name = "editor")] 15 | Editor, 16 | /// Runs the language server. 17 | #[cfg(feature = "language-server")] 18 | #[structopt(name = "language-server")] 19 | LanguageServer, 20 | /// Runs the REPL/interactive mode. 21 | #[structopt(name = "repl")] 22 | Repl(repl::Options), 23 | } 24 | 25 | /// Run the CLI with the given options 26 | pub fn run(options: Options) -> anyhow::Result<()> { 27 | match options { 28 | Options::Check(options) => check::run(options), 29 | #[cfg(feature = "editor")] 30 | Options::Editor => { 31 | // FIXME: `iced::Error` is not `Send + Sync`, and so is incompatible with `anyhow::Result`. 32 | // See this issue for more information: https://github.com/hecrj/iced/issues/516 33 | pikelet_editor::run().map_err(|err| anyhow!("{}", err)) 34 | } 35 | #[cfg(feature = "language-server")] 36 | Options::LanguageServer => pikelet_language_server::run(), 37 | Options::Repl(options) => repl::run(options), 38 | } 39 | } 40 | 41 | fn term_width() -> usize { 42 | match term_size::dimensions() { 43 | Some((width, _)) => width, 44 | None => std::usize::MAX, 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /pikelet-cli/src/main.rs: -------------------------------------------------------------------------------- 1 | use pikelet_cli::Options; 2 | use structopt::StructOpt; 3 | 4 | fn main() -> anyhow::Result<()> { 5 | pikelet_cli::run(Options::from_args()) 6 | } 7 | -------------------------------------------------------------------------------- /pikelet-cli/src/repl.rs: -------------------------------------------------------------------------------- 1 | use codespan_reporting::diagnostic::Severity; 2 | use codespan_reporting::files::SimpleFiles; 3 | use 
codespan_reporting::term::termcolor::{BufferedStandardStream, ColorChoice}; 4 | use pikelet::lang::{core, surface}; 5 | use pikelet::pass::{surface_to_core, surface_to_pretty}; 6 | use rustyline::error::ReadlineError; 7 | use std::io::Write; 8 | use std::sync::Arc; 9 | 10 | const HISTORY_FILE_NAME: &str = "history"; 11 | 12 | /// The Pikelet REPL/interactive mode. 13 | #[derive(structopt::StructOpt)] 14 | pub struct Options { 15 | /// The prompt to display before expressions. 16 | #[structopt(long = "prompt", default_value = "> ")] 17 | pub prompt: String, 18 | /// Disable the welcome banner on startup. 19 | #[structopt(long = "no-banner")] 20 | pub no_banner: bool, 21 | /// Disable saving of command history on exit. 22 | #[structopt(long = "no-history")] 23 | pub no_history: bool, 24 | } 25 | 26 | fn print_welcome_banner() { 27 | const WELCOME_BANNER: &[&str] = &[ 28 | r" ____ _ __ __ __ ", 29 | r" / __ \(_) /_____ / /__ / /_ ", 30 | r" / /_/ / / //_/ _ \/ / _ \/ __/ ", 31 | r" / ____/ / ,< / __/ / __/ /_ ", 32 | r"/_/ /_/_/|_|\___/_/\___/\__/ ", 33 | r"", 34 | ]; 35 | 36 | for (i, line) in WELCOME_BANNER.iter().enumerate() { 37 | // warning on `env!` is a known issue 38 | #[allow(clippy::print_literal)] 39 | match i { 40 | 2 => println!("{}Version {}", line, env!("CARGO_PKG_VERSION")), 41 | 3 => println!("{}{}", line, env!("CARGO_PKG_HOMEPAGE")), 42 | 4 => println!("{}:? 
for help", line), 43 | _ => println!("{}", line.trim_end()), 44 | } 45 | } 46 | } 47 | 48 | pub fn run(options: Options) -> anyhow::Result<()> { 49 | let mut editor = { 50 | let config = rustyline::Config::builder() 51 | .history_ignore_space(true) 52 | .history_ignore_dups(true) 53 | .build(); 54 | 55 | rustyline::Editor::<()>::with_config(config) 56 | }; 57 | 58 | if !options.no_banner { 59 | print_welcome_banner() 60 | } 61 | 62 | // TODO: Use appropriate directory on Windows 63 | let xdg_dirs = xdg::BaseDirectories::with_prefix("pikelet/repl")?; 64 | let history_path = xdg_dirs.get_data_home().join(HISTORY_FILE_NAME); 65 | 66 | if !options.no_history && editor.load_history(&history_path).is_err() { 67 | // No previous REPL history! 68 | } 69 | 70 | let pretty_alloc = pretty::BoxAllocator; 71 | let mut writer = BufferedStandardStream::stderr(ColorChoice::Always); 72 | let reporting_config = codespan_reporting::term::Config::default(); 73 | 74 | let globals = core::Globals::default(); 75 | let (messages_tx, messages_rx) = crossbeam_channel::unbounded(); 76 | let mut files = SimpleFiles::new(); 77 | let mut state = surface_to_core::Context::new(&globals, messages_tx.clone()); 78 | 79 | 'repl: loop { 80 | let (file_id, file) = match editor.readline(&options.prompt) { 81 | Ok(line) => { 82 | let file_id = files.add("", line); 83 | (file_id, files.get(file_id).unwrap()) 84 | } 85 | Err(ReadlineError::Interrupted) => { 86 | println!("Interrupted!"); 87 | continue 'repl; 88 | } 89 | Err(ReadlineError::Eof) => break 'repl, 90 | Err(error) => return Err(error.into()), 91 | }; 92 | 93 | if !options.no_history { 94 | editor.add_history_entry(file.source()); 95 | } 96 | 97 | // TODO: Parse REPL commands 98 | // 99 | // Command Arguments Purpose 100 | // 101 | // normalize a term in the context 102 | // :? 
:h :help display this help text 103 | // :core print the core representation of a term 104 | // :define : add a declaration in the REPL context 105 | // :define = add a definition in the REPL context 106 | // :q :quit quit the repl 107 | // :t :type infer the type of a term 108 | let surface_term = surface::Term::from_str(file_id, file.source(), &messages_tx); 109 | let (core_term, r#type) = state.synth_type(&surface_term); 110 | 111 | let mut is_ok = true; 112 | for message in messages_rx.try_iter() { 113 | let diagnostic = message.to_diagnostic(&pretty_alloc); 114 | is_ok &= diagnostic.severity < Severity::Error; 115 | 116 | codespan_reporting::term::emit(&mut writer, &reporting_config, &files, &diagnostic)?; 117 | writer.flush()?; 118 | } 119 | 120 | if is_ok { 121 | let ann_term = core::Term::generated(core::TermData::Ann( 122 | Arc::new(state.normalize(&core_term)), 123 | Arc::new(state.read_back(&r#type)), 124 | )); 125 | let term = state.core_to_surface(&ann_term); 126 | let doc = surface_to_pretty::from_term(&pretty_alloc, &term); 127 | 128 | println!("{}", doc.1.pretty(crate::term_width())); 129 | } 130 | } 131 | 132 | if !options.no_history && !editor.history().is_empty() { 133 | let history_path = xdg_dirs.place_data_file(HISTORY_FILE_NAME)?; 134 | editor.save_history(&history_path)?; 135 | } 136 | 137 | println!("Bye bye"); 138 | 139 | Ok(()) 140 | } 141 | -------------------------------------------------------------------------------- /pikelet-cli/tests/source_tests.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | let args = libtest_mimic::Arguments::from_args(); 3 | 4 | std::env::set_current_dir("..").unwrap(); 5 | 6 | let tests = std::iter::empty() 7 | .chain(pikelet_test::walk_files("examples").filter_map(pikelet_test::extract_simple_test)) 8 | .chain(pikelet_test::walk_files("tests").filter_map(pikelet_test::extract_config_test)) 9 | .collect(); 10 | let run_test = 
pikelet_test::run_test(env!("CARGO_BIN_EXE_pikelet")); 11 | 12 | libtest_mimic::run_tests(&args, tests, run_test).exit(); 13 | } 14 | -------------------------------------------------------------------------------- /pikelet-editor/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pikelet-editor" 3 | version = "0.1.0" 4 | authors = ["Brendan Zabarauskas "] 5 | edition = "2018" 6 | publish = false 7 | description = "Graphical user interface for interacting with the Pikelet programming language" 8 | homepage = "https://github.com/pikelet-lang/pikelet" 9 | repository = "https://github.com/pikelet-lang/pikelet" 10 | readme = "README.md" 11 | keywords = ["pikelet", "gui"] 12 | categories = [] 13 | license = "Apache-2.0" 14 | 15 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 16 | 17 | [dependencies] 18 | anyhow = "1.0" 19 | pikelet = { path = "../pikelet" } 20 | -------------------------------------------------------------------------------- /pikelet-editor/README.md: -------------------------------------------------------------------------------- 1 | # pikelet-editor 2 | 3 | This is intended to provided the basis for a structured programming environment for Pikelet. 4 | While the hopes behind this are ambitious, we will need to proceed carefully, taking care to avoid getting trapped down rabbit holes. 5 | 6 | It should both be natively compiled, be able to be run in a browser, or embedded in other applications, like game engines. 7 | For this, libraries like [Iced](https://github.com/hecrj/iced) could prove useful. 8 | Another possibility could be to embed this within Electron-based IDEs like VS Code. 9 | We might also want to eventually investigate implementing the programming environment using platform-specific GUI toolkits. 
10 | 11 | ## Inspiration 12 | 13 | ### Projects 14 | 15 | - [Aardappel](http://strlen.com/aardappel-language/) 16 | - [Alfa](http://www.cse.chalmers.se/~hallgren/Alfa/) 17 | - [Dark](https://darklang.com/) 18 | - [Eve](http://witheve.com/) 19 | - [Factor](https://factorcode.org/) 20 | - [fructure](https://github.com/disconcision/fructure) 21 | - [Glamorous Toolkit](https://gtoolkit.com/) 22 | - [Hazel](https://hazel.org/) 23 | - [Houdini Networks](https://www.sidefx.com/docs/houdini/network/index.html) 24 | - [Livelits](https://github.com/hazelgrove/livelits-tyde/blob/master/livelits-tyde.pdf) 25 | - [medit](https://github.com/molikto/medit) 26 | - [MPS](https://www.jetbrains.com/mps/) 27 | - [Pharo](https://pharo.org/) 28 | - [Self](https://selflanguage.org/) 29 | - [Symbolics Lisp](https://twitter.com/RainerJoswig/status/1213528401774071813) 30 | - [TreeSheets](http://strlen.com/treesheets/) 31 | - [Unreal Blueprints](https://docs.unrealengine.com/en-US/Engine/Blueprints) 32 | 33 | ### Collections 34 | 35 | - [Visual Programming Codex](https://github.com/ivanreese/visual-programming-codex) 36 | - [The Whole Code Catalog](https://futureofcoding.org/catalog/) 37 | 38 | ### Papers 39 | 40 | - "Tangible Functional Programming"<br/>
41 | Conal M. Elliott, 42 | [CiteSeer](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.422.6896) 43 | - "An Extensible Proof Text Editor"
44 | Thomas Hallgren, Aarne Ranta, 45 | [CiteSeer](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.138.2186) 46 | - "Interactive visual functional programming"
47 | Keith Hanna, 48 | [ACM](https://dl.acm.org/doi/10.1145/583852.581493) 49 | - "A Document-Centered Environment for Haskell"
50 | Keith Hanna, 51 | [ACM](https://dl.acm.org/doi/10.1007/11964681_12) 52 | - "Accessible AST-Based Programming for Visually-Impaired Programmers"
53 | Emmanuel Schanzer, Sina Bahram, Shriram Krishnamurthi, 54 | [PDF](https://cs.brown.edu/~sk/Publications/Papers/Published/sbk-accessible-ast-blocks/paper.pdf) 55 | -------------------------------------------------------------------------------- /pikelet-editor/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub fn run() -> Result<(), anyhow::Error> { 2 | anyhow::bail!("not yet implemented") 3 | } 4 | -------------------------------------------------------------------------------- /pikelet-language-server/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pikelet-language-server" 3 | version = "0.1.0" 4 | authors = ["Brendan Zabarauskas "] 5 | edition = "2018" 6 | publish = false 7 | description = "Language server protocol implementation for the Pikelet programming language" 8 | homepage = "https://github.com/pikelet-lang/pikelet" 9 | repository = "https://github.com/pikelet-lang/pikelet" 10 | readme = "README.md" 11 | keywords = ["pikelet"] 12 | categories = [] 13 | license = "Apache-2.0" 14 | 15 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 16 | 17 | [dependencies] 18 | anyhow = "1.0" 19 | flexi_logger = "0.17" 20 | log = "0.4" 21 | lsp-server = "0.5" 22 | lsp-types = "0.88" 23 | serde_json = "1.0.57" 24 | serde = { version = "1.0.114", features = ["derive"] } 25 | -------------------------------------------------------------------------------- /pikelet-language-server/README.md: -------------------------------------------------------------------------------- 1 | # pikelet-language-server 2 | 3 | Language server protocol implementation for the Pikelet programming language 4 | -------------------------------------------------------------------------------- /pikelet-language-server/src/lib.rs: -------------------------------------------------------------------------------- 1 | use log::info; 2 | 
use lsp_server::{Connection, Message}; 3 | use lsp_types::{InitializeParams, ServerCapabilities}; 4 | 5 | pub fn run() -> anyhow::Result<()> { 6 | // Set up logging. Because `stdio_transport` gets a lock on stdout and stdin, we must have 7 | // our logging only write out to stderr. 8 | flexi_logger::Logger::with_str("info").start()?; 9 | info!("Starting Pikelet LSP server"); 10 | 11 | // Create the transport. Includes the stdio (stdin and stdout) versions but this could 12 | // also be implemented to use sockets or HTTP. 13 | let (connection, io_threads) = Connection::stdio(); 14 | 15 | // Run the server and wait for the two threads to end (typically by trigger LSP Exit event). 16 | let server_capabilities = serde_json::to_value(&ServerCapabilities::default())?; 17 | let initialization_params = connection.initialize(server_capabilities)?; 18 | main_loop(&connection, initialization_params)?; 19 | io_threads.join()?; 20 | 21 | // Shut down gracefully. 22 | info!("Shutting down server"); 23 | 24 | Ok(()) 25 | } 26 | 27 | fn main_loop(connection: &Connection, params: serde_json::Value) -> anyhow::Result<()> { 28 | let _params: InitializeParams = serde_json::from_value(params).unwrap(); 29 | 30 | info!("Starting Pikelet main loop"); 31 | for msg in &connection.receiver { 32 | info!("Received msg: {:?}", msg); 33 | match msg { 34 | Message::Request(request) => { 35 | if connection.handle_shutdown(&request)? 
{ 36 | return Ok(()); 37 | } 38 | info!("Got request: {:?}", request); 39 | } 40 | Message::Response(response) => { 41 | info!("Received response: {:?}", response); 42 | } 43 | Message::Notification(notification) => { 44 | info!("Received notification: {:?}", notification); 45 | } 46 | } 47 | } 48 | 49 | Ok(()) 50 | } 51 | -------------------------------------------------------------------------------- /pikelet-test/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pikelet-test" 3 | version = "0.1.0" 4 | authors = ["Brendan Zabarauskas "] 5 | edition = "2018" 6 | publish = false 7 | description = "Integration tests for the Pikelet programming language" 8 | 9 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 10 | 11 | [dependencies] 12 | itertools = "0.10.0" 13 | libtest-mimic = "0.3.0" 14 | serde = { version = "1.0", features = ["derive"] } 15 | toml = "0.5" 16 | walkdir = "2.3.2" 17 | -------------------------------------------------------------------------------- /pikelet-test/src/lib.rs: -------------------------------------------------------------------------------- 1 | use libtest_mimic::{Outcome, Test}; 2 | use serde::Deserialize; 3 | use std::fmt::Write; 4 | use std::fs; 5 | use std::path::{Path, PathBuf}; 6 | use std::process::Command; 7 | use walkdir::WalkDir; 8 | 9 | /// Recursively walk over test files under a file path. 
10 | pub fn walk_files(root: impl AsRef) -> impl Iterator { 11 | WalkDir::new(root) 12 | .into_iter() 13 | .filter_map(|dir_entry| dir_entry.ok()) 14 | .filter(|dir_entry| dir_entry.file_type().is_file()) 15 | .map(|dir_entry| dir_entry.into_path()) 16 | } 17 | 18 | fn is_pikelet_path(path: &Path) -> bool { 19 | matches!(path.extension(), Some(ext) if ext == "pi") 20 | } 21 | 22 | /// Extract a new simple test from the given file path 23 | pub fn extract_simple_test(path: PathBuf) -> Option> { 24 | is_pikelet_path(&path).then(|| Test { 25 | name: path.display().to_string(), 26 | kind: String::new(), 27 | is_ignored: false, 28 | is_bench: false, 29 | data: TestData { 30 | input_file: path, 31 | parse_config: false, 32 | }, 33 | }) 34 | } 35 | 36 | /// Extract a new test configured with TOML-formatted comments from the given file path 37 | pub fn extract_config_test(path: PathBuf) -> Option> { 38 | is_pikelet_path(&path).then(|| Test { 39 | name: path.display().to_string(), 40 | kind: String::new(), 41 | is_ignored: false, 42 | is_bench: false, 43 | data: TestData { 44 | input_file: path, 45 | parse_config: true, 46 | }, 47 | }) 48 | } 49 | 50 | /// Create a test runner function using the given Pikelet executable 51 | pub fn run_test( 52 | pikelet_exe: &'static str, 53 | ) -> impl Fn(&Test) -> Outcome + 'static + Send + Sync { 54 | move |test| run_test_impl(pikelet_exe, test) 55 | } 56 | 57 | fn run_test_impl(pikelet_exe: &str, test: &Test) -> Outcome { 58 | let mut failures = Vec::new(); 59 | 60 | let config = if test.data.parse_config { 61 | use itertools::Itertools; 62 | 63 | const CONFIG_COMMENT_START: &str = "--!"; 64 | 65 | let input_source = fs::read_to_string(&test.data.input_file).unwrap(); 66 | let config_source = input_source 67 | .lines() 68 | .filter_map(|line| line.split(CONFIG_COMMENT_START).nth(1)) 69 | .join("\n"); 70 | 71 | match toml::from_str(&config_source) { 72 | Ok(config) => config, 73 | Err(error) => { 74 | failures.push(Failure { 75 | name: 
"config parse error".to_owned(), 76 | details: error.to_string(), 77 | }); 78 | 79 | return failures_to_outcome(&failures); 80 | } 81 | } 82 | } else { 83 | Config { 84 | ignore: false, 85 | check: CheckConfig { 86 | enable: true, 87 | validate_core: true, 88 | }, 89 | } 90 | }; 91 | 92 | if config.ignore || (!config.check.enable/* && ... */) { 93 | return Outcome::Ignored; 94 | } 95 | 96 | if config.check.enable { 97 | let output = Command::new(pikelet_exe) 98 | .arg("check") 99 | .arg("--validate-core") 100 | .arg(&test.data.input_file) 101 | .output(); 102 | 103 | match output { 104 | Ok(output) => { 105 | if !output.status.success() { 106 | failures.push(Failure { 107 | name: "exit status".to_owned(), 108 | details: output.status.to_string(), 109 | }); 110 | } 111 | if !output.stdout.is_empty() { 112 | failures.push(Failure { 113 | name: "stdout".to_owned(), 114 | details: String::from_utf8_lossy(&output.stdout).into(), 115 | }); 116 | } 117 | if !output.stderr.is_empty() { 118 | failures.push(Failure { 119 | name: "stderr".to_owned(), 120 | details: String::from_utf8_lossy(&output.stderr).into(), 121 | }); 122 | } 123 | } 124 | Err(error) => failures.push(Failure { 125 | name: "command error".to_owned(), 126 | details: error.to_string(), 127 | }), 128 | } 129 | } 130 | 131 | failures_to_outcome(&failures) 132 | } 133 | 134 | pub struct TestData { 135 | input_file: PathBuf, 136 | parse_config: bool, 137 | } 138 | 139 | #[derive(Deserialize, Debug)] 140 | #[serde(deny_unknown_fields)] 141 | #[serde(rename_all = "kebab-case")] 142 | struct Config { 143 | #[serde(default = "false_value")] 144 | ignore: bool, 145 | #[serde(default)] 146 | check: CheckConfig, 147 | } 148 | 149 | #[derive(Deserialize, Debug)] 150 | #[serde(deny_unknown_fields)] 151 | #[serde(rename_all = "kebab-case")] 152 | struct CheckConfig { 153 | #[serde(default = "false_value")] 154 | enable: bool, 155 | #[serde(default = "true_value")] 156 | validate_core: bool, 157 | } 158 | 159 | impl 
Default for CheckConfig { 160 | fn default() -> Self { 161 | CheckConfig { 162 | enable: false, 163 | validate_core: true, 164 | } 165 | } 166 | } 167 | 168 | fn false_value() -> bool { 169 | false 170 | } 171 | 172 | fn true_value() -> bool { 173 | false 174 | } 175 | 176 | struct Failure { 177 | name: String, 178 | details: String, 179 | } 180 | 181 | fn failures_to_outcome(failures: &[Failure]) -> Outcome { 182 | if failures.is_empty() { 183 | Outcome::Passed 184 | } else { 185 | let mut buffer = String::new(); 186 | 187 | writeln!(buffer).unwrap(); 188 | writeln!(buffer, " failures:").unwrap(); 189 | for failure in failures { 190 | writeln!(buffer).unwrap(); 191 | writeln!(buffer, " ---- {} ----", failure.name).unwrap(); 192 | for line in failure.details.lines() { 193 | writeln!(buffer, " {}", line).unwrap(); 194 | } 195 | } 196 | writeln!(buffer).unwrap(); 197 | writeln!(buffer).unwrap(); 198 | writeln!(buffer, " failures:").unwrap(); 199 | for failure in failures { 200 | writeln!(buffer, " {}", failure.name).unwrap(); 201 | } 202 | 203 | Outcome::Failed { msg: Some(buffer) } 204 | } 205 | } 206 | -------------------------------------------------------------------------------- /pikelet/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pikelet" 3 | version = "0.1.0" 4 | authors = ["Brendan Zabarauskas"] 5 | edition = "2018" 6 | publish = false 7 | description = "The Pikelet programming language" 8 | homepage = "https://github.com/pikelet-lang/pikelet" 9 | repository = "https://github.com/pikelet-lang/pikelet" 10 | readme = "../README.md" 11 | keywords = ["pikelet", "language"] 12 | categories = [] 13 | license = "Apache-2.0" 14 | 15 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 16 | 17 | [dependencies] 18 | codespan-reporting = "0.11" 19 | contracts = "0.6" 20 | crossbeam-channel = "0.5" 21 | fxhash = "0.2" 22 | im = "15" 23 | itertools = 
"0.10" 24 | lalrpop-util = "0.19" 25 | logos = "0.12" 26 | num-traits = "0.2" 27 | once_cell = "1.4" 28 | pretty = "0.10" 29 | regex = "1.3" 30 | 31 | [build-dependencies] 32 | lalrpop = "0.19" 33 | -------------------------------------------------------------------------------- /pikelet/README.md: -------------------------------------------------------------------------------- 1 | # pikelet 2 | 3 | See the [top-level README](../README.md) for more information. 4 | -------------------------------------------------------------------------------- /pikelet/build.rs: -------------------------------------------------------------------------------- 1 | fn main() -> Result<(), Box> { 2 | lalrpop::Configuration::new() 3 | .always_use_colors() 4 | .process_current_dir() 5 | } 6 | -------------------------------------------------------------------------------- /pikelet/src/lang.rs: -------------------------------------------------------------------------------- 1 | //! Intermediate languages of the Pikelet compiler. 2 | 3 | pub mod surface; 4 | // 🠃 5 | pub mod core; 6 | // 🠃 7 | pub mod anf; 8 | // 🠃 9 | pub mod cc; 10 | // 🠃 11 | // ... 12 | 13 | /// File identifier 14 | pub type FileId = usize; 15 | 16 | /// Location metadata, for diagnostic reporting purposes. 17 | #[derive(Debug, Copy, Clone)] 18 | pub enum Location { 19 | /// Generated code. 20 | Generated, 21 | /// Ranges in a text file. 
22 | FileRange(FileId, Range), 23 | } 24 | 25 | impl Location { 26 | pub fn generated() -> Location { 27 | Location::Generated 28 | } 29 | 30 | pub fn file_range(file_id: FileId, range: impl Into) -> Location { 31 | Location::FileRange(file_id, range.into()) 32 | } 33 | 34 | pub fn merge(self, other: Location) -> Location { 35 | match (self, other) { 36 | (Location::Generated, Location::Generated) => Location::Generated, 37 | (Location::FileRange(file_id0, range0), Location::FileRange(file_id1, range1)) => { 38 | assert_eq!( 39 | file_id0, file_id1, 40 | "tried to merge source locations with different file ids" 41 | ); 42 | Location::FileRange(file_id0, Range::merge(range0, range1)) 43 | } 44 | (_, _) => panic!("incompatible source ranges"), 45 | } 46 | } 47 | } 48 | 49 | /// A range of source code. 50 | #[derive(Debug, Copy, Clone)] 51 | pub struct Range { 52 | pub start: usize, 53 | pub end: usize, 54 | } 55 | 56 | impl Range { 57 | pub fn merge(self, other: Range) -> Range { 58 | Range { 59 | start: std::cmp::min(self.start, other.start), 60 | end: std::cmp::max(self.end, other.end), 61 | } 62 | } 63 | } 64 | 65 | impl From> for Range { 66 | fn from(src: std::ops::Range) -> Range { 67 | Range { 68 | start: src.start, 69 | end: src.end, 70 | } 71 | } 72 | } 73 | 74 | impl From for std::ops::Range { 75 | fn from(src: Range) -> std::ops::Range { 76 | src.start..src.end 77 | } 78 | } 79 | 80 | /// Data that covers some range of source code. 
81 | #[derive(Debug, Clone)] 82 | pub struct Located { 83 | pub location: Location, 84 | pub data: Data, 85 | } 86 | 87 | impl Located { 88 | pub fn new(location: Location, data: Data) -> Located { 89 | Located { location, data } 90 | } 91 | 92 | pub fn generated(data: Data) -> Located { 93 | Located::new(Location::generated(), data) 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /pikelet/src/lang/anf.rs: -------------------------------------------------------------------------------- 1 | //! The A-normal form language, with types preserved. 2 | //! 3 | //! This language makes an explicit distinction between _computations_ and 4 | //! _values_, and makes the evaluation indifferent to the order in which 5 | //! computations are executed (somewhat like [applicative functors in 6 | //! Haskell][applicative-functors]). It does this through alterations to the 7 | //! syntactic structure of the [core language][crate::lang::core], while 8 | //! avoiding making many significant changes to the type structure which 9 | //! would make type preservation more challenging. 10 | //! 11 | //! The main inspiration for this language is William Bowman's dissertation, 12 | //! [Compiling with Dependent Types][wjb-dissertation]. 13 | //! 14 | //! Note: the 'A' in 'A-Normal Form' does not stand for anything, at least 15 | //! [according to one of the original authors, Matthias Felleisen][just-a]. 16 | //! I really wish there was a better name for this language. 17 | //! 18 | //! [applicative-functors]: https://wiki.haskell.org/Applicative_functor 19 | //! [wjb-dissertation]: https://www.williamjbowman.com/resources/wjb-dissertation.pdf 20 | //! [just-a]: https://vimeo.com/387739817 21 | 22 | pub use crate::lang::core::{Constant, VarIndex}; 23 | 24 | /// Values are terms that do not reduce. 25 | pub enum Value { 26 | /// Global variables. 27 | Global(String), 28 | /// Variables. 
29 | Var(VarIndex), 30 | 31 | /// Annotated values 32 | Ann(Box, Box), 33 | 34 | /// The type of types. 35 | TypeType, 36 | 37 | /// Function types. 38 | /// 39 | /// Also known as: pi type, dependent product type. 40 | FunctionType(Option, Box, Box), 41 | /// Function terms. 42 | /// 43 | /// Also known as: lambda abstraction, anonymous function. 44 | FunctionTerm(String, Box), 45 | 46 | /// Record types. 47 | RecordType(Vec<(String, Box)>), 48 | /// Record terms. 49 | RecordTerm(Vec<(String, Box)>), 50 | 51 | /// Constants. 52 | Constant(Constant), 53 | 54 | /// Error sentinel. 55 | Error, 56 | } 57 | 58 | impl From for Value { 59 | fn from(constant: Constant) -> Value { 60 | Value::Constant(constant) 61 | } 62 | } 63 | 64 | /// Computations eliminate values. 65 | pub enum Computation { 66 | /// Values. 67 | Value(Box), 68 | /// Function eliminations. 69 | /// 70 | /// Also known as: function application. 71 | FunctionElim(Box, Box), 72 | /// Record eliminations. 73 | /// 74 | /// Also known as: record projection, field lookup. 75 | RecordElim(Box, String), 76 | } 77 | 78 | /// Programs that are ready to be executed. 79 | pub struct Configuration { 80 | /// A list of computations to be used when we execute this program. 81 | pub bindings: Vec, 82 | /// The final output of the program. 83 | pub output: Computation, 84 | } 85 | -------------------------------------------------------------------------------- /pikelet/src/lang/cc.rs: -------------------------------------------------------------------------------- 1 | //! The closure converted language, with types preserved. 2 | //! 3 | //! This language makes an explicit distinction between the _use_ of a 4 | //! computation and the _definition_ of a computation. In doing this we make 5 | //! implicit environment capture explicit through the use of an explicit 6 | //! closure object, which holds the closed code and the local environment. 7 | //! 8 | //! 
The main inspiration for this language is William Bowman's dissertation, 9 | //! [Compiling with Dependent Types][wjb-dissertation]. 10 | //! 11 | //! [wjb-dissertation](https://www.williamjbowman.com/resources/wjb-dissertation.pdf) 12 | 13 | // TODO: Define syntax 14 | -------------------------------------------------------------------------------- /pikelet/src/lang/core.rs: -------------------------------------------------------------------------------- 1 | //! The core language. 2 | //! 3 | //! This is not intended to be used directly by users of the programming 4 | //! language. 5 | 6 | use fxhash::FxHashMap; 7 | use std::fmt; 8 | use std::sync::Arc; 9 | 10 | use crate::lang::Located; 11 | 12 | pub mod marshall; 13 | pub mod semantics; 14 | pub mod typing; 15 | 16 | /// Constants used in the core language. 17 | // FIXME: Partial eq for floating point numbers 18 | #[derive(Clone, Debug, PartialEq)] 19 | pub enum Constant { 20 | /// 8-bit unsigned integers. 21 | U8(u8), 22 | /// 16-bit unsigned integers. 23 | U16(u16), 24 | /// 32-bit unsigned integers. 25 | U32(u32), 26 | /// 64-bit unsigned integers. 27 | U64(u64), 28 | /// 8-bit signed [two's complement] integers. 29 | /// 30 | /// [two's complement]: https://en.wikipedia.org/wiki/Two%27s_complement 31 | S8(i8), 32 | /// 16-bit signed [two's complement] integers. 33 | /// 34 | /// [two's complement]: https://en.wikipedia.org/wiki/Two%27s_complement 35 | S16(i16), 36 | /// 32-bit signed [two's complement] integers. 37 | /// 38 | /// [two's complement]: https://en.wikipedia.org/wiki/Two%27s_complement 39 | S32(i32), 40 | /// 64-bit signed [two's complement] integers. 41 | /// 42 | /// [two's complement]: https://en.wikipedia.org/wiki/Two%27s_complement 43 | S64(i64), 44 | /// 32-bit [IEEE-754] floating point numbers. 45 | /// 46 | /// [IEEE-754]: https://en.wikipedia.org/wiki/IEEE_754 47 | F32(f32), 48 | /// 64-bit [IEEE-754] floating point numbers. 
49 | /// 50 | /// [IEEE-754]: https://en.wikipedia.org/wiki/IEEE_754 51 | F64(f64), 52 | /// [Unicode scalar values](http://www.unicode.org/glossary/#unicode_scalar_value). 53 | Char(char), 54 | /// [UTF-8] encoded strings. 55 | /// 56 | /// [UTF-8]: http://www.unicode.org/glossary/#UTF_8 57 | String(String), 58 | } 59 | 60 | pub type Term = Located; 61 | 62 | /// Terms in the core language. 63 | #[derive(Clone, Debug)] 64 | pub enum TermData { 65 | /// Global variables. 66 | Global(String), 67 | /// Local variables. 68 | Var(VarIndex), 69 | 70 | /// Annotated terms 71 | Ann(Arc, Arc), 72 | 73 | /// The type of types. 74 | TypeType, 75 | 76 | /// Function types. 77 | /// 78 | /// Also known as: pi type, dependent product type. 79 | FunctionType(Option, Arc, Arc), 80 | /// Function terms. 81 | /// 82 | /// Also known as: lambda abstraction, anonymous function. 83 | FunctionTerm(String, Arc), 84 | /// Function eliminations. 85 | /// 86 | /// Also known as: function application. 87 | FunctionElim(Arc, Arc), 88 | 89 | /// Record types. 90 | RecordType(Arc<[String]>, Arc<[Arc]>), 91 | /// Record terms. 92 | RecordTerm(Arc<[String]>, Arc<[Arc]>), 93 | /// Record eliminations. 94 | /// 95 | /// Also known as: record projection, field lookup. 96 | RecordElim(Arc, String), 97 | 98 | /// Array terms. 99 | ArrayTerm(Vec>), 100 | /// List terms. 101 | ListTerm(Vec>), 102 | 103 | /// Constants. 104 | Constant(Constant), 105 | 106 | /// Error sentinel. 107 | Error, 108 | } 109 | 110 | impl From for TermData { 111 | fn from(constant: Constant) -> TermData { 112 | TermData::Constant(constant) 113 | } 114 | } 115 | 116 | /// An environment of global definitions. 
117 | pub struct Globals { 118 | entries: FxHashMap, Option>)>, 119 | } 120 | 121 | impl Globals { 122 | pub fn new(entries: FxHashMap, Option>)>) -> Globals { 123 | Globals { entries } 124 | } 125 | 126 | pub fn get(&self, name: &str) -> Option<&(Arc, Option>)> { 127 | self.entries.get(name) 128 | } 129 | 130 | pub fn entries(&self) -> impl Iterator, Option>))> { 131 | self.entries.iter() 132 | } 133 | } 134 | 135 | impl Default for Globals { 136 | fn default() -> Globals { 137 | let mut entries = FxHashMap::default(); 138 | 139 | let global = |name: &str| Arc::new(Term::generated(TermData::Global(name.to_owned()))); 140 | let type_type = || Arc::new(Term::generated(TermData::TypeType)); 141 | let function_type = |input_type, output_type| { 142 | Arc::new(Term::generated(TermData::FunctionType( 143 | None, 144 | input_type, 145 | output_type, 146 | ))) 147 | }; 148 | 149 | entries.insert("Type".to_owned(), (type_type(), Some(type_type()))); 150 | entries.insert("Bool".to_owned(), (global("Type"), None)); 151 | entries.insert("U8".to_owned(), (global("Type"), None)); 152 | entries.insert("U16".to_owned(), (global("Type"), None)); 153 | entries.insert("U32".to_owned(), (global("Type"), None)); 154 | entries.insert("U64".to_owned(), (global("Type"), None)); 155 | entries.insert("S8".to_owned(), (global("Type"), None)); 156 | entries.insert("S16".to_owned(), (global("Type"), None)); 157 | entries.insert("S32".to_owned(), (global("Type"), None)); 158 | entries.insert("S64".to_owned(), (global("Type"), None)); 159 | entries.insert("F32".to_owned(), (global("Type"), None)); 160 | entries.insert("F64".to_owned(), (global("Type"), None)); 161 | entries.insert("Char".to_owned(), (global("Type"), None)); 162 | entries.insert("String".to_owned(), (global("Type"), None)); 163 | entries.insert("true".to_owned(), (global("Bool"), None)); 164 | entries.insert("false".to_owned(), (global("Bool"), None)); 165 | entries.insert( 166 | "Array".to_owned(), 167 | ( 168 | 
function_type(global("U32"), function_type(type_type(), type_type())), 169 | None, 170 | ), 171 | ); 172 | entries.insert( 173 | "List".to_owned(), 174 | (function_type(type_type(), type_type()), None), 175 | ); 176 | 177 | Globals::new(entries) 178 | } 179 | } 180 | 181 | /// A [de Bruijn index][de-bruijn-index] in the current [environment]. 182 | /// 183 | /// De Bruijn indices describe an occurrence of a variable in terms of the 184 | /// number of binders between the occurrence and its associated binder. 185 | /// For example: 186 | /// 187 | /// | Representation | Example (S combinator) | 188 | /// | ----------------- | ----------------------- | 189 | /// | Named | `λx. λy. λz. x z (y z)` | 190 | /// | De Bruijn indices | `λ_. λ_. λ_. 2 0 (1 0)` | 191 | /// 192 | /// This is a helpful representation because it allows us to easily compare 193 | /// terms for equivalence based on their binding structure without maintaining a 194 | /// list of name substitutions. For example we want `λx. x` to be the same as 195 | /// `λy. y`. With de Bruijn indices these would both be described as `λ 0`. 196 | /// 197 | /// [environment]: `Env` 198 | /// [de-bruijn-index]: https://en.wikipedia.org/wiki/De_Bruijn_index 199 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 200 | pub struct VarIndex(u32); 201 | 202 | impl VarIndex { 203 | /// Convert the variable index to a `usize`. 204 | pub fn to_usize(self) -> usize { 205 | self.0 as usize 206 | } 207 | } 208 | 209 | /// An infinite iterator of variable indices. 210 | pub fn var_indices() -> impl Iterator { 211 | (0..).map(VarIndex) 212 | } 213 | 214 | /// A de Bruijn level in the current [environment]. 215 | /// 216 | /// This describes an occurrence of a variable by counting the binders inwards 217 | /// from the top of the term until the occurrence is reached. For example: 218 | /// 219 | /// | Representation | Example (S combinator) | 220 | /// | ----------------- | ----------------------- | 221 | /// | Named | `λx. λy. λz. 
x z (y z)` | 222 | /// | De Bruijn levels | `λ_. λ_. λ_. 0 2 (1 2)` | 223 | /// 224 | /// Levels are used in [values][semantics::Value] because they are not context- 225 | /// dependent (this is in contrast to [indices][LocalIndex]). Because of this, 226 | /// we're able to sidestep the need for expensive variable shifting in the 227 | /// semantics. More information can be found in Soham Chowdhury's blog post, 228 | /// “[Real-world type theory I: untyped normalisation by evaluation for λ-calculus][untyped-nbe-for-lc]”. 229 | /// 230 | /// [environment]: `Env` 231 | /// [untyped-nbe-for-lc]: https://colimit.net/posts/normalisation-by-evaluation/ 232 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 233 | pub struct VarLevel(u32); 234 | 235 | impl VarLevel { 236 | /// Convert the variable level to a `usize`. 237 | pub fn to_usize(self) -> usize { 238 | self.0 as usize 239 | } 240 | } 241 | 242 | /// The number of entries in a [environment]. 243 | /// 244 | /// This is used for [index-to-level] and [level-to-index] conversions. 245 | /// 246 | /// Rather than using the actual environment in [read-back] and [conversion 247 | /// checking], it is more efficient to simply increment this count. This could 248 | /// be thought of as an 'erased environment' where the only thing we care about 249 | /// is how many entries are contained within it. 250 | /// 251 | /// [environment]: `Env` 252 | /// [index-to-level]: `EnvSize::index_to_level` 253 | /// [level-to-index]: `EnvSize::level_to_index` 254 | /// [readback]: `semantics::read_back` 255 | /// [conversion checking]: `semantics::is_equal` 256 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 257 | pub struct EnvSize(u32); 258 | 259 | impl EnvSize { 260 | /// Convert the size to a `usize`. 261 | pub fn to_usize(self) -> usize { 262 | self.0 as usize 263 | } 264 | 265 | /// Get the next size in the environment. 
266 | pub fn next_size(self) -> EnvSize { 267 | EnvSize(self.0 + 1) 268 | } 269 | 270 | /// Return the level of the next variable to be added to the environment. 271 | pub fn next_level(self) -> VarLevel { 272 | VarLevel(self.0) 273 | } 274 | 275 | /// Convert a variable index to a variable level in the current environment. 276 | /// 277 | /// `None` is returned if the environment is not large enough to 278 | /// contain the variable. 279 | pub fn index_to_level(self, index: VarIndex) -> Option { 280 | Some(VarLevel(self.0.checked_sub(index.0)?.checked_sub(1)?)) 281 | } 282 | 283 | /// Convert a variable level to a variable index in the current environment. 284 | /// 285 | /// `None` is returned if the environment is not large enough to 286 | /// contain the variable. 287 | pub fn level_to_index(self, level: VarLevel) -> Option { 288 | Some(VarIndex(self.0.checked_sub(level.0)?.checked_sub(1)?)) 289 | } 290 | } 291 | 292 | /// An environment, backed by a persistent vector. 293 | /// 294 | /// Prefer mutating this in place, but if necessary this can be cloned in order 295 | /// to maintain a degree of sharing between copies. 296 | #[derive(Clone)] 297 | pub struct Env { 298 | /// The entries that are currently defined in the environment. 299 | entries: im::Vector, 300 | } 301 | 302 | impl Env { 303 | /// Create a new environment. 304 | pub fn new() -> Env { 305 | Env { 306 | entries: im::Vector::new(), 307 | } 308 | } 309 | 310 | /// Get the size of the environment. 311 | pub fn size(&self) -> EnvSize { 312 | EnvSize(self.entries.len() as u32) 313 | } 314 | 315 | /// Convert a variable index to a variable level in the current environment. 316 | /// 317 | /// `None` is returned if the environment is not large enough to 318 | /// contain the variable. 319 | pub fn index_to_level(&self, index: VarIndex) -> Option { 320 | self.size().index_to_level(index) 321 | } 322 | 323 | /// Lookup an entry in the environment. 
324 | pub fn get(&self, index: VarIndex) -> Option<&Entry> { 325 | let level = self.index_to_level(index)?; 326 | self.entries.get(level.0 as usize) 327 | } 328 | 329 | /// Push an entry onto the environment. 330 | pub fn push(&mut self, entry: Entry) { 331 | self.entries.push_back(entry); // FIXME: Check for `u32` overflow? 332 | } 333 | 334 | /// Pop an entry off the environment. 335 | pub fn pop(&mut self) -> Option { 336 | self.entries.pop_back() 337 | } 338 | 339 | /// Truncate the environment to the given environment size. 340 | pub fn truncate(&mut self, env_size: EnvSize) { 341 | self.entries.truncate(env_size.to_usize()); 342 | } 343 | 344 | /// Clear the entries from the environment. 345 | pub fn clear(&mut self) { 346 | self.entries.clear(); 347 | } 348 | } 349 | 350 | impl fmt::Debug for Env { 351 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 352 | f.debug_struct("Locals") 353 | .field("entries", &self.entries) 354 | .finish() 355 | } 356 | } 357 | -------------------------------------------------------------------------------- /pikelet/src/lang/core/marshall.rs: -------------------------------------------------------------------------------- 1 | //! Marshalling API between Rust types and Pikelet's core language. 2 | 3 | use std::sync::Arc; 4 | 5 | use crate::lang::core::{Constant, Term, TermData}; 6 | 7 | pub trait HasType { 8 | fn r#type() -> Arc; 9 | } 10 | 11 | macro_rules! 
impl_has_type { 12 | ($Self:ty, $term:expr) => { 13 | impl HasType for $Self { 14 | fn r#type() -> Arc { 15 | Arc::new($term) 16 | } 17 | } 18 | }; 19 | } 20 | 21 | impl_has_type!(bool, Term::generated(TermData::Global("Bool".to_owned()))); 22 | impl_has_type!(u8, Term::generated(TermData::Global("U8".to_owned()))); 23 | impl_has_type!(u16, Term::generated(TermData::Global("U16".to_owned()))); 24 | impl_has_type!(u32, Term::generated(TermData::Global("U32".to_owned()))); 25 | impl_has_type!(u64, Term::generated(TermData::Global("U64".to_owned()))); 26 | impl_has_type!(i8, Term::generated(TermData::Global("S8".to_owned()))); 27 | impl_has_type!(i16, Term::generated(TermData::Global("S16".to_owned()))); 28 | impl_has_type!(i32, Term::generated(TermData::Global("S32".to_owned()))); 29 | impl_has_type!(i64, Term::generated(TermData::Global("S64".to_owned()))); 30 | impl_has_type!(f32, Term::generated(TermData::Global("F32".to_owned()))); 31 | impl_has_type!(f64, Term::generated(TermData::Global("F64".to_owned()))); 32 | impl_has_type!(char, Term::generated(TermData::Global("Char".to_owned()))); 33 | impl_has_type!( 34 | String, 35 | Term::generated(TermData::Global("String".to_owned())) 36 | ); 37 | impl_has_type!(str, Term::generated(TermData::Global("String".to_owned()))); 38 | 39 | impl HasType for Vec { 40 | fn r#type() -> Arc { 41 | Arc::new(Term::generated(TermData::FunctionElim( 42 | Arc::new(Term::generated(TermData::Global("List".to_owned()))), 43 | T::r#type(), 44 | ))) 45 | } 46 | } 47 | 48 | impl HasType for [T; LEN] { 49 | fn r#type() -> Arc { 50 | Arc::new(Term::generated(TermData::FunctionElim( 51 | Arc::new(Term::generated(TermData::FunctionElim( 52 | Arc::new(Term::generated(TermData::Global("List".to_owned()))), 53 | Arc::new(Term::generated(TermData::from(Constant::U32(LEN as u32)))), // FIXME: this could overflow! 54 | ))), 55 | T::r#type(), 56 | ))) 57 | } 58 | } 59 | 60 | /// Attempt to deserialize something from a `Term`. 
61 | /// 62 | /// # Laws 63 | /// 64 | /// ```skipped 65 | /// check_type(&term, &Self::r#type()) && Self::try_from_term(term).is_ok() 66 | /// ``` 67 | // TODO: Make more efficient with visitors 68 | pub trait TryFromTerm: HasType + Sized { 69 | type Error: Sized; 70 | fn try_from_term(term: &Term) -> Result; 71 | } 72 | 73 | macro_rules! impl_try_from_term { 74 | ($Self:ty, |$p:pat| $term:expr) => { 75 | impl TryFromTerm for $Self { 76 | type Error = (); 77 | 78 | fn try_from_term(term: &Term) -> Result<$Self, ()> { 79 | match &term.data { 80 | $p => $term, 81 | _ => Err(()), 82 | } 83 | } 84 | } 85 | }; 86 | } 87 | 88 | impl_try_from_term!(bool, |TermData::Global(name)| match name.as_str() { 89 | "true" => Ok(true), 90 | "false" => Ok(false), 91 | _ => Err(()), 92 | }); 93 | impl_try_from_term!(u8, |TermData::Constant(Constant::U8(value))| Ok(*value)); 94 | impl_try_from_term!(u16, |TermData::Constant(Constant::U16(value))| Ok(*value)); 95 | impl_try_from_term!(u32, |TermData::Constant(Constant::U32(value))| Ok(*value)); 96 | impl_try_from_term!(u64, |TermData::Constant(Constant::U64(value))| Ok(*value)); 97 | impl_try_from_term!(i8, |TermData::Constant(Constant::S8(value))| Ok(*value)); 98 | impl_try_from_term!(i16, |TermData::Constant(Constant::S16(value))| Ok(*value)); 99 | impl_try_from_term!(i32, |TermData::Constant(Constant::S32(value))| Ok(*value)); 100 | impl_try_from_term!(i64, |TermData::Constant(Constant::S64(value))| Ok(*value)); 101 | impl_try_from_term!(f32, |TermData::Constant(Constant::F32(value))| Ok(*value)); 102 | impl_try_from_term!(f64, |TermData::Constant(Constant::F64(value))| Ok(*value)); 103 | impl_try_from_term!(char, |TermData::Constant(Constant::Char(value))| Ok(*value)); 104 | impl_try_from_term!(String, |TermData::Constant(Constant::String(value))| Ok( 105 | value.clone(), 106 | )); 107 | 108 | impl TryFromTerm for Vec { 109 | type Error = (); 110 | 111 | fn try_from_term(term: &Term) -> Result, ()> { 112 | match &term.data { 113 | 
TermData::ListTerm(entry_terms) => entry_terms 114 | .iter() 115 | .map(|entry_term| T::try_from_term(entry_term).map_err(|_| ())) 116 | .collect::, ()>>(), 117 | _ => Err(()), 118 | } 119 | } 120 | } 121 | 122 | impl TryFromTerm for [T; LEN] { 123 | type Error = (); 124 | 125 | fn try_from_term(term: &Term) -> Result<[T; LEN], ()> { 126 | match &term.data { 127 | TermData::ArrayTerm(entry_terms) if entry_terms.len() == LEN => { 128 | use std::mem::MaybeUninit; 129 | 130 | let mut entries: [MaybeUninit; LEN] = 131 | unsafe { MaybeUninit::uninit().assume_init() }; 132 | for (i, entry_term) in entry_terms.iter().enumerate() { 133 | entries[i] = MaybeUninit::new(T::try_from_term(entry_term).map_err(|_| ())?); 134 | } 135 | 136 | // NOTE: We'd prefer to do the following: 137 | // 138 | // ``` 139 | // Ok(unsafe { std::mem::transmute::<_, [T; LEN]>(entries) }) 140 | // ``` 141 | // 142 | // Sadly we run into the following issue: https://github.com/rust-lang/rust/issues/61956 143 | // For this reason we need to do the following (hideous) workaround: 144 | 145 | let ptr = &mut entries as *mut _ as *mut [T; LEN]; 146 | let result = unsafe { ptr.read() }; 147 | core::mem::forget(entries); 148 | Ok(result) 149 | } 150 | _ => Err(()), 151 | } 152 | } 153 | } 154 | 155 | /// Serialize something to a `Term`. 156 | /// 157 | /// # Laws 158 | /// 159 | /// ```skipped 160 | /// check_type(&Self::to_term(&value), &Self::r#type()) == true 161 | /// ``` 162 | // TODO: Make more efficient with visitors 163 | pub trait ToTerm: HasType { 164 | fn to_term(&self) -> Term; 165 | } 166 | 167 | macro_rules! 
impl_to_term {
    ($Self:ty, |$p:pat| $term_data:expr) => {
        impl ToTerm for $Self {
            fn to_term(&self) -> Term {
                // Bind the macro's pattern to `self` so that `$term_data`
                // can refer to the value being serialized.
                let $p = self;
                Term::generated($term_data)
            }
        }
    };
}

// Booleans serialize to the `true`/`false` globals, matching the
// representation expected by `TryFromTerm for bool`.
impl_to_term!(bool, |value| match value {
    true => TermData::Global("true".to_owned()),
    false => TermData::Global("false".to_owned()),
});
impl_to_term!(u8, |value| TermData::from(Constant::U8(*value)));
impl_to_term!(u16, |value| TermData::from(Constant::U16(*value)));
impl_to_term!(u32, |value| TermData::from(Constant::U32(*value)));
impl_to_term!(u64, |value| TermData::from(Constant::U64(*value)));
impl_to_term!(i8, |value| TermData::from(Constant::S8(*value)));
impl_to_term!(i16, |value| TermData::from(Constant::S16(*value)));
impl_to_term!(i32, |value| TermData::from(Constant::S32(*value)));
impl_to_term!(i64, |value| TermData::from(Constant::S64(*value)));
impl_to_term!(f32, |value| TermData::from(Constant::F32(*value)));
impl_to_term!(f64, |value| TermData::from(Constant::F64(*value)));
impl_to_term!(char, |value| TermData::from(Constant::Char(*value)));
impl_to_term!(String, |value| TermData::from(Constant::String(
    value.clone()
)));
impl_to_term!(str, |value| TermData::from(Constant::String(
    value.to_owned()
)));

// Vectors serialize to list terms.
impl<T: ToTerm> ToTerm for Vec<T> {
    fn to_term(&self) -> Term {
        Term::generated(TermData::ListTerm(
            self.iter().map(T::to_term).map(Arc::new).collect(),
        ))
    }
}

// Fixed-length arrays serialize to array terms.
impl<T: ToTerm, const LEN: usize> ToTerm for [T; LEN] {
    fn to_term(&self) -> Term {
        Term::generated(TermData::ArrayTerm(
            self.iter().map(T::to_term).map(Arc::new).collect(),
        ))
    }
}
--------------------------------------------------------------------------------
/pikelet/src/lang/core/typing.rs:
--------------------------------------------------------------------------------
//!
Bidirectional type checker for the [core language].
//!
//! This is a simpler implementation of type checking than the one found in
//! [`pass::surface_to_core`], because it only needs to check the (much simpler)
//! core language, and doesn't need to perform any additional elaboration.
//! We can use it as a way to validate that elaborated terms are well-formed
//! for debugging and development purposes.
//!
//! [core language]: crate::lang::core
//! [`pass::surface_to_core`]: crate::pass::surface_to_core

use contracts::debug_ensures;
use crossbeam_channel::Sender;
use std::sync::Arc;

use crate::lang::core::semantics::{self, Elim, Unfold, Value};
use crate::lang::core::{Constant, Env, EnvSize, Globals, Term, TermData, VarIndex};
use crate::reporting::{AmbiguousTerm, CoreTypingMessage, ExpectedType, Message};

/// Type checking context.
pub struct Context<'globals> {
    /// Global definition environment.
    globals: &'globals Globals,
    /// Type environment (used for getting the types of variables).
    types: Vec<Arc<Value>>,
    /// Value environment (used for evaluation).
    values: Env<Arc<Value>>,
    /// The diagnostic messages accumulated during type checking.
    message_tx: Sender<Message>,
}

impl<'globals> Context<'globals> {
    /// Construct a new type checker state.
    pub fn new(globals: &'globals Globals, message_tx: Sender<Message>) -> Context<'globals> {
        Context {
            globals,
            types: Vec::new(),
            values: Env::new(),
            message_tx,
        }
    }

    /// Get the number of values in the context.
    fn size(&self) -> EnvSize {
        self.values.size()
    }

    /// Get the type of a variable.
    fn get_type(&self, index: VarIndex) -> Option<&Arc<Value>> {
        // `types` is stored in level order, so the index must be converted
        // relative to the current environment size.
        let level = self.size().index_to_level(index)?;
        self.types.get(level.to_usize())
    }

    /// Push a new definition onto the context, along with its type annotation.
    ///
    /// `types` and `values` are kept in lockstep: every push/pop/truncate
    /// touches both.
    fn push_definition(&mut self, value: Arc<Value>, r#type: Arc<Value>) {
        self.types.push(r#type);
        self.values.push(value);
    }

    /// Push a variable onto the context.
    fn push_variable(&mut self, r#type: Arc<Value>) -> Arc<Value> {
        // A fresh variable is defined as itself: a neutral value at the next
        // level, with no eliminations applied.
        let value = Arc::new(Value::var(self.size().next_level(), []));
        self.push_definition(value.clone(), r#type);
        value
    }

    /// Pop a scope off the context.
    fn pop_scope(&mut self) {
        self.types.pop();
        self.values.pop();
    }

    /// Truncate the scopes in the context to the given size.
    fn truncate_scopes(&mut self, env_size: EnvSize) {
        self.types.truncate(env_size.to_usize());
        self.values.truncate(env_size);
    }

    /// Report a diagnostic message.
    fn report(&self, message: CoreTypingMessage) {
        // Panics if the receiving end of the channel has been dropped.
        self.message_tx.send(message.into()).unwrap();
    }

    /// Evaluate a [`Term`] into a [`Value`].
    ///
    /// [`Value`]: crate::lang::core::semantics::Value
    /// [`Term`]: crate::lang::core::Term
    pub fn eval(&mut self, term: &Term) -> Arc<Value> {
        semantics::eval(self.globals, &mut self.values, term)
    }

    /// Return the type of the record elimination.
    pub fn record_elim_type(
        &mut self,
        head_term: &Term,
        head_type: &Arc<Value>,
        label: &str,
    ) -> Option<Arc<Value>> {
        semantics::record_elim_type(self.globals, &mut self.values, head_term, head_type, label)
    }

    /// Read back a value into a normal form using the current state of the elaborator.
    pub fn read_back(&self, value: &Value) -> Term {
        semantics::read_back(self.globals, self.size(), Unfold::Never, value)
    }

    /// Check that one [`Value`] is computationally equal to another [`Value`].
    ///
    /// Returns `false` if either value is not a type.
    ///
    /// [`Value`]: crate::lang::core::semantics::Value
    pub fn is_equal(&self, value0: &Value, value1: &Value) -> bool {
        semantics::is_equal(self.globals, self.size(), value0, value1)
    }

    /// Check that a term is a type.
    #[debug_ensures(self.types.len() == old(self.types.len()))]
    #[debug_ensures(self.values.size() == old(self.values.size()))]
    pub fn is_type(&mut self, term: &Term) -> bool {
        let r#type = self.synth_type(term);
        match r#type.force(self.globals) {
            Value::TypeType => true,
            // An error was already reported while synthesizing, so stay quiet.
            Value::Error => false,
            _ => {
                self.report(CoreTypingMessage::MismatchedTypes {
                    found_type: self.read_back(&r#type),
                    expected_type: ExpectedType::Universe,
                });
                false
            }
        }
    }

    /// Check that a term is an element of a type (the "checking" direction of
    /// bidirectional typing).
    #[debug_ensures(self.types.len() == old(self.types.len()))]
    #[debug_ensures(self.values.size() == old(self.values.size()))]
    pub fn check_type(&mut self, term: &Term, expected_type: &Arc<Value>) {
        match (&term.data, expected_type.force(self.globals)) {
            // Avoid cascading diagnostics when the expected type is already
            // known to be erroneous.
            (_, Value::Error) => {}

            (
                TermData::FunctionTerm(_, output_term),
                Value::FunctionType(_, input_type, output_closure),
            ) => {
                let input_term = self.push_variable(input_type.clone());
                let output_type = output_closure.apply(self.globals, input_term);
                self.check_type(output_term, &output_type);
                self.pop_scope();
            }
            (TermData::FunctionTerm(_, _), _) => {
                self.report(CoreTypingMessage::TooManyInputsInFunctionTerm);
            }

            (TermData::RecordTerm(term_labels, terms), Value::RecordType(type_labels, closure)) => {
                if term_labels.len() != terms.len() {
                    self.report(CoreTypingMessage::InvalidRecordTermLabelCount);
                    return;
                }
                // Labels must match the record type exactly, in order.
                if term_labels != type_labels {
                    self.report(CoreTypingMessage::UnexpectedRecordTermLabels {
                        found_labels: term_labels.clone(),
                        expected_labels: type_labels.clone(),
                    });
                    return;
                }

                let initial_size = self.size();
                let mut pending_terms = terms.iter();

                // Each entry is checked against its (dependent) type and then
                // pushed as a definition so that later entry types may refer
                // to it.
                closure.for_each_entry(self.globals, |r#type| match pending_terms.next() {
                    Some(term) => {
                        self.check_type(&term, &r#type);
                        let value = self.eval(&term);
                        self.push_definition(value.clone(), r#type);
                        value
                    }
                    None => Arc::new(Value::Error),
                });

                self.truncate_scopes(initial_size);
            }

            (TermData::ArrayTerm(entry_terms), forced_type) => match forced_type.try_global() {
                Some(("Array", [Elim::Function(len), Elim::Function(entry_type)])) => {
                    let forced_entry_type = entry_type.force(self.globals);
                    for entry_term in entry_terms {
                        self.check_type(entry_term, forced_entry_type);
                    }

                    // The length argument of the `Array` type must agree with
                    // the actual number of entries.
                    match len.force(self.globals).as_ref() {
                        Value::Constant(Constant::U32(len))
                            if *len as usize == entry_terms.len() => {}
                        _ => {
                            self.report(CoreTypingMessage::MismatchedTypes {
                                expected_type: ExpectedType::Type(self.read_back(expected_type)),
                                found_type: self.read_back(&Value::global(
                                    "Array",
                                    [
                                        Elim::Function(len.clone()),
                                        Elim::Function(entry_type.clone()),
                                    ],
                                )),
                            });
                        }
                    }
                }
                Some(_) | None => self.report(CoreTypingMessage::UnexpectedArrayTerm {
                    expected_type: self.read_back(expected_type),
                }),
            },
            (TermData::ListTerm(entry_terms), forced_type) => match forced_type.try_global() {
                Some(("List", [Elim::Function(entry_type)])) => {
                    let forced_entry_type = entry_type.force(self.globals);
                    for entry_term in entry_terms {
                        self.check_type(entry_term, forced_entry_type);
                    }
                }
                Some(_) | None => self.report(CoreTypingMessage::UnexpectedListTerm {
                    expected_type: self.read_back(expected_type),
                }),
            },

            // Fall back to synthesis followed by a conversion check.
            (_, _) => match self.synth_type(term) {
                found_type if self.is_equal(&found_type, expected_type) => {}
                found_type => self.report(CoreTypingMessage::MismatchedTypes {
                    found_type: self.read_back(&found_type),
                    expected_type: ExpectedType::Type(self.read_back(expected_type)),
                }),
            },
        }
    }

    /// Synthesize the type of a term (the "inference" direction of
    /// bidirectional typing).
    #[debug_ensures(self.types.len() == old(self.types.len()))]
    #[debug_ensures(self.values.size() == old(self.values.size()))]
    pub fn synth_type(&mut self, term: &Term) -> Arc<Value> {
        match &term.data {
            TermData::Global(name) => match self.globals.get(name) {
                Some((r#type, _)) => self.eval(r#type),
                None => {
                    self.report(CoreTypingMessage::UnboundGlobal {
                        name: name.to_owned(),
                    });
                    Arc::new(Value::Error)
                }
            },
            TermData::Var(index) => match self.get_type(*index) {
                Some(r#type) => r#type.clone(),
                None => {
                    self.report(CoreTypingMessage::UnboundVar);
                    Arc::new(Value::Error)
                }
            },

            TermData::Ann(term, r#type) => {
                if !self.is_type(r#type) {
                    return Arc::new(Value::Error);
                }
                let r#type = self.eval(r#type);
                self.check_type(term, &r#type);
                r#type
            }

            // NOTE: type-in-type — this checker has no universe hierarchy.
            TermData::TypeType => Arc::new(Value::TypeType),

            TermData::FunctionType(_, input_type, output_type) => {
                if !self.is_type(input_type) {
                    return Arc::new(Value::Error);
                }
                let input_type = self.eval(input_type);

                // The output type may mention the input variable.
                self.push_variable(input_type);
                if !self.is_type(output_type) {
                    self.pop_scope();
                    return Arc::new(Value::Error);
                }
                self.pop_scope();
                Arc::new(Value::TypeType)
            }
            TermData::FunctionTerm(_, _) => {
                // Un-annotated lambdas cannot have their type synthesized.
                self.report(CoreTypingMessage::AmbiguousTerm {
                    term: AmbiguousTerm::FunctionTerm,
                });
                Arc::new(Value::Error)
            }
            TermData::FunctionElim(head_term, input_term) => {
                let head_type = self.synth_type(head_term);
                match head_type.force(self.globals) {
                    Value::FunctionType(_, input_type, output_closure) => {
                        self.check_type(input_term, &input_type);
                        let input_value = self.eval(input_term);
                        output_closure.apply(self.globals, input_value)
                    }
                    Value::Error => Arc::new(Value::Error),
                    _ => {
                        self.report(CoreTypingMessage::TooManyInputsInFunctionElim {
                            head_type: self.read_back(&head_type),
                        });
                        Arc::new(Value::Error)
                    }
                }
            }

            TermData::RecordTerm(_, _) => {
                // Record terms cannot be synthesized without an expected type.
                self.report(CoreTypingMessage::AmbiguousTerm {
                    term: AmbiguousTerm::RecordTerm,
                });
                Arc::new(Value::Error)
            }
            TermData::RecordType(labels, types) => {
                use std::collections::BTreeSet;

                if labels.len() != types.len() {
                    self.report(CoreTypingMessage::InvalidRecordTypeLabelCount);
                    return Arc::new(Value::Error);
                }

                let initial_size = self.size();
                let mut duplicate_labels = Vec::new();
                let mut seen_labels = BTreeSet::new();

                for (name, r#type) in Iterator::zip(labels.iter(), types.iter()) {
                    if !seen_labels.insert(name) {
                        duplicate_labels.push(name.clone());
                    }
                    if !self.is_type(r#type) {
                        self.truncate_scopes(initial_size);
                        return Arc::new(Value::Error);
                    }
                    // Later entry types may depend on earlier entries.
                    let r#type = self.eval(r#type);
                    self.push_variable(r#type);
                }

                self.truncate_scopes(initial_size);

                if !duplicate_labels.is_empty() {
                    self.report(CoreTypingMessage::InvalidRecordType { duplicate_labels });
                }

                Arc::new(Value::TypeType)
            }
            TermData::RecordElim(head_term, label) => {
                let head_type = self.synth_type(head_term);

                match self.record_elim_type(&head_term, &head_type, label) {
                    Some(entry_type) => entry_type,
                    None => {
                        self.report(CoreTypingMessage::LabelNotFound {
                            expected_label: label.clone(),
                            head_type: self.read_back(&head_type),
                        });
                        Arc::new(Value::Error)
                    }
                }
            }

            // Bare sequences are ambiguous: they could be arrays or lists.
            TermData::ArrayTerm(_) => {
                self.report(CoreTypingMessage::AmbiguousTerm {
                    term: AmbiguousTerm::Sequence,
                });
                Arc::new(Value::Error)
            }
            TermData::ListTerm(_) => {
                self.report(CoreTypingMessage::AmbiguousTerm {
                    term: AmbiguousTerm::Sequence,
                });
                Arc::new(Value::Error)
            }

            TermData::Constant(Constant::U8(_)) => Arc::new(Value::global("U8", [])),
            TermData::Constant(Constant::U16(_)) => Arc::new(Value::global("U16", [])),
            TermData::Constant(Constant::U32(_)) => Arc::new(Value::global("U32", [])),
            TermData::Constant(Constant::U64(_)) => Arc::new(Value::global("U64", [])),
            TermData::Constant(Constant::S8(_)) => Arc::new(Value::global("S8", [])),
            TermData::Constant(Constant::S16(_)) => Arc::new(Value::global("S16", [])),
            TermData::Constant(Constant::S32(_)) => Arc::new(Value::global("S32", [])),
            TermData::Constant(Constant::S64(_)) => Arc::new(Value::global("S64", [])),
            TermData::Constant(Constant::F32(_)) => Arc::new(Value::global("F32", [])),
            TermData::Constant(Constant::F64(_)) => Arc::new(Value::global("F64", [])),
            TermData::Constant(Constant::Char(_)) => Arc::new(Value::global("Char", [])),
            TermData::Constant(Constant::String(_)) => Arc::new(Value::global("String", [])),

            TermData::Error => Arc::new(Value::Error),
        }
    }
}
--------------------------------------------------------------------------------
/pikelet/src/lang/surface.rs:
--------------------------------------------------------------------------------
//! The surface language.
//!
//! This is a user-friendly concrete syntax for the language.

use crossbeam_channel::Sender;

use crate::lang::{FileId, Located, Location};
use crate::reporting::Message;

mod lexer;

#[allow(clippy::all, unused_parens)]
mod grammar {
    include!(concat!(env!("OUT_DIR"), "/lang/surface/grammar.rs"));
}

/// Entry in a [record type](TermData::RecordType).
pub type TypeEntry = (Located<String>, Option<Located<String>>, Term);
/// Entry in a [record term](TermData::RecordTerm).
pub type TermEntry = (Located<String>, Option<Located<String>>, Term);
/// A group of function inputs that are elements of the same type.
pub type InputGroup = (Vec<Located<String>>, Term);

/// A term, together with its source location.
pub type Term = Located<TermData>;

/// Terms in the surface language.
#[derive(Debug, Clone)]
pub enum TermData {
    /// Names.
    Name(String),

    /// Annotated terms.
    Ann(Box<Term>, Box<Term>),

    /// Function types.
    ///
    /// Also known as: pi type, dependent product type.
    FunctionType(Vec<InputGroup>, Box<Term>),
    /// Arrow function types.
    ///
    /// Also known as: non-dependent function type.
    FunctionArrowType(Box<Term>, Box<Term>),
    /// Function terms.
    ///
    /// Also known as: lambda abstraction, anonymous function.
    FunctionTerm(Vec<Located<String>>, Box<Term>),
    /// Function eliminations.
    ///
    /// Also known as: function application.
    FunctionElim(Box<Term>, Vec<Term>),

    /// Record types.
    RecordType(Vec<TypeEntry>),
    /// Record terms.
    RecordTerm(Vec<TermEntry>),
    /// Record eliminations.
    ///
    /// Also known as: record projections, field lookup.
    RecordElim(Box<Term>, Located<String>),

    /// Ordered sequences.
    SequenceTerm(Vec<Term>),
    /// Character literals.
    CharTerm(String),
    /// String literals.
    StringTerm(String),
    /// Numeric literals.
    NumberTerm(String),

    /// Error sentinel.
    Error,
}

impl<'input> Term {
    /// Parse a term from an input string.
76 | #[allow(clippy::should_implement_trait)] 77 | pub fn from_str(file_id: FileId, input: &str, messages_tx: &Sender) -> Term { 78 | let tokens = lexer::tokens(file_id, input); 79 | grammar::TermParser::new() 80 | .parse(file_id, tokens) 81 | .unwrap_or_else(|error| { 82 | messages_tx 83 | .send(Message::from_lalrpop(file_id, error)) 84 | .unwrap(); 85 | Term::new( 86 | Location::file_range(file_id, 0..input.len()), 87 | TermData::Error, 88 | ) 89 | }) 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /pikelet/src/lang/surface/grammar.lalrpop: -------------------------------------------------------------------------------- 1 | use crate::lang::{FileId, Located, Location}; 2 | use crate::lang::surface::{Term, TermData, TypeEntry, TermEntry}; 3 | use crate::lang::surface::lexer::Token; 4 | use crate::reporting::LexerError; 5 | 6 | grammar<'input>(file_id: FileId); 7 | 8 | extern { 9 | type Location = usize; 10 | type Error = LexerError; 11 | 12 | enum Token<'input> { 13 | "doc comment" => Token::DocComment(<&'input str>), 14 | "character literal" => Token::CharLiteral(<&'input str>), 15 | "string literal" => Token::StringLiteral(<&'input str>), 16 | "numeric literal" => Token::NumericLiteral(<&'input str>), 17 | "name" => Token::Name(<&'input str>), 18 | 19 | "as" => Token::As, 20 | "fun" => Token::FunTerm, 21 | "Fun" => Token::FunType, 22 | "record" => Token::RecordTerm, 23 | "Record" => Token::RecordType, 24 | 25 | "->" => Token::Arrow, 26 | "=>" => Token::DArrow, 27 | ":" => Token::Colon, 28 | "," => Token::Comma, 29 | "." 
=> Token::Dot, 30 | 31 | "{" => Token::LBrace, 32 | "}" => Token::RBrace, 33 | "[" => Token::LBrack, 34 | "]" => Token::RBrack, 35 | "(" => Token::LParen, 36 | ")" => Token::RParen, 37 | "=" => Token::Equal, 38 | } 39 | } 40 | 41 | pub Term: Term = Located; 42 | #[inline] ArrowTerm: Term = Located; 43 | #[inline] ExprTerm: Term = Located; 44 | #[inline] AppTerm: Term = Located; 45 | #[inline] AtomicTerm: Term = Located; 46 | 47 | TermData: TermData = { 48 | ExprTermData, 49 | ":" => TermData::Ann(Box::new(term), Box::new(type_)), 50 | }; 51 | 52 | ExprTermData: TermData = { 53 | ArrowTermData, 54 | "fun" +> "=>" => { 55 | TermData::FunctionTerm(input_names, Box::new(output_term)) 56 | }, 57 | }; 58 | 59 | ArrowTermData: TermData = { 60 | AppTermData, 61 | "Fun" +> ":" ")")+> 62 | "->" => 63 | { 64 | TermData::FunctionType(input_type_groups, Box::new(output_type)) 65 | }, 66 | "->" => { 67 | TermData::FunctionArrowType(Box::new(input_type), Box::new(output_type)) 68 | }, 69 | }; 70 | 71 | AppTermData: TermData = { 72 | AtomicTermData, 73 | => { 74 | TermData::FunctionElim(Box::new(head_term), input_terms) 75 | }, 76 | }; 77 | 78 | AtomicTermData: TermData = { 79 | "(" ")" => term, 80 | => TermData::Name(name), 81 | "Record" "{" > "}" => TermData::RecordType(entries), 82 | "record" "{" > "}" => TermData::RecordTerm(entries), 83 | "." 
> => TermData::RecordElim(Box::new(head_term), label), 84 | "[" > "]" => TermData::SequenceTerm(entries), 85 | "character literal" => TermData::CharTerm(<>.to_owned()), 86 | "string literal" => TermData::StringTerm(<>.to_owned()), 87 | "numeric literal" => TermData::NumberTerm(<>.to_owned()), 88 | }; 89 | 90 | #[inline] 91 | List: Vec = { 92 | ",")*> => { 93 | entries.extend(last); 94 | entries 95 | } 96 | } 97 | 98 | #[inline] 99 | TypeEntry: TypeEntry = { 100 | // TODO: Use doc comments 101 | <_docs: "doc comment"*> 102 | > >)?> ":" => (label, name, term), 103 | }; 104 | 105 | #[inline] 106 | TermEntry: TermEntry = { 107 | // TODO: Use doc comments 108 | <_docs: "doc comment"*> 109 | > >)?> "=" => (label, name, term), 110 | }; 111 | 112 | #[inline] 113 | Name: String = { 114 | "name" => (<>).to_owned(), 115 | }; 116 | 117 | #[inline] 118 | Located: Located = { 119 | => Located::new(Location::file_range(file_id, start..end), data), 120 | }; 121 | -------------------------------------------------------------------------------- /pikelet/src/lang/surface/lexer.rs: -------------------------------------------------------------------------------- 1 | use logos::Logos; 2 | use std::fmt; 3 | 4 | use crate::lang::{FileId, Location}; 5 | use crate::reporting::LexerError; 6 | 7 | /// Tokens in the surface language. 
#[derive(Debug, Clone, Logos)]
pub enum Token<'a> {
    // NOTE(review): the doc-comment and line-comment patterns below require a
    // trailing `\n`, so a comment on the very last line of a file that lacks
    // a final newline will lex as `Error` — confirm whether that is intended.
    #[regex(r"\|\|\|(.*)\n")]
    DocComment(&'a str),
    #[regex(r#"'([^'\\]|\\.)*'"#)]
    CharLiteral(&'a str),
    #[regex(r#""([^"\\]|\\.)*""#)]
    StringLiteral(&'a str),
    #[regex(r"[-+]?[0-9][a-zA-Z0-9_\.]*")]
    NumericLiteral(&'a str),
    #[regex(r"[a-zA-Z][a-zA-Z0-9\-]*")]
    Name(&'a str),

    #[token("as")]
    As,
    #[token("fun")]
    FunTerm,
    #[token("Fun")]
    FunType,
    #[token("record")]
    RecordTerm,
    #[token("Record")]
    RecordType,

    #[token(":")]
    Colon,
    #[token(",")]
    Comma,
    #[token("=>")]
    DArrow,
    #[token("->")]
    Arrow,
    #[token(".")]
    Dot,
    #[token("=")]
    Equal,

    #[token("(")]
    LParen,
    #[token(")")]
    RParen,
    #[token("[")]
    LBrack,
    #[token("]")]
    RBrack,
    #[token("{")]
    LBrace,
    #[token("}")]
    RBrace,

    // Whitespace and `--` line comments are skipped; anything else that fails
    // to match becomes an `Error` token.
    #[error]
    #[regex(r"\p{Whitespace}", logos::skip)]
    #[regex(r"--(.*)\n", logos::skip)]
    Error,
}

impl<'a> fmt::Display for Token<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Token::DocComment(s) => write!(f, "{}", s),
            Token::CharLiteral(s) => write!(f, "{}", s),
            Token::StringLiteral(s) => write!(f, "{}", s),
            Token::NumericLiteral(s) => write!(f, "{}", s),
            Token::Name(s) => write!(f, "{}", s),

            Token::As => write!(f, "as"),
            Token::FunTerm => write!(f, "fun"),
            Token::FunType => write!(f, "Fun"),
            Token::RecordTerm => write!(f, "record"),
            Token::RecordType => write!(f, "Record"),

            Token::Colon => write!(f, ":"),
            Token::Comma => write!(f, ","),
            Token::DArrow => write!(f, "=>"),
            Token::Arrow => write!(f, "->"),
            Token::Equal => write!(f, "="),
            Token::Dot => write!(f, "."),

            Token::LParen => write!(f, "("),
            Token::RParen => write!(f, ")"),
            Token::LBrack => write!(f, "["),
            Token::RBrack => write!(f, "]"),
            Token::LBrace => write!(f, "{{"),
            Token::RBrace => write!(f, "}}"),

            Token::Error => write!(f, ""),
        }
    }
}

/// The token triple format expected by LALRPOP: `(start, token, end)`.
pub type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;

/// Lex a source string into a stream of spanned tokens, converting lexer
/// `Error` tokens into `LexerError` diagnostics.
pub fn tokens<'a>(
    file_id: FileId,
    source: &'a str,
) -> impl 'a + Iterator<Item = Spanned<Token<'a>, usize, LexerError>> {
    Token::lexer(source)
        .spanned()
        .map(move |(token, range)| match token {
            Token::Error => Err(LexerError::InvalidToken {
                location: Location::file_range(file_id, range),
            }),
            token => Ok((range.start, token, range.end)),
        })
}

#[test]
fn behavior_after_error() {
    let starts_with_invalid = "@.";
    // [Err(...), Some(Token::DOT)]
    let from_lex: Vec<_> = tokens(0, starts_with_invalid).collect();
    let result: Vec<_> = from_lex.iter().map(Result::is_ok).collect();
    assert_eq!(result, vec![false, true]);
}
--------------------------------------------------------------------------------
/pikelet/src/lib.rs:
--------------------------------------------------------------------------------
//! A simple language.

#![allow(clippy::drop_copy)]
#![allow(clippy::drop_ref)]
#![allow(clippy::new_without_default)]
#![allow(clippy::while_let_on_iterator)]

pub mod lang;
pub mod pass;

mod literal;
pub mod reporting;
--------------------------------------------------------------------------------
/pikelet/src/pass.rs:
--------------------------------------------------------------------------------
//! Passes between intermediate languages.
//!
//! The most significant step in this process is the [`surface_to_core`] pass,
//! which handles elaboration of the surface language into the core language,
//! and is the source of most user-facing typing diagnostics.

pub mod core_to_pretty;
pub mod core_to_surface;
pub mod surface_to_core;
pub mod surface_to_pretty;
--------------------------------------------------------------------------------
/pikelet/src/pass/core_to_pretty.rs:
--------------------------------------------------------------------------------
//! Pretty prints the [core language] to a [pretty] document.
//!
//! [core language]: crate::lang::core

use pretty::{DocAllocator, DocBuilder};

use crate::lang::core::{Constant, Term, TermData};

/// The precedence of a term.
///
/// Used to decide when parentheses are needed around a sub-term: a term is
/// parenthesized when printed at a precedence higher than its own.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Prec {
    Term = 0,
    Expr,
    Arrow,
    App,
    Atomic,
}

/// Pretty print a term at the lowest precedence (never parenthesized).
pub fn from_term<'a, D>(alloc: &'a D, term: &'a Term) -> DocBuilder<'a, D>
where
    D: DocAllocator<'a>,
    D::Doc: Clone,
{
    from_term_prec(alloc, term, Prec::Term)
}

/// Pretty print a term in a surrounding context of precedence `prec`,
/// adding parentheses where required.
pub fn from_term_prec<'a, D>(alloc: &'a D, term: &'a Term, prec: Prec) -> DocBuilder<'a, D>
where
    D: DocAllocator<'a>,
    D::Doc: Clone,
{
    match &term.data {
        TermData::Global(name) => (alloc.nil())
            .append(alloc.text("global"))
            .append(alloc.space())
            .append(alloc.text(name)),
        TermData::Var(index) => (alloc.nil())
            .append(alloc.text("var"))
            .append(alloc.space())
            .append(alloc.as_string(index.to_usize())),

        TermData::Ann(term, r#type) => paren(
            alloc,
            prec > Prec::Term,
            (alloc.nil())
                .append(from_term_prec(alloc, term, Prec::Expr))
                .append(alloc.space())
                .append(":")
                .append(
                    (alloc.space())
                        .append(from_term_prec(alloc, r#type, Prec::Term))
                        .group()
                        .nest(4),
                ),
        ),

        TermData::TypeType => alloc.text("Type"),

        TermData::FunctionType(_, input_type, output_type) => paren(
            alloc,
            prec > Prec::Arrow,
            (alloc.nil())
                .append(from_term_prec(alloc, input_type, Prec::App))
                .append(alloc.space())
                .append("->")
                .append(alloc.space())
                .append(from_term_prec(alloc, output_type, Prec::Arrow)),
        ),
        // NOTE: core function terms do not record an input name, so the
        // binder is printed as `_`.
        TermData::FunctionTerm(_, output_term) => paren(
            alloc,
            prec > Prec::Expr,
            (alloc.nil())
                .append("fun")
                .append(alloc.space())
                .append("_")
                .append(alloc.space())
                .append("=>")
                .group()
                .append(alloc.space())
                .append(from_term_prec(alloc, output_term, Prec::Expr).nest(4)),
        ),
        TermData::FunctionElim(head_term, input_term) => paren(
            alloc,
            prec > Prec::App,
            from_term_prec(alloc, head_term, Prec::App).append(
                (alloc.space())
                    .append(from_term_prec(alloc, input_term, Prec::Arrow))
                    .group()
                    .nest(4),
            ),
        ),

        // Record types and terms print their labels and entries as two
        // parallel braced lists, mirroring the core representation.
        TermData::RecordType(labels, types) => (alloc.nil())
            .append("Record")
            .append(alloc.space())
            .append("{")
            .group()
            .append(alloc.space().append(alloc.intersperse(
                labels.iter().map(|label| alloc.text(label).group().nest(4)),
                alloc.text(",").append(alloc.space()),
            )))
            .append("}")
            .append(alloc.space())
            .append("{")
            .group()
            .append(alloc.space().append(alloc.intersperse(
                (types.iter()).map(|ty| from_term_prec(alloc, ty, Prec::Term).group().nest(4)),
                alloc.text(",").append(alloc.space()),
            )))
            .append("}"),
        TermData::RecordTerm(labels, terms) => (alloc.nil())
            .append("record")
            .append(alloc.space())
            .append("{")
            .group()
            .append(alloc.space().append(alloc.intersperse(
                labels.iter().map(|label| alloc.text(label).group().nest(4)),
                alloc.text(",").append(alloc.space()),
            )))
            .append("}")
            .append(alloc.space())
            .append("{")
            .group()
            .append(alloc.space().append(alloc.intersperse(
                (terms.iter()).map(|term| from_term_prec(alloc, term, Prec::Term).group().nest(4)),
                alloc.text(",").append(alloc.space()),
            )))
            .append("}"),
        TermData::RecordElim(head_term, label) => (alloc.nil())
            .append(from_term_prec(alloc, head_term, Prec::Atomic))
            .append(".")
            .append(alloc.text(label)),

        // Arrays and lists share the same bracketed syntax.
        TermData::ArrayTerm(term_entries) | TermData::ListTerm(term_entries) => (alloc.nil())
            .append("[")
            .group()
            .append(
                alloc.intersperse(
                    term_entries
                        .iter()
                        .map(|term| from_term_prec(alloc, term, Prec::Term).group().nest(4)),
                    alloc.text(",").append(alloc.space()),
                ),
            )
            .append("]"),

        TermData::Constant(constant) => from_constant(alloc, constant),

        TermData::Error => alloc.text("!"),
    }
}

/// Pretty print a constant.
///
/// Numbers use `Display` formatting; characters and strings use `Debug`
/// formatting so that they are printed quoted and escaped.
pub fn from_constant<'a, D>(alloc: &'a D, constant: &'a Constant) -> DocBuilder<'a, D>
where
    D: DocAllocator<'a>,
    D::Doc: Clone,
{
    match constant {
        Constant::U8(value) => alloc.text(format!("{}", value)),
        Constant::U16(value) => alloc.text(format!("{}", value)),
        Constant::U32(value) => alloc.text(format!("{}", value)),
        Constant::U64(value) => alloc.text(format!("{}", value)),
        Constant::S8(value) => alloc.text(format!("{}", value)),
        Constant::S16(value) => alloc.text(format!("{}", value)),
        Constant::S32(value) => alloc.text(format!("{}", value)),
        Constant::S64(value) => alloc.text(format!("{}", value)),
        Constant::F32(value) => alloc.text(format!("{}", value)),
        Constant::F64(value) => alloc.text(format!("{}", value)),
        Constant::Char(value) => alloc.text(format!("{:?}", value)),
        Constant::String(value) => alloc.text(format!("{:?}", value)),
    }
}

/// Wrap `doc` in parentheses when `b` is true, otherwise return it unchanged.
fn paren<'a, D>(alloc: &'a D, b: bool, doc: DocBuilder<'a, D>) -> DocBuilder<'a, D>
where
    D: DocAllocator<'a>,
    D::Doc: Clone,
{
    if b {
        alloc.text("(").append(doc).append(")")
    } else {
        doc
    }
}
-------------------------------------------------------------------------------- /pikelet/src/pass/core_to_surface.rs: -------------------------------------------------------------------------------- 1 | //! Distills the [core language] into the [surface language]. 2 | //! 3 | //! This is the inverse of [`pass::surface_to_core`], and is useful for pretty 4 | //! printing terms when presenting them to the user. 5 | //! 6 | //! [surface language]: crate::lang::surface 7 | //! [core language]: crate::lang::core 8 | //! [`pass::surface_to_core`]: crate::pass::surface_to_core 9 | 10 | use contracts::debug_ensures; 11 | use fxhash::FxHashMap; 12 | 13 | use crate::lang::core::{Constant, EnvSize, Globals, Term, TermData, VarIndex}; 14 | use crate::lang::surface; 15 | use crate::lang::Located; 16 | 17 | /// Distillation context. 18 | pub struct Context<'globals> { 19 | globals: &'globals Globals, 20 | usages: FxHashMap, 21 | names: Vec, 22 | } 23 | 24 | struct Usage { 25 | base_name: Option, 26 | count: usize, 27 | } 28 | 29 | impl Usage { 30 | fn new() -> Usage { 31 | Usage { 32 | base_name: None, 33 | count: 1, 34 | } 35 | } 36 | } 37 | 38 | const DEFAULT_NAME: &str = "t"; 39 | 40 | impl<'globals> Context<'globals> { 41 | /// Construct a new distillation state. 42 | pub fn new(globals: &'globals Globals) -> Context<'globals> { 43 | let usages = globals 44 | .entries() 45 | .map(|(name, _)| (name.to_owned(), Usage::new())) 46 | .collect(); 47 | 48 | Context { 49 | globals, 50 | usages, 51 | names: Vec::new(), 52 | } 53 | } 54 | 55 | fn index_to_level(&self, index: VarIndex) -> usize { 56 | let index = index.to_usize(); 57 | self.names.len().saturating_sub(index).saturating_sub(1) 58 | } 59 | 60 | fn get_name(&self, index: VarIndex) -> Option<&str> { 61 | Some(self.names.get(self.index_to_level(index))?.as_str()) 62 | } 63 | 64 | // FIXME: This is incredibly horrific and I do not like it! 
65 | // 66 | // We could investigate finding more optimal optimal names by using free 67 | // variables, or look into [scope sets](https://typesanitizer.com/blog/scope-sets-as-pinata.html) 68 | // for a more principled approach to scope names. 69 | pub fn push_scope(&mut self, name_hint: Option<&str>) -> String { 70 | let base_name = name_hint.unwrap_or(DEFAULT_NAME); 71 | let (fresh_name, base_name) = match self.usages.get_mut(base_name) { 72 | // The name has not been used yet 73 | None => (base_name.to_owned(), None), 74 | // The name is in use - find a free one to use! 75 | Some(usage) => { 76 | let mut suffix = usage.count; 77 | // Update the usage count to make finding the next name faster. 78 | usage.count += 1; 79 | // Attempt names with incrementing numeric suffixes until we 80 | // find one that has yet to be used. 81 | loop { 82 | // TODO: Reduce string allocations 83 | match format!("{}-{}", base_name, suffix) { 84 | // Candidate name has been used - try another! 85 | name if self.usages.contains_key(&name) => suffix += 1, 86 | // The candidate has not been used - we're free to use it 87 | name => break (name, Some(base_name.to_owned())), 88 | } 89 | } 90 | } 91 | }; 92 | 93 | let usage = Usage { 94 | base_name, 95 | count: 1, 96 | }; 97 | // TODO: Reduce cloning of names 98 | self.usages.insert(fresh_name.clone(), usage); 99 | self.names.push(fresh_name.clone()); 100 | fresh_name 101 | } 102 | 103 | pub fn pop_scope(&mut self) { 104 | if let Some(mut name) = self.names.pop() { 105 | while let Some(base_name) = self.remove_usage(name) { 106 | name = base_name; 107 | } 108 | } 109 | } 110 | 111 | pub fn pop_scopes(&mut self, count: usize) { 112 | (0..count).for_each(|_| self.pop_scope()); 113 | } 114 | 115 | pub fn truncate_scopes(&mut self, count: EnvSize) { 116 | (count.to_usize()..self.names.len()).for_each(|_| self.pop_scope()); 117 | } 118 | 119 | fn remove_usage(&mut self, name: String) -> Option { 120 | use std::collections::hash_map::Entry; 121 | 
122 | match self.usages.entry(name) { 123 | Entry::Occupied(entry) if entry.get().count >= 1 => entry.remove().base_name, 124 | Entry::Occupied(mut entry) => { 125 | entry.get_mut().count -= 1; 126 | None 127 | } 128 | Entry::Vacant(_) => None, 129 | } 130 | } 131 | 132 | /// Distill a [`core::Term`] into a [`surface::Term`]. 133 | /// 134 | /// [`core::Term`]: crate::lang::core::Term 135 | /// [`surface::Term`]: crate::lang::surface::Term 136 | #[debug_ensures(self.names.len() == old(self.names.len()))] 137 | pub fn from_term(&mut self, term: &Term) -> surface::Term { 138 | let term_data = match &term.data { 139 | TermData::Global(name) => match self.globals.get(name) { 140 | Some(_) => surface::TermData::Name(name.to_owned()), 141 | None => surface::TermData::Error, // TODO: Log error? 142 | }, 143 | TermData::Var(index) => match self.get_name(*index) { 144 | Some(name) => surface::TermData::Name(name.to_owned()), 145 | None => surface::TermData::Error, // TODO: Log error? 146 | }, 147 | 148 | TermData::Ann(term, r#type) => surface::TermData::Ann( 149 | Box::new(self.from_term(term)), 150 | Box::new(self.from_term(r#type)), 151 | ), 152 | 153 | TermData::TypeType => surface::TermData::Name("Type".to_owned()), 154 | 155 | TermData::FunctionType(input_name_hint, input_type, output_type) => { 156 | // FIXME: properly group inputs! 
157 | let input_type = self.from_term(input_type); 158 | let fresh_input_name = 159 | self.push_scope(input_name_hint.as_ref().map(String::as_str)); 160 | let input_type_groups = 161 | vec![(vec![Located::generated(fresh_input_name)], input_type)]; 162 | let output_type = self.from_term(output_type); 163 | self.pop_scopes(input_type_groups.iter().map(|(ns, _)| ns.len()).sum()); 164 | 165 | surface::TermData::FunctionType(input_type_groups, Box::new(output_type)) 166 | } 167 | TermData::FunctionTerm(input_name_hint, output_term) => { 168 | let mut current_output_term = output_term; 169 | 170 | let fresh_input_name = self.push_scope(Some(input_name_hint)); 171 | let mut input_names = vec![Located::generated(fresh_input_name)]; 172 | 173 | while let TermData::FunctionTerm(input_name_hint, output_term) = 174 | ¤t_output_term.data 175 | { 176 | let fresh_input_name = self.push_scope(Some(input_name_hint)); 177 | input_names.push(Located::generated(fresh_input_name)); 178 | current_output_term = output_term; 179 | } 180 | 181 | let output_term = self.from_term(current_output_term); 182 | self.pop_scopes(input_names.len()); 183 | 184 | surface::TermData::FunctionTerm(input_names, Box::new(output_term)) 185 | } 186 | TermData::FunctionElim(head_term, input_term) => { 187 | let mut current_head_term = head_term; 188 | 189 | let mut input_terms = vec![self.from_term(input_term)]; 190 | while let TermData::FunctionElim(head_term, input_term) = ¤t_head_term.data { 191 | input_terms.push(self.from_term(input_term)); 192 | current_head_term = head_term; 193 | } 194 | input_terms.reverse(); 195 | 196 | let head_term = self.from_term(current_head_term); 197 | surface::TermData::FunctionElim(Box::new(head_term), input_terms) 198 | } 199 | 200 | TermData::RecordType(labels, types) => { 201 | let type_entries = Iterator::zip(labels.iter(), types.iter()) 202 | .map(|(label, entry_type)| { 203 | let entry_type = self.from_term(entry_type); 204 | let label = label.clone(); 205 | match 
self.push_scope(Some(&label)) { 206 | name if name == label => (Located::generated(label), None, entry_type), 207 | name => ( 208 | Located::generated(label), 209 | Some(Located::generated(name)), 210 | entry_type, 211 | ), 212 | } 213 | }) 214 | .collect::>(); 215 | self.pop_scopes(type_entries.len()); 216 | 217 | surface::TermData::RecordType(type_entries) 218 | } 219 | TermData::RecordTerm(labels, terms) => { 220 | let term_entries = Iterator::zip(labels.iter(), terms.iter()) 221 | .map(|(label, entry_type)| { 222 | let entry_type = self.from_term(entry_type); 223 | let label = label.clone(); 224 | match self.push_scope(Some(&label)) { 225 | name if name == label => (Located::generated(label), None, entry_type), 226 | name => ( 227 | Located::generated(label), 228 | Some(Located::generated(name)), 229 | entry_type, 230 | ), 231 | } 232 | }) 233 | .collect::>(); 234 | self.pop_scopes(term_entries.len()); 235 | 236 | surface::TermData::RecordTerm(term_entries) 237 | } 238 | TermData::RecordElim(head_term, label) => surface::TermData::RecordElim( 239 | Box::new(self.from_term(head_term)), 240 | Located::generated(label.clone()), 241 | ), 242 | 243 | TermData::ArrayTerm(entry_terms) | TermData::ListTerm(entry_terms) => { 244 | let core_entry_terms = entry_terms 245 | .iter() 246 | .map(|entry_term| self.from_term(entry_term)) 247 | .collect(); 248 | 249 | surface::TermData::SequenceTerm(core_entry_terms) 250 | } 251 | 252 | TermData::Constant(constant) => match constant { 253 | Constant::U8(value) => surface::TermData::NumberTerm(value.to_string()), 254 | Constant::U16(value) => surface::TermData::NumberTerm(value.to_string()), 255 | Constant::U32(value) => surface::TermData::NumberTerm(value.to_string()), 256 | Constant::U64(value) => surface::TermData::NumberTerm(value.to_string()), 257 | Constant::S8(value) => surface::TermData::NumberTerm(value.to_string()), 258 | Constant::S16(value) => surface::TermData::NumberTerm(value.to_string()), 259 | 
Constant::S32(value) => surface::TermData::NumberTerm(value.to_string()), 260 | Constant::S64(value) => surface::TermData::NumberTerm(value.to_string()), 261 | Constant::F32(value) => surface::TermData::NumberTerm(value.to_string()), 262 | Constant::F64(value) => surface::TermData::NumberTerm(value.to_string()), 263 | Constant::Char(value) => surface::TermData::CharTerm(format!("{:?}", value)), 264 | Constant::String(value) => surface::TermData::StringTerm(format!("{:?}", value)), 265 | }, 266 | 267 | TermData::Error => surface::TermData::Error, 268 | }; 269 | 270 | surface::Term::generated(term_data) 271 | } 272 | } 273 | 274 | #[cfg(test)] 275 | mod tests { 276 | use super::*; 277 | 278 | #[test] 279 | fn push_default_name() { 280 | let globals = Globals::default(); 281 | let mut state = Context::new(&globals); 282 | 283 | assert_eq!(state.push_scope(None), "t"); 284 | assert_eq!(state.push_scope(Some("t")), "t-1"); 285 | assert_eq!(state.push_scope(None), "t-2"); 286 | } 287 | 288 | #[test] 289 | fn push_and_pop_default_name() { 290 | let globals = Globals::default(); 291 | let mut state = Context::new(&globals); 292 | 293 | assert_eq!(state.push_scope(None), "t"); 294 | state.pop_scope(); 295 | assert_eq!(state.push_scope(None), "t"); 296 | assert_eq!(state.push_scope(None), "t-1"); 297 | state.pop_scope(); 298 | state.pop_scope(); 299 | assert_eq!(state.push_scope(None), "t"); 300 | assert_eq!(state.push_scope(None), "t-1"); 301 | assert_eq!(state.push_scope(None), "t-2"); 302 | state.pop_scope(); 303 | state.pop_scope(); 304 | state.pop_scope(); 305 | assert_eq!(state.push_scope(None), "t"); 306 | assert_eq!(state.push_scope(None), "t-1"); 307 | assert_eq!(state.push_scope(None), "t-2"); 308 | } 309 | 310 | #[test] 311 | fn push_scope() { 312 | let globals = Globals::default(); 313 | let mut state = Context::new(&globals); 314 | 315 | assert_eq!(state.push_scope(Some("test")), "test"); 316 | assert_eq!(state.push_scope(Some("test")), "test-1"); 317 | 
assert_eq!(state.push_scope(Some("test")), "test-2"); 318 | } 319 | 320 | #[test] 321 | fn push_and_pop_scope() { 322 | let globals = Globals::default(); 323 | let mut state = Context::new(&globals); 324 | 325 | assert_eq!(state.push_scope(Some("test")), "test"); 326 | state.pop_scope(); 327 | assert_eq!(state.push_scope(Some("test")), "test"); 328 | assert_eq!(state.push_scope(Some("test")), "test-1"); 329 | state.pop_scope(); 330 | state.pop_scope(); 331 | assert_eq!(state.push_scope(Some("test")), "test"); 332 | assert_eq!(state.push_scope(Some("test")), "test-1"); 333 | assert_eq!(state.push_scope(Some("test")), "test-2"); 334 | state.pop_scope(); 335 | state.pop_scope(); 336 | state.pop_scope(); 337 | assert_eq!(state.push_scope(Some("test")), "test"); 338 | assert_eq!(state.push_scope(Some("test")), "test-1"); 339 | assert_eq!(state.push_scope(Some("test")), "test-2"); 340 | } 341 | 342 | #[test] 343 | fn push_fresh_name() { 344 | let globals = Globals::default(); 345 | let mut state = Context::new(&globals); 346 | 347 | assert_eq!(state.push_scope(Some("test")), "test"); 348 | assert_eq!(state.push_scope(Some("test")), "test-1"); 349 | assert_eq!(state.push_scope(Some("test-1")), "test-1-1"); 350 | assert_eq!(state.push_scope(Some("test-1")), "test-1-2"); 351 | assert_eq!(state.push_scope(Some("test-1-2")), "test-1-2-1"); 352 | } 353 | 354 | #[test] 355 | fn push_global_name() { 356 | let globals = Globals::default(); 357 | let mut state = Context::new(&globals); 358 | 359 | assert_eq!(state.push_scope(Some("Type")), "Type-1"); 360 | assert_eq!(state.push_scope(Some("Type")), "Type-2"); 361 | } 362 | } 363 | -------------------------------------------------------------------------------- /pikelet/src/pass/surface_to_pretty.rs: -------------------------------------------------------------------------------- 1 | //! Pretty prints the [surface language] to a [pretty] document. 2 | //! 3 | //! 
[surface language]: crate::lang::surface 4 | 5 | use pretty::{DocAllocator, DocBuilder}; 6 | 7 | use crate::lang::surface::{Term, TermData}; 8 | 9 | /// The precedence of a term. 10 | #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] 11 | pub enum Prec { 12 | Term = 0, 13 | Expr, 14 | Arrow, 15 | App, 16 | Atomic, 17 | } 18 | 19 | pub fn from_term<'a, D>(alloc: &'a D, term: &'a Term) -> DocBuilder<'a, D> 20 | where 21 | D: DocAllocator<'a>, 22 | D::Doc: Clone, 23 | { 24 | from_term_prec(alloc, term, Prec::Term) 25 | } 26 | 27 | pub fn from_term_prec<'a, D>(alloc: &'a D, term: &'a Term, prec: Prec) -> DocBuilder<'a, D> 28 | where 29 | D: DocAllocator<'a>, 30 | D::Doc: Clone, 31 | { 32 | match &term.data { 33 | TermData::Name(name) => alloc.text(name), 34 | 35 | TermData::Ann(term, r#type) => paren( 36 | alloc, 37 | prec > Prec::Term, 38 | (alloc.nil()) 39 | .append(from_term_prec(alloc, term, Prec::Expr)) 40 | .append(alloc.space()) 41 | .append(":") 42 | .append( 43 | (alloc.space()) 44 | .append(from_term_prec(alloc, r#type, Prec::Term)) 45 | .group() 46 | .nest(4), 47 | ), 48 | ), 49 | 50 | TermData::FunctionType(input_type_groups, output_type) => paren( 51 | alloc, 52 | prec > Prec::Arrow, 53 | (alloc.nil()) 54 | .append("Fun") 55 | .append(alloc.space()) 56 | .append(alloc.intersperse( 57 | input_type_groups.iter().map(|(input_names, input_type)| { 58 | (alloc.nil()) 59 | .append("(") 60 | .append(alloc.intersperse( 61 | input_names.iter().map(|input_name| &input_name.data), 62 | alloc.space(), 63 | )) 64 | .append(alloc.space()) 65 | .append(":") 66 | .append(alloc.space()) 67 | .append(from_term_prec(alloc, input_type, Prec::Term)) 68 | .append(")") 69 | }), 70 | alloc.space(), 71 | )) 72 | .append(alloc.space()) 73 | .append("->") 74 | .group() 75 | .append( 76 | (alloc.nil()).append(alloc.space()).append( 77 | from_term_prec(alloc, output_type, Prec::Arrow) 78 | .group() 79 | .nest(4), 80 | ), 81 | ), 82 | ), 83 | 
TermData::FunctionArrowType(input_type, output_type) => paren( 84 | alloc, 85 | prec > Prec::Arrow, 86 | (alloc.nil()) 87 | .append(from_term_prec(alloc, input_type, Prec::App)) 88 | .append(alloc.space()) 89 | .append("->") 90 | .append(alloc.space()) 91 | .append(from_term_prec(alloc, output_type, Prec::Arrow)), 92 | ), 93 | TermData::FunctionTerm(input_names, output_term) => paren( 94 | alloc, 95 | prec > Prec::Expr, 96 | (alloc.nil()) 97 | .append("fun") 98 | .append(alloc.space()) 99 | .append(alloc.intersperse( 100 | input_names.iter().map(|input_name| &input_name.data), 101 | alloc.space(), 102 | )) 103 | .append(alloc.space()) 104 | .append("=>") 105 | .group() 106 | .append( 107 | (alloc.nil()).append(alloc.space()).append( 108 | from_term_prec(alloc, output_term, Prec::Expr) 109 | .group() 110 | .nest(4), 111 | ), 112 | ), 113 | ), 114 | TermData::FunctionElim(head_term, input_terms) => paren( 115 | alloc, 116 | prec > Prec::App, 117 | from_term_prec(alloc, head_term, Prec::App).append( 118 | (alloc.nil()) 119 | .append(alloc.concat(input_terms.iter().map(|input_term| { 120 | alloc 121 | .space() 122 | .append(from_term_prec(alloc, input_term, Prec::Arrow)) 123 | }))) 124 | .group() 125 | .nest(4), 126 | ), 127 | ), 128 | 129 | TermData::RecordType(type_entries) => (alloc.nil()) 130 | .append("Record") 131 | .append(alloc.space()) 132 | .append("{") 133 | .group() 134 | .append( 135 | alloc.concat(type_entries.iter().map(|(label, name, entry_type)| { 136 | (alloc.nil()) 137 | .append(alloc.hardline()) 138 | .append(match name { 139 | None => alloc.text(&label.data).append(alloc.space()), 140 | Some(name) => alloc 141 | .text(&label.data) 142 | .append(alloc.space()) 143 | .append("as") 144 | .append(alloc.space()) 145 | .append(&name.data) 146 | .append(alloc.space()), 147 | }) 148 | .append(":") 149 | .group() 150 | .append( 151 | (alloc.space()) 152 | .append(from_term_prec(alloc, entry_type, Prec::Term)) 153 | .append(",") 154 | .group() 155 | 
.nest(4), 156 | ) 157 | .nest(4) 158 | .group() 159 | })), 160 | ) 161 | .append("}"), 162 | TermData::RecordTerm(term_entries) => (alloc.nil()) 163 | .append("record") 164 | .append(alloc.space()) 165 | .append("{") 166 | .group() 167 | .append( 168 | alloc.concat(term_entries.iter().map(|(label, name, entry_term)| { 169 | (alloc.nil()) 170 | .append(alloc.hardline()) 171 | .append(match name { 172 | None => alloc.text(&label.data).append(alloc.space()), 173 | Some(name) => alloc 174 | .text(&label.data) 175 | .append(alloc.space()) 176 | .append("as") 177 | .append(alloc.space()) 178 | .append(&name.data) 179 | .append(alloc.space()), 180 | }) 181 | .append("=") 182 | .group() 183 | .append( 184 | (alloc.space()) 185 | .append(from_term_prec(alloc, entry_term, Prec::Term)) 186 | .append(",") 187 | .group() 188 | .nest(4), 189 | ) 190 | .nest(4) 191 | .group() 192 | })), 193 | ) 194 | .append("}"), 195 | TermData::RecordElim(head_term, label) => (alloc.nil()) 196 | .append(from_term_prec(alloc, head_term, Prec::Atomic)) 197 | .append(".") 198 | .append(&label.data), 199 | 200 | TermData::SequenceTerm(term_entries) => (alloc.nil()) 201 | .append("[") 202 | .group() 203 | .append( 204 | alloc.intersperse( 205 | term_entries 206 | .iter() 207 | .map(|term| from_term_prec(alloc, term, Prec::Term).group().nest(4)), 208 | alloc.text(",").append(alloc.space()), 209 | ), 210 | ) 211 | .append("]"), 212 | 213 | TermData::CharTerm(text) | TermData::StringTerm(text) | TermData::NumberTerm(text) => { 214 | alloc.text(text) 215 | } 216 | 217 | TermData::Error => alloc.text("!"), 218 | } 219 | } 220 | 221 | fn paren<'a, D>(alloc: &'a D, b: bool, doc: DocBuilder<'a, D>) -> DocBuilder<'a, D> 222 | where 223 | D: DocAllocator<'a>, 224 | D::Doc: Clone, 225 | { 226 | if b { 227 | alloc.text("(").append(doc).append(")") 228 | } else { 229 | doc 230 | } 231 | } 232 | -------------------------------------------------------------------------------- /tests/comments.pi: 
-------------------------------------------------------------------------------- 1 | --! check.enable = true 2 | 3 | -- This is a line comment 4 | record { 5 | -- Another line comment 6 | ||| This is a doc comment 7 | x = Record {} 8 | } : Record { 9 | ||| This is another doc comment 10 | x : Type, 11 | } 12 | -------------------------------------------------------------------------------- /tests/functions.pi: -------------------------------------------------------------------------------- 1 | --! check.enable = true 2 | 3 | record { 4 | id-String = fun a => a, 5 | const-String-S32 = fun a b => a, 6 | 7 | id = fun A a => a, 8 | const = fun A B a b => a, 9 | } : Record { 10 | id-String : String -> String, 11 | const-String-S32 : String -> S32 -> String, 12 | 13 | id : Fun (A : Type) -> A -> A, 14 | const : Fun (A : Type) (B : Type) -> A -> B -> A, 15 | } 16 | -------------------------------------------------------------------------------- /tests/literals.pi: -------------------------------------------------------------------------------- 1 | --! check.enable = true 2 | 3 | record { 4 | b2 = 0b1001_0101, 5 | b8 = 0o01234567, 6 | b10 = 0123456789, 7 | b16 = 0x01234_abcdef_ABCDEF, 8 | 9 | char-tab = '\t', 10 | char-carriage-return = '\r', 11 | char-newline = '\n', 12 | char-null = '\0', 13 | char-backslash = '\\', 14 | char-single-quote = '\'', 15 | char-double-quote = '\"', 16 | char-ascii-escape = '\x32', 17 | char-unicode-escape = '\u{0001}', 18 | 19 | string-utf8 = "Γ ⊢ e ∈ τ ... 
🌱🌳🌲🌿", 20 | string-escapes = "\t\r\n\0\\ \'\" \x32 \u{0001}", 21 | } : Record { 22 | b2 : S32, 23 | b8 : S32, 24 | b10 : S32, 25 | b16 : U64, 26 | 27 | char-tab : Char, 28 | char-carriage-return : Char, 29 | char-newline : Char, 30 | char-null : Char, 31 | char-backslash : Char, 32 | char-single-quote : Char, 33 | char-double-quote : Char, 34 | char-ascii-escape : Char, 35 | char-unicode-escape : Char, 36 | 37 | string-utf8 : String, 38 | string-escapes : String, 39 | } 40 | -------------------------------------------------------------------------------- /tests/record-term-deps.pi: -------------------------------------------------------------------------------- 1 | --! check.enable = true 2 | 3 | (record { 4 | A = U32, 5 | B = A, 6 | a = 23, 7 | b = a, 8 | } : Record { 9 | A : Type, 10 | B : Type, 11 | a : A, 12 | b : B, 13 | }).b : U32 14 | -------------------------------------------------------------------------------- /tests/record-type-deps.pi: -------------------------------------------------------------------------------- 1 | --! check.enable = true 2 | 3 | (record { 4 | A = U32, 5 | a = 23, 6 | } : Record { 7 | A : Type, 8 | a : A, 9 | }).a : U32 10 | --------------------------------------------------------------------------------