├── .cargo └── config.toml ├── .devcontainer ├── Dockerfile └── devcontainer.json ├── .github ├── bors.toml └── workflows │ └── test.yml ├── .gitignore ├── .nvmrc ├── .typos.toml ├── .vscode └── launch.json ├── CLAUDE.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Cargo.lock ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── book ├── .gitignore ├── .prettierignore ├── .prettierrc.jsonc ├── README.md ├── babel.config.js ├── blog │ ├── 2021-08-01-mdx-blog-post.mdx │ └── authors.yml ├── docs │ ├── about.md │ ├── about │ │ └── faq.md │ ├── acknowledgments.md │ ├── build_and_run.md │ ├── caveat.md │ ├── contributing.mdx │ ├── contributing │ │ ├── build.mdx │ │ ├── coc.mdx │ │ └── guidelines.mdx │ ├── design_docs.mdx │ ├── design_docs │ │ ├── calculus.md │ │ ├── experience.md │ │ ├── goals.md │ │ ├── sharing_synthesized_values.md │ │ ├── sketchy_ideas.md │ │ ├── types.md │ │ └── why_not_explicit_end_for_lease.md │ ├── dyn_tutorial │ │ ├── any.md │ │ ├── atomic.md │ │ ├── class.md │ │ ├── field_permissions.md │ │ ├── house_party.md │ │ ├── index.md │ │ ├── labeled_arguments.md │ │ ├── lease.md │ │ ├── my.md │ │ ├── our.md │ │ ├── permissions.md │ │ ├── sharing_xor_mutation.md │ │ ├── shlease.md │ │ └── sublease.md │ ├── reference.md │ ├── reference │ │ ├── grammar.md │ │ ├── raw-string-literals.md │ │ └── string-literals.md │ ├── tutorials.md │ └── typed_tutorial.md ├── docusaurus.config.js ├── package-lock.json ├── package.json ├── sidebars.js ├── src │ ├── components │ │ └── HomepageFeatures │ │ │ ├── index.tsx │ │ │ └── styles.module.css │ ├── css │ │ ├── custom.css │ │ ├── ide.css │ │ └── speech-bubbles.css │ └── pages │ │ ├── index.module.css │ │ ├── index.tsx │ │ └── markdown-page.md ├── static │ ├── .nojekyll │ └── img │ │ ├── dada.svg │ │ ├── docusaurus.png │ │ ├── favicon.ico │ │ ├── logo.svg │ │ ├── tutorial │ │ ├── docsVersionDropdown.png │ │ └── localeDropdown.png │ │ ├── undraw_docusaurus_mountain.svg │ │ ├── undraw_docusaurus_react.svg │ │ └── undraw_docusaurus_tree.svg └── tsconfig.json ├── components ├── dada-check │ ├── Cargo.toml │ ├── docs │ │ └── overview.md │ └── src │ │ └── lib.rs ├── dada-codegen │ ├── Cargo.toml │ └── src │ │ ├── cx.rs │ │ ├── cx │ │ ├── generate_expr.rs │ │ ├── generate_expr │ │ │ └── wasm_place_repr.rs │ │ ├── generate_fn.rs │ │ ├── wasm_fn_type.rs │ │ └── wasm_repr.rs │ │ └── lib.rs ├── dada-compiler │ ├── Cargo.toml │ └── src │ │ ├── fork.rs │ │ ├── lib.rs │ │ ├── realfs.rs │ │ └── vfs.rs ├── dada-debug │ ├── Cargo.toml │ ├── assets │ │ ├── README.md │ │ ├── github.css │ │ ├── highlight.min.js │ │ ├── highlightjs-line-numbers.min.js │ │ ├── jsontree.css │ │ ├── jsontree.js │ │ └── makejsontree.js │ ├── src │ │ ├── assets.rs │ │ ├── error.rs │ │ ├── events.rs │ │ ├── hbs.rs │ │ ├── lib.rs │ │ ├── root.rs │ │ ├── server.rs │ │ ├── source.rs │ │ └── view.rs │ └── templates │ │ ├── header.hbs │ │ ├── index.hbs │ │ ├── log.hbs │ │ ├── render_event.hbs │ │ ├── render_nested_event.hbs │ │ ├── source.hbs │ │ └── task_children.hbs ├── dada-ir-ast │ ├── Cargo.toml │ └── src │ │ ├── ast.rs │ │ ├── ast │ │ ├── class_item.rs │ │ ├── expr.rs │ │ ├── function.rs │ │ ├── member.rs │ │ ├── types.rs │ │ ├── use_item.rs │ │ └── util.rs │ │ ├── diagnostic.rs │ │ ├── diagnostic │ │ └── render.rs │ │ ├── inputs.rs │ │ ├── lib.rs │ │ ├── macro_rules.rs │ │ └── span.rs ├── dada-ir-sym │ ├── Cargo.toml │ ├── docs │ │ ├── overview.md │ │ ├── permission_system.md │ │ ├── subtyping.md │ │ ├── type_checking.md │ │ ├── type_inference.md │ │ └── 
type_inference_discussion_notes.md │ └── src │ │ ├── check.rs │ │ ├── check │ │ ├── blocks.rs │ │ ├── debug.rs │ │ ├── debug │ │ │ └── export.rs │ │ ├── env.rs │ │ ├── env │ │ │ ├── combinator.rs │ │ │ └── infer_bounds.rs │ │ ├── exprs.rs │ │ ├── fields.rs │ │ ├── functions.rs │ │ ├── generics.rs │ │ ├── inference.rs │ │ ├── inference │ │ │ ├── reconcile.rs │ │ │ └── serialize.rs │ │ ├── live_places.rs │ │ ├── member_lookup.rs │ │ ├── modules.rs │ │ ├── places.rs │ │ ├── predicates.rs │ │ ├── predicates │ │ │ ├── is_provably_lent.rs │ │ │ ├── is_provably_owned.rs │ │ │ ├── is_provably_shared.rs │ │ │ ├── is_provably_unique.rs │ │ │ ├── require_lent.rs │ │ │ ├── require_owned.rs │ │ │ ├── require_shared.rs │ │ │ ├── require_unique.rs │ │ │ ├── require_where_clause.rs │ │ │ └── var_infer.rs │ │ ├── red.rs │ │ ├── red │ │ │ ├── lattice.rs │ │ │ └── sub.rs │ │ ├── report.rs │ │ ├── resolve.rs │ │ ├── runtime.rs │ │ ├── scope.rs │ │ ├── scope_tree.rs │ │ ├── signature.rs │ │ ├── statements.rs │ │ ├── stream.rs │ │ ├── subst_impls.rs │ │ ├── subtype.rs │ │ ├── subtype │ │ │ ├── is_future.rs │ │ │ ├── is_numeric.rs │ │ │ ├── perms.rs │ │ │ ├── relate_infer_bounds.rs │ │ │ └── terms.rs │ │ ├── temporaries.rs │ │ ├── to_red.rs │ │ ├── types.rs │ │ └── universe.rs │ │ ├── ir.rs │ │ ├── ir │ │ ├── binder.rs │ │ ├── classes.rs │ │ ├── exprs.rs │ │ ├── functions.rs │ │ ├── generics.rs │ │ ├── indices.rs │ │ ├── module.rs │ │ ├── populate.rs │ │ ├── primitive.rs │ │ ├── subst.rs │ │ ├── types.rs │ │ └── variables.rs │ │ ├── lib.rs │ │ └── well_known.rs ├── dada-lang │ ├── Cargo.toml │ ├── docs │ │ └── overview.md │ └── src │ │ ├── lib.rs │ │ ├── main_lib.rs │ │ └── main_lib │ │ ├── compile.rs │ │ ├── run.rs │ │ ├── test.rs │ │ └── test │ │ ├── expected.rs │ │ ├── panic_hook.rs │ │ └── timeout_warning.rs ├── dada-lsp-server │ ├── Cargo.toml │ └── src │ │ ├── lsp.rs │ │ ├── lsp │ │ └── dispatch.rs │ │ └── main.rs ├── dada-parser │ ├── Cargo.toml │ └── src │ │ ├── classes.rs │ │ ├── expr.rs │ │ ├── functions.rs │ │ ├── generics.rs │ │ ├── lib.rs │ │ ├── miscellaneous.rs │ │ ├── module_body.rs │ │ ├── prelude.rs │ │ ├── square_bracket_args.rs │ │ ├── tokenizer.rs │ │ └── types.rs ├── dada-probe │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── dada-util-procmacro │ ├── Cargo.toml │ └── src │ │ ├── boxed_async_fn.rs │ │ ├── boxed_async_fn │ │ └── parse.rs │ │ ├── lib.rs │ │ └── salsa_serialize.rs ├── dada-util │ ├── Cargo.toml │ └── src │ │ ├── arena.rs │ │ ├── fixed_depth_json.rs │ │ ├── fixed_depth_json_README.md │ │ ├── lib.rs │ │ ├── log.rs │ │ ├── typedvec.rs │ │ ├── vecext.rs │ │ └── vecset.rs ├── vscode │ ├── .github │ │ └── workflows │ │ │ └── build-extension.yml │ ├── .gitignore │ ├── .vscodeignore │ ├── README.md │ ├── bin │ │ └── README.md │ ├── images │ │ └── icon.svg │ ├── language-configuration.json │ ├── package-lock.json │ ├── package.json │ ├── scripts │ │ └── package-server.js │ ├── src │ │ ├── extension.ts │ │ └── utils │ │ │ └── serverUtils.ts │ ├── syntaxes │ │ └── dada.tmLanguage.json │ └── tsconfig.json └── xtask │ ├── Cargo.toml │ └── src │ ├── build.rs │ ├── deploy.rs │ └── main.rs ├── dada.code-workspace ├── justfile ├── libdada └── prelude.dada ├── package-lock.json ├── rust-toolchain ├── src ├── lib.rs └── main.rs ├── tests ├── .gitignore ├── class_inputs.dada ├── default_perms │ ├── class_field_class_ty.dada │ ├── class_field_primitive_ty.dada │ ├── class_field_struct_ty.dada │ ├── class_method_class_ty.dada │ ├── class_method_self_ty.dada │ ├── class_method_struct_ty.dada │ └── 
struct_method_self_ty.dada ├── harness.rs ├── hello_world.dada ├── parser │ ├── class_body_ill_formed.dada │ ├── class_body_two_fields.dada │ ├── fn_body.dada │ ├── fn_named_class.dada │ ├── operator_precedence.dada │ ├── operator_precedence.fn_asts.ref │ ├── pair.dada │ └── perm_ref.dada ├── spikes │ ├── bank_account.dada │ ├── class_arguments.dada │ └── leased_method.dada ├── symbols │ ├── bad_local_variable_in_perm_ref.dada │ └── bad_name.dada └── type_check │ ├── infer_add_u32s.dada │ ├── infer_conflicting_bounds.dada │ ├── infer_ref_string.dada │ ├── infer_var_u32.dada │ ├── predicate_mut_string.dada │ ├── predicate_my_string.dada │ ├── predicate_our_string.dada │ ├── predicate_ref_string.dada │ ├── predicate_u32.dada │ └── predicate_via_spec.dada └── walkthrough.md /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [alias] 2 | xtask = "run -q --package xtask --" 3 | dada = "run -q --package dada --" -------------------------------------------------------------------------------- /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use the Debian base image 2 | FROM mcr.microsoft.com/devcontainers/base:bookworm 3 | 4 | # Install Node.js 5 | RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash - \ 6 | && apt-get install -y nodejs 7 | 8 | # Install additional tools or dependencies if needed 9 | RUN npm install -g yo generator-code 10 | 11 | # Install OpenSSH client 12 | RUN apt-get update && apt-get install -y openssh-client 13 | 14 | USER vscode 15 | 16 | # Install Rust and the wasm32-wasip1-threads target 17 | RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y 18 | ENV PATH="/home/vscode/.cargo/bin:${PATH}" 19 | RUN rustup install nightly-2025-03-01 20 | RUN rustup target add wasm32-wasip1-threads 21 | 22 | 23 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the 2 | // README at: https://github.com/devcontainers/templates/tree/main/src/rust 3 | { 4 | "name": "Rust", 5 | // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile 6 | "image": "mcr.microsoft.com/devcontainers/rust:1-1-bookworm", 7 | "features": { 8 | "devwasm.azurecr.io/dev-wasm/dev-wasm-feature/rust-wasi:0": {} 9 | } 10 | 11 | // Use 'mounts' to make the cargo cache persistent in a Docker Volume. 12 | // "mounts": [ 13 | // { 14 | // "source": "devcontainer-cargo-cache-${devcontainerId}", 15 | // "target": "/usr/local/cargo", 16 | // "type": "volume" 17 | // } 18 | // ] 19 | 20 | // Features to add to the dev container. More info: https://containers.dev/features. 21 | // "features": {}, 22 | 23 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 24 | // "forwardPorts": [], 25 | 26 | // Use 'postCreateCommand' to run commands after the container is created. 27 | // "postCreateCommand": "rustc --version", 28 | 29 | // Configure tool-specific properties. 30 | // "customizations": {}, 31 | 32 | // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 
33 | // "remoteUser": "root" 34 | } 35 | -------------------------------------------------------------------------------- /.github/bors.toml: -------------------------------------------------------------------------------- 1 | status = [ 2 | "Check", 3 | "Test", 4 | "Rustfmt", 5 | "Visual Studio Code", 6 | "Clippy", 7 | "Deploy", 8 | "Typos" 9 | ] 10 | delete_merged_branches = true -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - staging 8 | - trying 9 | pull_request: 10 | 11 | jobs: 12 | check: 13 | name: Check 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v3 17 | - uses: dtolnay/rust-toolchain@nightly 18 | - uses: Swatinem/rust-cache@v2 19 | - run: cargo check 20 | 21 | test: 22 | name: Test 23 | runs-on: ubuntu-latest 24 | steps: 25 | - uses: actions/checkout@v3 26 | - uses: dtolnay/rust-toolchain@nightly 27 | - uses: Swatinem/rust-cache@v2 28 | - run: cargo test 29 | 30 | deploy: 31 | name: Deploy 32 | runs-on: ubuntu-latest 33 | steps: 34 | - uses: actions/checkout@v3 35 | - uses: dtolnay/rust-toolchain@nightly 36 | - uses: Swatinem/rust-cache@v2 37 | - uses: actions/setup-node@v3 38 | with: 39 | node-version-file: '.nvmrc' 40 | - run: cargo xtask deploy --check 41 | 42 | rustfmt: 43 | name: Rustfmt 44 | runs-on: ubuntu-latest 45 | steps: 46 | - uses: actions/checkout@v3 47 | - uses: dtolnay/rust-toolchain@nightly 48 | with: 49 | components: rustfmt 50 | - uses: Swatinem/rust-cache@v2 51 | - run: cargo fmt --check 52 | 53 | clippy: 54 | name: Clippy 55 | continue-on-error: true 56 | runs-on: ubuntu-latest 57 | steps: 58 | - uses: actions/checkout@v3 59 | - uses: dtolnay/rust-toolchain@nightly 60 | with: 61 | components: clippy 62 | - uses: Swatinem/rust-cache@v2 63 | - run: cargo clippy -- -Dwarnings 64 | 65 | typos: 66 | name: Typos 67 | runs-on: ubuntu-latest 68 | steps: 69 | - uses: actions/checkout@v3 70 | - uses: crate-ci/typos@v1.0.4 71 | with: 72 | config: ./.typos.toml 73 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | .vscode 3 | playground 4 | dada_debug 5 | 6 | # Internal IR files used for debugging 7 | syntax.debug 8 | validated.debug 9 | bir.debug 10 | lsp.debug 11 | dada_debug.* -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | v16.8.0 2 | 3 | -------------------------------------------------------------------------------- /.typos.toml: -------------------------------------------------------------------------------- 1 | [files] 2 | extend-exclude = ["components/dada-web/ace/", "book/src/css/speech-bubbles.css", "book/package-lock.json", "editors/code/package-lock.json"] 3 | [default.extend-words] 4 | fo = "fo" 5 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | 8 | { 9 | "type": "lldb", 10 | "request": "launch", 11 | "name": "Debug executable 'dada' on a test", 12 | "cargo": { 13 | "args": [ 14 | "build", 15 | "--bin=dada", 16 | "--package=dada" 17 | ], 18 | "filter": { 19 | "name": "dada", 20 | "kind": "bin" 21 | } 22 | }, 23 | "args": [ 24 | "compile", 25 | "${input:enterProgram}" 26 | ], 27 | "cwd": "${workspaceFolder}" 28 | }, 29 | { 30 | // Used for testing the extension with a local build of the LSP server. 31 | "name": "Run Extension (Debug Build)", 32 | "type": "extensionHost", 33 | "request": "launch", 34 | "runtimeExecutable": "${execPath}", 35 | "args": [ 36 | "--disable-extensions", 37 | "--extensionDevelopmentPath=${workspaceFolder}/editors/code" 38 | ], 39 | "outFiles": [ 40 | "${workspaceFolder}/editors/code/out/**/*.js" 41 | ], 42 | "preLaunchTask": "Build Dada and Extension", 43 | "skipFiles": [ 44 | "/**/*.js" 45 | ], 46 | "env": { 47 | "__DADA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/debug/dada" 48 | } 49 | }, 50 | ], 51 | "inputs": [ 52 | { 53 | "id": "enterProgram", 54 | "type": "promptString", 55 | "description": "Program to compile", 56 | "default": "tests/" 57 | } 58 | ] 59 | } -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Dada is pretty young and informal, but we welcome contributions! 2 | 3 | You can read more about contributing here: 4 | 5 | https://dada-lang.org/docs/contributing/guidelines 6 | 7 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [".", "components/dada-*", "components/xtask"] 3 | default-members = [ # Exclude xtask so it isn't installed with `cargo install` 4 | ".", 5 | "components/dada-*", 6 | ] 7 | 8 | [workspace.package] 9 | version = "0.1.0" 10 | repository = "https://github.com/dada-lang" 11 | edition = "2024" 12 | 13 | [workspace.dependencies] 14 | salsa = { git = "https://github.com/salsa-rs/salsa.git" } 15 | anyhow = "1.0.93" 16 | url = "2.5.3" 17 | annotate-snippets = "0.11.4" 18 | wasm-encoder = "0.220.0" 19 | 20 | [workspace.lints.clippy] 21 | needless_lifetimes = "allow" 22 | 23 | [package] 24 | name = "dada" 25 | version.workspace = true 26 | edition.workspace = true 27 | repository.workspace = true 28 | default-run = "dada" 29 | 30 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 31 | 32 | [[test]] 33 | name = "harness" 34 | harness = false 35 | 36 | [dependencies] 37 | dada-lang = { path = "components/dada-lang" } 38 | dada-util = { version = "0.1.0", path = "components/dada-util" } 39 | structopt = "0.3.26" 40 | thiserror = "1.0.63" 41 | tokio = { version = "1.38.0", features = ["macros","rt"] } 42 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 dada-lang 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, 
distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dada 2 | 3 | 4 | Tzara 5 | 6 | > I speak only of myself since I do not wish to convince, I have no right to drag others into my river, I oblige no one to follow me and everybody practices his art in his own way. 7 | > 8 | > *Tristan Tzara, "Dada Manifesto 1918”* 9 | 10 | ## What the heck is Dada? 11 | 12 | Dada is a thought experiment. What if we were making a language like Rust, but one that was meant to feel more like Java or JavaScript, and less like C++? One that didn't aspire to being used in kernels or tiny embedded devices and was willing to require a minimal runtime. What might that look like? 13 | 14 | ## To try it yourself... 15 | 16 | As of right now, Dada doesn't really exist. Check back soon. 17 | 18 | ## License 19 | 20 | Licensed under either of [Apache License, Version 2.0][apache] or [MIT license][mit] at your option. 21 | 22 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in this repository by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. 
23 | 24 | [apache]: LICENSE-APACHE 25 | [mit]: LICENSE-MIT 26 | -------------------------------------------------------------------------------- /book/.gitignore: -------------------------------------------------------------------------------- 1 | # Dependencies 2 | /node_modules 3 | 4 | # Production 5 | /build 6 | /book 7 | 8 | # Generated files 9 | .docusaurus 10 | .cache-loader 11 | 12 | # Misc 13 | .DS_Store 14 | .env.local 15 | .env.development.local 16 | .env.test.local 17 | .env.production.local 18 | 19 | npm-debug.log* 20 | yarn-debug.log* 21 | yarn-error.log* 22 | -------------------------------------------------------------------------------- /book/.prettierignore: -------------------------------------------------------------------------------- 1 | /node_modules 2 | /.docusaurus 3 | 4 | /build 5 | 6 | /blog 7 | /docs 8 | /static -------------------------------------------------------------------------------- /book/.prettierrc.jsonc: -------------------------------------------------------------------------------- 1 | // empty file to prevent editor settings from taking effect 2 | // and esure we use prettier default settings 3 | {} 4 | -------------------------------------------------------------------------------- /book/README.md: -------------------------------------------------------------------------------- 1 | # Website 2 | 3 | This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator. 4 | 5 | ### Installation 6 | 7 | ``` 8 | $ yarn 9 | ``` 10 | 11 | ### Local Development 12 | 13 | ``` 14 | $ yarn start 15 | ``` 16 | 17 | This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server. 18 | 19 | ### Build 20 | 21 | ``` 22 | $ yarn build 23 | ``` 24 | 25 | This command generates static content into the `build` directory and can be served using any static contents hosting service. 26 | 27 | ### Deployment 28 | 29 | Using SSH: 30 | 31 | ``` 32 | $ USE_SSH=true yarn deploy 33 | ``` 34 | 35 | Not using SSH: 36 | 37 | ``` 38 | $ GIT_USER= yarn deploy 39 | ``` 40 | 41 | If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch. 42 | -------------------------------------------------------------------------------- /book/babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [require.resolve("@docusaurus/core/lib/babel/preset")], 3 | }; 4 | -------------------------------------------------------------------------------- /book/blog/authors.yml: -------------------------------------------------------------------------------- 1 | nikomatsakis: 2 | name: Nicholas Matsakis 3 | title: Chief Dadaist 4 | url: https://github.com/nikomatsakis 5 | image_url: https://github.com/nikomatsakis.png 6 | -------------------------------------------------------------------------------- /book/docs/about.md: -------------------------------------------------------------------------------- 1 | # Welcome 2 | 3 | > I speak only of myself since I do not wish to convince, I have no right to drag others into my river, I oblige no one to follow me and everybody practices their art their own way. 4 | > 5 | > Tristan Tzara, "Dada Manifesto 1918”[^1] 6 | 7 | ## What the heck is Dada? 8 | 9 | Dada is a thought experiment. What if we were making a language like Rust, but one that was meant to feel more like Java or JavaScript, and less like C++? 
One that didn't aspire to being used in kernels or tiny embedded devices and was willing to require a minimal runtime. What might that look like? 10 | 11 | ## What is the state of Dada? 12 | 13 | Active hacking. Trying to get something up and going by the Rust All Hands. 14 | 15 | OK, from here on out I'm going to pretend that Dada really exists in its full glory. 16 | 17 | ## Dada in a nutshell 18 | 19 | Dada is an ownership-based language that is in some ways similar to Rust: 20 | 21 | * Like Rust, Dada doesn't require a garbage collector. 22 | * Like Rust, Dada guarantees memory safety and data-race freedom. 23 | * Like Rust, Dada data structures can be allocated in the stack and use flat memory layouts. 24 | 25 | In other ways, though, Dada is very different: 26 | 27 | * Like TypeScript, Dada is a **gradually typed** language: 28 | * That means you can **start out using Dada in the interpreter, with no type annotations**, to get a feel for how it works. 29 | * Once you've gotten comfortable with it, you can **add type annotations and use the compiler for performance comparable to Rust**. 30 | * Dada **targets WebAssembly** first and foremost: 31 | * You can build native targets with Dada, but its FFI system is based on [WebAssembly interface types](https://hacks.mozilla.org/2019/08/webassembly-interface-types/). 32 | * Dada is **object-oriented**, though not in a purist way: 33 | * Dada combines OO with nice features like pattern matching, taking inspiration from languages like Scala. 34 | 35 | Dada also has some limitations compared to Rust: 36 | 37 | * Dada has a required runtime and does not target "bare metal systems" or kernels. 38 | * Dada does not support inline assembly or arbitrary unsafe code. 39 | 40 | ## Curious to learn more? 41 | 42 | Read our [FAQ](/docs/about/faq/) or one of our [tutorials](/docs/tutorials/). 43 | 44 | ## Footnotes 45 | 46 | [^1]: Updated to use modern pronouns. 47 | -------------------------------------------------------------------------------- /book/docs/about/faq.md: -------------------------------------------------------------------------------- 1 | # FAQ 2 | 3 | ## Isn't Dada competing with Rust? 4 | 5 | I don't think of it that way. To me, the goal of Dada is to explore some ideas without worrying about backwards compatibility with Rust for a time. I figure that this leads us to one of two places: 6 | 7 | * Dada informs new Rust features and designs, and remains an experiment. There are tons of ideas in Dada that could apply to Rust, although in some cases we'd have to think about how to adapt them. 8 | * Dada becomes a successful language in its own right, one that interoperates smoothly and well with Rust. 9 | 10 | ## Why work on Dada? 11 | 12 | Working on Dada is really fun and, frankly, kind of relaxing for me. It's also a way to explore different language ideas unfettered by constraints of backwards compatibility. It is my hope that some of the ideas in Dada can make their way back to Rust. --nikomatsakis -------------------------------------------------------------------------------- /book/docs/acknowledgments.md: -------------------------------------------------------------------------------- 1 | # Acknowledgments 2 | 3 | Dada is designed and implemented by the [dadaists](https://github.com/orgs/dada-lang/teams/dadaists). [nikomatsakis] is the Chief Dadaist. 4 | 5 | Dada started as a "fork" of the ideas in [Lark], which was a joint project by [nikomatsakis], [wycats], and [JT]. 
6 | 7 | [Ralf Jung]'s work on [stacked borrows] is a key ingredient for Dada's operational semantics and overall approach. 8 | 9 | I found [Felienne Herman]'s book [The Programmer's Brain] quite intriguing, and I've been leaning on the framework it suggests as I iterate and thinking about the experience of learning Dada. I feel confident it is showing up in here. 10 | 11 | I also want to highlight [Lionel Parreaux]'s work on [Seagl]. It has a lot of similarities to Dada's leases and Polonius's origins. Parreaux described it to me many years ago and I didn't fully appreciate the power of this direction at the time. Cool stuff. 12 | 13 | --nikomatsakis 14 | 15 | [ic]: https://www.artspace.com/jenny_holzer/all-things-are-delicately-interconnected 16 | [Felienne Herman]: https://twitter.com/Felienne/ 17 | [The Programmer's Brain]: https://www.manning.com/books/the-programmers-brain 18 | [Ralf Jung]: https://www.ralfj.de/research/ 19 | [stacked borrows]: https://plv.mpi-sws.org/rustbelt/stacked-borrows/ 20 | [Seagl]: https://www.dropbox.com/s/be1u4xp1t2h0uxa/Seagl_Report.pdf 21 | [Lionel Parreaux]: https://twitter.com/lparreaux?lang=en 22 | [nikomatsakis]: https://github.com/nikomatsakis/ 23 | [JT]: https://twitter.com/jntrnr/ 24 | [wycats]: https://twitter.com/wycats/ 25 | [Lark]: https://github.com/lark-exploration/lark 26 | 27 | -------------------------------------------------------------------------------- /book/docs/build_and_run.md: -------------------------------------------------------------------------------- 1 | # Building Dada and running tests 2 | 3 | If you're interested in contributing to Dada development, the first thing you will want to do is build and run tests. Here's a quick guide to how it works. Note that Dada is implemented in Rust, so you have to [install Rust](https://doc.rust-lang.org/cargo/getting-started/installation.html) first. 4 | 5 | ## Build and run Dada 6 | 7 | Building Dada is easy. Simply clone the repository and type: 8 | 9 | ``` 10 | > cargo build 11 | ``` 12 | 13 | Once it is built, you can run it by doing 14 | 15 | ``` 16 | > cargo dada --help 17 | ``` 18 | 19 | `dada` is a [cargo alias] for `cargo run`; it turns off some of cargo's output about building things and so forth. If you prefer, you can do 20 | 21 | [cargo alias]: https://doc.rust-lang.org/cargo/reference/config.html#alias 22 | 23 | ``` 24 | > cargo run -- --help 25 | ``` 26 | 27 | ## Running tests 28 | 29 | Like any cargo package, Dada's test suite can be run with `cargo test`. You may also find it convenient to run the Dada test runner alone via... 30 | 31 | ``` 32 | > cargo dada test 33 | ``` 34 | 35 | ...as this allows you to pass more options. Read the test runner documentation for more details. 36 | 37 | ## Checking a particular file for compilation errors 38 | 39 | You can check a particular file for compilation errors by using 40 | 41 | ``` 42 | > cargo dada check path/to/file.dada 43 | ``` 44 | 45 | There are other options too, e.g. for dumping out the IR in various stages, check out 46 | 47 | ``` 48 | > cargo dada check --help 49 | ``` 50 | 51 | to see the list. 52 | 53 | ## Logs and debugging 54 | 55 | If you are debugging Dada, you will probably want to see the logs. You can configure them using the `--log` parameter. Dada uses [tracing] so it takes the usual configuration options. 
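In addition to the simpler forms shown in the examples below, the filter can scope different levels to different modules in a single invocation. This is only a sketch: it assumes the usual tracing `target=level` directive syntax, and the two crate names are picked from this repository purely for illustration.

```
> cargo dada --log dada_parser=debug,dada_ir_sym=info check dada_tests/hello_world.dada
```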
56 | 57 | [tracing]: https://docs.rs/tracing/latest/tracing/ 58 | 59 | For example, this will dump out *all* debug logs: 60 | 61 | ``` 62 | > cargo dada --log debug check dada_tests/hello_world.dada 63 | ``` 64 | 65 | Or you can dump the logs from a particular crate or module: 66 | 67 | ``` 68 | > cargo dada --log dada_brew check dada_tests/hello_world.dada 69 | ``` 70 | 71 | -------------------------------------------------------------------------------- /book/docs/caveat.md: -------------------------------------------------------------------------------- 1 | > ⚠️ **DADA DOESN'T REALLY EXIST.** ⚠️ 2 | > 3 | > See the [About page](/docs/about) for more information. Also, you have to pretend that all the code examples are editable and runnable, with live IDE tooltips and so forth. =) 4 | -------------------------------------------------------------------------------- /book/docs/contributing.mdx: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Dada doesn't exist... but we're the folks making it happen! 4 | 5 | If you'd like to help build Dada, this section is for you! -------------------------------------------------------------------------------- /book/docs/contributing/build.mdx: -------------------------------------------------------------------------------- 1 | # How to build Dada 2 | 3 | ## Building Dada as a standalone executable 4 | 5 | You can build Dada just like any other Rust project: 6 | 7 | * Install Rust via [rustup](https://rustup.rs/) (note that Dada uses the nightly toolchain) 8 | * Checkout dada from github 9 | * Run tests `cargo test` 10 | 11 | You can also use the `cargo dada` subcommand: 12 | 13 | * Run tests: `cargo dada test` or `cargo dada test --bless` 14 | * Run an individual file: `cargo dada run file.rs` 15 | 16 | ## Building the website 17 | 18 | The Dada website is a docusaurus project. 19 | 20 | ### Installation 21 | 22 | * Install node.js, we recommend [nvm](https://github.com/nvm-sh/nvm) 23 | * You want the version of node found in `.nvmrc` 24 | * With nvm, just use `nvm install` to get the correct version of node 25 | * Run `cargo xtask deploy` 26 | * This will compile Dada with webassembly and build a static copy of the site 27 | * To test the site locally: 28 | * You must first run `cargo xtask deploy` to build the webassembly package 29 | * You can then `cd book` and run `npm start` -------------------------------------------------------------------------------- /book/docs/contributing/guidelines.mdx: -------------------------------------------------------------------------------- 1 | # Contribution guidelines 2 | 3 | Dada is pretty young and informal, but we welcome contributions! 4 | 5 | Keep in mind that Dada is my "spare time, relaxation and exploration" project. Responses may be slow and I am driving the language direction, syntax, and semantics. Naturally I would absolutely love to hear what people think, but I am acting as Chief Dadaist and final decision maker for the time being. --nikomatsakis 6 | 7 | ## Code of conduct 8 | 9 | All participation in the dada-lang repository is subject to the [code of conduct](./coc). 10 | 11 | Note that this governs not only things like sexist behavior but also the responsibility to demonstrate empathy, respect other opinions, and both give and accept (constructive) feedback. 12 | 13 | ## Feature gates 14 | 15 | Dada uses nightly, but we are careful with the feature gates we use. We only want to use features 16 | that are close to stabilizing. 
Currently accepted feature gates: 17 | 18 | ```rust 19 | #![feature(trait_upcasting)] 20 | #![feature(try_blocks)] 21 | ``` 22 | 23 | ## Finding an issue 24 | 25 | Issues on the repository that are tagged with "good first issue' and the like are a good bet. 26 | 27 | ## Review policy 28 | 29 | Anyone is welcome to review using the github review tool. Best practice is to get PRs reviewed by others. 30 | 31 | **To merge a PR, do NOT use the merge button.** Instead use `bors r+`. Bors only accepts commands from the dadaists team (see below). 32 | 33 | Naturally one should only approve PRs if you have thorough knowledge of the relevant code and are confident the code is correct. 34 | 35 | **Furthermore, anything which changes the syntax/semantics of Dada in some notable way needs approval from the BDFL.** 36 | 37 | ## The dadaists team 38 | 39 | The dadaists team consists of recognized contributors. For the time being, since we are so young, anyone with a merged PR will be offered membership, but you should only accept if you are expecting to continue contributing. As the project grows, we will revisit the criteria for membership. 40 | 41 | Membership in the dadaists team gives the ability to approve PRs, adjust labels, and so forth. 42 | 43 | Members of the Dadaists team are expected to not only obey the CoC but to exemplify its principles, particularly demonstrating empathy and kindness to all (including those who may be violating the CoC, or bordering on it). Note that demonstrating kindness does not mean people can do whatever they want. 44 | 45 | ## BDFL and decision making 46 | 47 | Decision making strives to be by consensus. Please voice your opinion, keeping in mind the [code-of-conduct]. 48 | 49 | [code-of-conduct]: ./coc 50 | 51 | For the time being, nikomatsakis is BDFL and has final say whenever anything gets controversial. 52 | -------------------------------------------------------------------------------- /book/docs/design_docs.mdx: -------------------------------------------------------------------------------- 1 | import Caveat from './caveat.md' 2 | 3 | # Dada Design Docs 4 | 5 | This section of the site contains design docs and reference material. 6 | 7 | There is an experimental, and rather dated, operational semantics and type system at the [dada-lang/dada-model](https://github.com/dada-lang/dada-model/) repository. 8 | 9 | -------------------------------------------------------------------------------- /book/docs/design_docs/calculus.md: -------------------------------------------------------------------------------- 1 | # Dada: the calculus 2 | 3 | ## Repository 4 | 5 | You will find the Dada model in the [dada-lang/dada-model] repository. The model is implemented in [PLT Redex], which is a fantastic tool for exploring type systems and operational semantics. This section is meant to explain how it works, but I've not written it yet! 6 | 7 | [Dada model]: https://github.com/dada-lang/dada-model/ 8 | [PLT Redex]: https://redex.racket-lang.org/why-redex.html 9 | 10 | ## Current status 11 | 12 | * Working on the operational semantics, which I believe to be "getting close". 13 | * The static type system is on hold while those are finalized. 14 | * There is a complete (but no doubt unsound) working type system model, but it targets an older version of Dada. 
-------------------------------------------------------------------------------- /book/docs/design_docs/why_not_explicit_end_for_lease.md: -------------------------------------------------------------------------------- 1 | # Why not have an explicit end for lease? 2 | 3 | For a while I attempted to have an **explicit end** for every lease 4 | rather than adopting the "stacked borrows"-like approach in which 5 | leases are ended by their owner taking contrary action. 6 | 7 | In many ways, explicit ends of leases is a cleaner approach. 8 | The difference is where the error occurs: 9 | 10 | ``` 11 | class Counter(counter) 12 | 13 | let x = Counter(0) 14 | 15 | // Create a lease of x 16 | let y = x.lease 17 | 18 | // In the "explicit end" version, this is an error. 19 | // In the "permissive" variant, it cancels the lease. 20 | x.counter = 1 21 | 22 | // In the "permissive" variant, error occurs here. 23 | print(y.counter) 24 | ``` 25 | 26 | It's hard to say which of these statements is wrong. 27 | Deferring the error until something has definitively gone wrong 28 | is more permissive and hence the required approach for Rust's unsafe code 29 | (which wants to be as backwards compatible with common practice as possible). 30 | Dada's untyped mode is analogous to unsafe code, so that's plausibly a good choice here, 31 | but I wanted to try the alternative. 32 | 33 | ## Where it goes wrong 34 | 35 | To make things usable, you don't want to _EXPLICITLY_ end leases, 36 | so we want to have some kind of _drop_ that is auto-inserted. 37 | I imagined we would do this based on liveness information. 38 | But that has a flaw: when you compute liveness, you see direct 39 | uses of a variable, but not indirect. Consider this (correct) code: 40 | 41 | ``` 42 | class Counter(counter) 43 | let x = Counter(0) 44 | let y = x.lease // <-- last (direct) use of `x` 45 | print(y.counter) 46 | ``` 47 | 48 | Liveness-based analysis would drop `x` immediately after the lease, 49 | since it has no more direct uses. But that's not what we want. 50 | 51 | We could in principle say that when something is dropped, 52 | it remains in scope until the leases end, 53 | but that's basically GC. 54 | 55 | ## Doing better requires type data 56 | 57 | We could do better if we took types into account, but gradual typing implies they may not be available. 58 | Besides, that's getting pretty complex. 59 | 60 | ## Implications for Rust 61 | 62 | I would like to have liveness-based drops for Rust, but this reveals the (obvious in hindsight) flaw: 63 | we never know when raw pointers are out there. So unless we know that, or declare it UB in some way, 64 | we can't promote drops earlier. 65 | -------------------------------------------------------------------------------- /book/docs/dyn_tutorial/any.md: -------------------------------------------------------------------------------- 1 | # The `any` permission 2 | 3 | import Caveat from '../caveat.md' 4 | 5 | 6 | 7 | Rather than labeling variables as `my` or `our`, you can also use the `any` keyword. This will permit the variable to store an object with any permission. 
When using `any`, the `give` and `share` keywords allow you to control the ownership: 8 | 9 | ``` 10 | class Point(x: our, y: our) 11 | 12 | # The point is `my` when first created 13 | let my_p: any = Point(22, 44) 14 | 15 | # You can `give` it to another variable 16 | let my_p_now: any = my_p.give 17 | 18 | # You can `share` it 19 | let our_p: any = my_p_now.ref 20 | 21 | # Giving a shared thing is a copy 22 | let also_our_p: any = our_p.give 23 | 24 | # So is sharing 25 | let and_our_p_too: any = our_p.ref 26 | ``` 27 | 28 | ## Using `any` to operate on multiple permissions with one function 29 | 30 | The `any` permission is useful if you want to have functions that operate over multiple permissions. Consider the function `give_a`: 31 | 32 | ``` 33 | class Pair(a: my, b: my) 34 | 35 | fn give_a(pair: any) -> { 36 | pair.a.give 37 | } 38 | ``` 39 | 40 | If `give_a` is called on a `my` object, it will return a `my` object, as shown here: 41 | 42 | ``` 43 | # class Pair(a: my, b: my) 44 | # 45 | # fn give_a(pair: any) -> { 46 | # pair.a.give 47 | # } 48 | 49 | class Widget(name: our) 50 | let my_pair: my = Pair(Widget("a"), Widget("b")) 51 | let my_widget: my = give_a(my_pair) 52 | print(my_widget).await # Prints 'Widget("a")' 53 | print(my_pair).await # Error, my_pair has been given away 54 | ``` 55 | 56 | But if `give_a` is called on an `our` object, it will return an `our` object: 57 | 58 | ``` 59 | # class Pair(a: my, b: my) 60 | # 61 | # fn give_a(pair: any) -> { 62 | # pair.a.give 63 | # } 64 | 65 | class Widget(name: our) 66 | let our_pair: our = Pair(Widget("a"), Widget("b")) 67 | let our_widget: our = give_a(our_pair) 68 | print(our_widget).await # Prints 'Widget("a")' 69 | print(our_pair).await # Prints 'Pair(Widget("a"), Widget("b"))' 70 | ``` 71 | 72 | ## A hint of what's to come: generic functions 73 | 74 | In Typed Dada, `any` functions become a shorthand for generic functions. 75 | -------------------------------------------------------------------------------- /book/docs/dyn_tutorial/class.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 2 3 | --- 4 | 5 | # Declaring the `Point` class 6 | 7 | import Caveat from '../caveat.md' 8 | 9 | 10 | 11 | The main data structures in Dada are classes. The full class syntax has various bells and whistles, but let's start off with the simplest form. We'll define a class `Point` for storing `(x, y)` values. It will have two fields, `x` and `y`: 12 | 13 | ``` 14 | class Point(x, y) 15 | # ^^^^^ ^ ^ 16 | # | | | 17 | # | | Field name 18 | # | Field name 19 | # Class name 20 | ``` 21 | 22 | ## Constructor functions 23 | 24 | The `class Point(..)` syntax also creates a constructor function that creates an instance of `Point` when called. To get a feel for how classes work in practice, work with the following code. Feel free to make changes and see what happens! You'll also notice that when you move your cursor, the code executes up until the line you selected. 25 | 26 | ```dada ide 27 | class Point(x, y) 28 | 29 | # This function is declared as `async` because it 30 | # awaits the result of print. 31 | async fn print_point(p) { 32 | # Note that you can use `{...}` to embed an expression 33 | # into the value of a string (in this case, the 34 | # variable `p`). 35 | print("The point is: {p}").await 36 | } 37 | 38 | # Declares a function that computes a new value. 39 | # (It doesn't await anything, so the function is not `async`.) 
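# The last expression in the body (the `33` below) is the value
# that the function returns.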
40 | fn compute_new_value() -> { 41 | # ^^ this `->` indicates that 42 | # the function returns a value. 43 | 33 44 | } 45 | 46 | # Writing `let p = ...` declares a new variable `p` 47 | # and assigns its initial value (`Point(22, 44)`) 48 | let p = Point(22, 44) 49 | 50 | # Invoke `print_point`; it's an `async` function, 51 | # so await the result 52 | print_point(p).await 53 | 54 | # The `=` operator is used to modify an existing field. 55 | p.x = compute_new_value() 56 | 57 | # You can also use `+=` to modify an existing field 58 | # (this time by adding to it). Other operators, like 59 | # `-=`, `*=`, `/=`, also work. 60 | p.x += 1 61 | 62 | # Print the new value. 63 | print_point(p).await 64 | ``` 65 | -------------------------------------------------------------------------------- /book/docs/dyn_tutorial/field_permissions.md: -------------------------------------------------------------------------------- 1 | # Permissions on fields 2 | 3 | import Caveat from '../caveat.md' 4 | 5 | 6 | 7 | Like other variables, class fields have permissions. The `Point` class we've been working with, for example, declares its `x` and `y` fields to have `our` permission: 8 | 9 | ``` 10 | class Point(x: our, y: our) 11 | # ~~~ ~~~ 12 | ``` 13 | 14 | We could also declare fields to have `my` permission, like this `Pair` class does: 15 | 16 | ``` 17 | class Pair(a: my, b: my) 18 | ``` 19 | 20 | Because the fields on `Pair` are declared as `my`, they will take ownership of the data stored in them. You can see that creating a `Pair` moves the values into the `Pair` by exploring examples like this one: 21 | 22 | ``` 23 | class Widget() 24 | class Pair(a: my, b: my) 25 | 26 | let w_a: my = Widget() 27 | let w_b: my = Widget() 28 | let pair: my = Pair(w_a, w_b) 29 | print(pair).await # Prints `Pair(Widget(), Widget())` 30 | print(w_a).await # Error: moved! 31 | ``` 32 | 33 | Once you create a `Pair`, you can also move values out from its fields. Try moving the cursor in this example to just after the `pair.a`: 34 | 35 | ``` 36 | class Widget() 37 | class Pair(a: my, b: my) 38 | 39 | let pair: my = Pair(Widget(), Widget()) 40 | let w_a: my = pair.a 41 | # ▲ 42 | # ──────────────────┘ 43 | print(w_a).await # Prints `Widget()` 44 | 45 | # You see: 46 | # 47 | # ┌──────┐ ┌──────┐ 48 | # │ pair ├──my──►│ Pair │ 49 | # │ │ │ ──── │ 50 | # │ │ │ a │ ┌────────┐ 51 | # │ │ │ b: ├──my──►│ Widget │ 52 | # │ │ └──────┘ └────────┘ 53 | # │ │ ┌────────┐ 54 | # │ w_a ├──my──►│ Widget │ 55 | # └──────┘ └────────┘ 56 | ``` 57 | 58 | ## Inherited `our` permissions 59 | 60 | When you access a field, the permission you get is determined not only by the permission declared on the field itself but by the path you take to reach it. In particular, if you have `our` permission to an object, all of its `my` fields also become `our`, as you can see in this next. Assigning to `w_a: my` gets an error, because `pair.a` has the wrong permissions; try changing it to `w_a: our` and you will see that it works fine: 61 | 62 | ``` 63 | class Widget() 64 | class Pair(a: my, b: my) 65 | 66 | let pair: our = Pair(Widget(), Widget()) 67 | let w_a: my = pair.a # Error: `pair.a` has `our` permission 68 | print(w_a).await 69 | ``` 70 | 71 | This might seem surprising, but think about it: if you have `our` permission, then there can be other variables that have `our` permission as well, and you can't _both_ have `my` permission to the fields. 
Otherwise, in an example like this, both `w_a1` and `w_a2` would have `my` permission to the same `Widget`, and that can't be: 72 | 73 | ``` 74 | class Widget() 75 | class Pair(a: my, b: my) 76 | 77 | let pair1: our = Pair(Widget(), Widget()) 78 | let pair2: our = pair1 79 | let w_a1: my = pair1.a # Error: `pair.a` has `our` permission 80 | let w_a2: my = pair2.a 81 | ``` 82 | -------------------------------------------------------------------------------- /book/docs/dyn_tutorial/index.md: -------------------------------------------------------------------------------- 1 | # Hello, Dada! 2 | 3 | import Caveat from '../caveat.md' 4 | 5 | 6 | 7 | You can see a classic “Hello, World” program in Dada below; it should be quite familiar. Note that this is a live-editing IDE -- try editing the program to see the new output! 8 | 9 | ```dada ide 10 | # Print a classic quote 11 | print(" 12 | I have forced myself to contradict myself 13 | in order to avoid conforming to my own taste. 14 | -- Marcel Duchamp 15 | ").await 16 | ``` 17 | 18 | Some interesting things: 19 | 20 | - Comments in Dada are written with `#`, like Python or Ruby, not `//` like JavaScript or Rust. 21 | - Dada, like JavaScript, is based exclusively on **async-await**. This means that operations that perform I/O, like `print`, don't execute immediately. Instead, they return a _thunk_, which is basically "code waiting to run" (but not running yet). The thunk doesn't execute until you _await_ it by using the `.await` operation. 22 | - Strings in Dada can spread over multiple lines. Leading and trailing whitespace is stripped by default, and we also remove any common indentation from each line. 23 | -------------------------------------------------------------------------------- /book/docs/dyn_tutorial/labeled_arguments.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 3 3 | --- 4 | 5 | # Aside: Labeled arguments 6 | 7 | Before we go further with the tutorial, it's worth noting that Dada supports _labeled arguments_. That means that instead of writing `Point(22, 44)` one can also give labels to each argument, like `Point(x: 22, y: 44)`: 8 | 9 | ```dada ide 10 | class Point(x, y) 11 | 12 | let p: my = Point(x: 22, y: 44) 13 | print("The point is `{p}`").await 14 | ``` 15 | 16 | Try changing the code above to give the parameters in a different order, such as `Point(y: 44, x: 22)` -- you will see that the output doesn't change. 17 | 18 | Adding labels can help make it clearer what is going on. The rules are as follows: 19 | 20 | - You must also give the arguments in the order in which they were declared in the function, whether or not labels were provided. 21 | - Once you give a label to a parameter, you must give a label to all the remaining parameters (so you can't do `Point(x: 22, yy)` but you can do `Point(22, y: 44)`. 22 | 23 | Dada will also sometimes suggest you use labels if it thinks you might be making a mistake. For example, try this: 24 | 25 | ```dada ide 26 | class Point(x, y) 27 | 28 | async fn print_line(start, end) { 29 | print("The start is {start}").await 30 | print("The end is {end}").await 31 | } 32 | 33 | let start = Point(22, 44) 34 | let end = Point(33, 55) 35 | print_line(end, start).await 36 | # ~~~~~~~~~~ warning: are these parameters in the right order? 37 | ``` 38 | 39 | See the squiggly line? That is Dada telling us that we may have reversed the order of `end` and `start`. 
We can disable this warning by giving explicit labels to the arguments, making it clear that we _meant_ to switch the order: 40 | 41 | ```dada ide 42 | class Point(x, y) 43 | 44 | async fn print_line(start, end) { 45 | print("The start is {start}").await 46 | print("The end is {end}").await 47 | } 48 | 49 | let start = Point(22, 44) 50 | let end = Point(33, 55) 51 | print_line(start: end, end: start).await 52 | ``` 53 | -------------------------------------------------------------------------------- /book/docs/dyn_tutorial/permissions.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 4 3 | --- 4 | 5 | # Permissions 6 | 7 | Dada hopefully feels familiar to you thus far, but if you played a lot with the programs, you may have noticed some errors you didn't expect. Consider this program...what do you expect it to print? Take a guess, and then hit the "Run" button to see what happens... 8 | 9 | ```dada ide 10 | class Point(x, y) 11 | 12 | let p = Point(22, 44) 13 | let q = p 14 | q.x = 23 15 | print(p).await 16 | ``` 17 | 18 | Surprise! It gets an error! What is going on? The answer lies in the key Dada concept of **permissions**. 19 | 20 | ## What is a permission? 21 | 22 | In Dada, variables don't just store a reference to an object, like they do in Python or Java. Instead, they store a reference to an object _with some permission_. These permissions determine whether you can read or write to the object. 23 | 24 | Permissions in Dada can be divided across two axes. We'll cover those two axes separately: 25 | 26 | - **Read** vs **write** -- covered now! 27 | - **Owned** vs **mutable** -- covered later, in the chapters on ownership 28 | 29 | ## Read permission is the default 30 | 31 | When you write something like `let q = p` in Dada, the default is that you get a **mutable, read permission**. Leasing will be covered in more detail later, but for now it suffices to say that the permission for `q` is tied to the permission from `p`; when `p` goes out of scope, for example, then `q`'s permission will also be canceled. 32 | 33 | As the name suggests, **read permissions** can only be used to read fields. This is why we get an error! 34 | 35 | Dada comes equipped with a visual debugger that lets you visualize permissions. To see how it works, try hitting the "Debug" button and then position your cursor write after the line for `let q = p`: 36 | 37 | ```dada ide 38 | class Point(x, y) 39 | 40 | let p = Point(22, 44) 41 | let q = p 42 | # ▲ 43 | # ───────┘ 44 | # put your cursor here -- you will see a diagram below 45 | # that shows that while `p` and `q` reference the same 46 | # point, `q` has read permissions (indicated with a blue 47 | # line). 48 | ``` 49 | 50 | ## Requesting write permission 51 | 52 | You can explicitly request write permission by using the `lease` keyword, like `p.lease`. If you use the debugger and position it after `let q = p.lease`, you will see that `q` is given write permission this time. As a result, `q.x = 23` succeeds and, when we print the variable `p`, we see the new value. 53 | 54 | ```dada ide 55 | class Point(x, y) 56 | 57 | let p = Point(22, 44) 58 | let q = p.lease 59 | q.x = 23 60 | print(p).await 61 | ``` 62 | -------------------------------------------------------------------------------- /book/docs/dyn_tutorial/shlease.md: -------------------------------------------------------------------------------- 1 | # Shleases: Shared leases 2 | 3 | We've nearly completed our tour of Dada's permissions. 
It's time to visit the last square in our table: 4 | 5 | | | Unique | Shared | 6 | | ---------- | ---------------------- | -------------------- | 7 | | Owned | [`my`](./my.md) | [`our`](./our.md) | 8 | | **Mutable** | [`mutable`](./lease.md) | ⭐ **`shleased`** ⭐ | 9 | 10 | A shlease[^pronounced] is a _shared lease_ and it combines attributes of a `mutable` value and an `our` value: 11 | 12 | - Like a `mutable` permission, `shleased` permissions are _temporary_. The lessor can terminate the shlease and reclaim their full permission. 13 | - Like an `our` permission, `shleased` permissions are _shared_. They can be copied freely, and hence -- because [friends don't let friends mutate shared data](./sharing_xor_mutation.md) -- shleased objects are read-only so long as the shlease lasts. 14 | 15 | [^pronounced]: Pronounced "shlease". 16 | 17 | ## Example 18 | 19 | Let's see an example of shleases in action. We are going to create a pair (owned by `m`) and then shlease it out to a bunch of objects. All of them will be able to freely read from the pair: 20 | 21 | ``` 22 | class Pair(a: our, b: our) 23 | 24 | # `m` owns the `Pair` 25 | let m: my = Pair(22, 44) 26 | 27 | # `s1` shleases the pair from `m` 28 | let s1: shleased = m 29 | 30 | # `s2` copies the shlease 31 | let s2: shleased = m 32 | 33 | # we can now read from `m`, `s1`, and `s2` interchangeably 34 | print(m.a).await 35 | print(s1.a).await 36 | print(s2.a).await 37 | ``` 38 | 39 | ## The `shlease` keyword 40 | 41 | You can make an explicit shlease by using the `shlease` keyword: 42 | 43 | ``` 44 | class Pair(a: our, b: our) 45 | let m: my = Pair(22, 44) 46 | let s1: any = m.shlease 47 | ``` 48 | 49 | If you position your cursor after `s1`, you will see that it has `shleased` permissions. 50 | 51 | ## Canceling a shlease 52 | 53 | When you have a shlease to an object, you know that the object will not be mutated so long as your shlease remains valid. Once a lessor writes to the object, your shlease is canceled. Let's see that in action: 54 | 55 | ``` 56 | class Pair(a: our, b: our) 57 | let m: my = Pair(22, 44) 58 | let s: shleased = m 59 | 60 | # When `m` writes to `a`, that cancels the shlease 61 | m.a += 1 62 | 63 | # Accessing `s` is an error now 64 | print(s.a).await # Error! 65 | ``` 66 | 67 | ## Giving, sharing, and leasing a shleased value 68 | 69 | You can also apply the `give`, `share`, and `lease` keywords to shleased values. In all cases, they simply reproduce the `shlease` value: 70 | 71 | ``` 72 | class Pair(a: our, b: our) 73 | let m: my = Pair(22, 44) 74 | let s1: any = m.shlease 75 | let s2: any = s1.give # s2 is just a copy of s1 76 | let s3: any = s1.ref # s3 is also just a copy of s1 77 | let s4: any = s1.lease # s4 is ALSO just a copy of s1 78 | ``` 79 | -------------------------------------------------------------------------------- /book/docs/dyn_tutorial/sublease.md: -------------------------------------------------------------------------------- 1 | # Subleases 2 | 3 | When you have a mutable value, you can lease it again, creating a sublease. Here is an example where we create a lease `l1` and then a sublease `l2`. 
Try putting your cursor after `let l2: mutable = l1`; you will see that both `p` and `l1` are drawn with "dashes", indicating that those variables have leased their object out to another: 4 | 5 | ``` 6 | class Point(x: our, y: our) 7 | 8 | let p: my = Point(22, 44) 9 | 10 | # `l1` is mutable from `p` 11 | let l1: mutable = p 12 | 13 | # `l2` is mutable from `l1` 14 | let l2: mutable = l1 15 | # ▲ 16 | # ─────────────────┘ 17 | 18 | # You see: 19 | # ┌────┐ 20 | # │ │ 21 | # │ p ├╌my╌╌╌╌╌╌╌╌╌╌╌╌╌╌►┌───────┐ 22 | # │ │ │ Point │ 23 | # │ l1 ├╌mutable╌╌╌╌╌╌╌╌╌╌►│ ───── │ 24 | # │ │ │ x: 22 │ 25 | # │ l2 ├─mutable──────────►│ y: 44 │ 26 | # │ │ └───────┘ 27 | # └────┘ 28 | ``` 29 | 30 | Subleases can be ended just like any other lease, except that a sublease can be terminated either by the lessor (`l1`, here) or by the original owner (`p`, here). Try inserting commands like `l1.x += 1` or `p.x += 1` and see how the diagram changes. 31 | 32 | ## Giving a mutable value 33 | 34 | When you [`give`](./my.md) a mutable value, it results in a sublease. This preserves the rule for "give", that giving an object always creates a new value with equivalent permissions: a sublease permits all the same access to the object as the original lease. 35 | 36 | ``` 37 | class Point(x: our, y: our) 38 | let p: my = Point(22, 44) 39 | let l1: mutable = p 40 | let l2: any = l1.give # subleases from `l1` 41 | l2.x += 1 # modifies the `Point` 42 | ``` 43 | -------------------------------------------------------------------------------- /book/docs/reference.md: -------------------------------------------------------------------------------- 1 | # Dada Reference 2 | 3 | Reference material. Wildly incomplete. -------------------------------------------------------------------------------- /book/docs/reference/grammar.md: -------------------------------------------------------------------------------- 1 | # Grammar 2 | -------------------------------------------------------------------------------- /book/docs/reference/raw-string-literals.md: -------------------------------------------------------------------------------- 1 | # Raw string literals 2 | 3 | :::info 4 | Raw string literals are not yet implemented. See [#179](https://github.com/dada-lang/dada/issues/179) for the current status. 5 | ::: 6 | 7 | A raw string literal `r"..."` is a string literal with no escape sequences. All characters within the raw string literal are added into the final string until the terminator. Raw string literals can also include any number of `#` characters to permit embedding `"`. 8 | 9 | ## Examples 10 | 11 | ``` 12 | let x = r"fo\o{}" 13 | ``` 14 | 15 | yields the [string literal](./string-literals) `"fo\\o\{\}"`. 16 | 17 | ``` 18 | let x = r#"fo\o{}"bar"# 19 | ``` 20 | 21 | yields the [string literal](./string-literals) `"fo\\o\{\}\"bar"`. 22 | -------------------------------------------------------------------------------- /book/docs/tutorials.md: -------------------------------------------------------------------------------- 1 | import Caveat from './caveat.md' 2 | 3 | # Dada Tutorials 4 | 5 | 6 | 7 | ## Gradual ownership 8 | 9 | Dada is a **gradual, ownership-based** language. Let's unpack those two things: 10 | 11 | * **Ownership-based:** Dada leverages the concept of *ownership* to ensure that (a) your memory is freed at the right times, without any garbage collection and (b) your parallel programs are data-race free.
12 | * If you've used Rust, Dada's ownership system will be familiar, but keep in mind that there are some key differences between them. (If you've not used Rust, don't worry, we don't assume any prior knowledge in this tutorial.) 13 | * **Gradual:** Dada lets you smoothly transition from an interpreted, dynamic language (similar to Python or JavaScript) over to a statically typed, fully optimized one (similar to Rust). You can even mix code written in the two styles. 14 | 15 | ## Dynamic Dada tutorial 16 | 17 | This tutorial covers the dynamic flavor of Dada and uses it to introduce the concepts of ownership and the like. Once you've gotten familiar with that, the [Typed Dada](./typed_tutorial.md) tutorial introduces Dada's type system and shows how you can use it to check that your Dada code is free of permission-related errors. [Let's get started!](./dyn_tutorial) 18 | 19 | ## Typed Dada tutorial 20 | 21 | Not yet written. -------------------------------------------------------------------------------- /book/docs/typed_tutorial.md: -------------------------------------------------------------------------------- 1 | # Tutorial: Typed Dada 2 | 3 | import Caveat from './caveat.md' 4 | 5 | 6 | 7 | This tutorial picks up[^1] where the [Dynamic Dada](/docs/dyn_tutorial) tutorial leaves off. It introduces the static type system used by the Dada compiler to help your code avoid errors related to ownership permissions. Using Typed Dada not only makes your code more secure, it also allows us to compile down to tight and efficient machine code that doesn't require any kind of permission checks. 8 | 9 | [^1]: Or it will, once it is written, lol. -------------------------------------------------------------------------------- /book/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dada", 3 | "version": "0.0.0", 4 | "private": true, 5 | "scripts": { 6 | "docusaurus": "docusaurus", 7 | "start": "docusaurus start", 8 | "build": "docusaurus build", 9 | "swizzle": "docusaurus swizzle", 10 | "deploy": "docusaurus deploy", 11 | "clear": "docusaurus clear", 12 | "serve": "docusaurus serve", 13 | "write-translations": "docusaurus write-translations", 14 | "write-heading-ids": "docusaurus write-heading-ids", 15 | "typecheck": "tsc", 16 | "format": "prettier --write .", 17 | "format:check": "prettier --check ."
18 | }, 19 | "dependencies": { 20 | "@docusaurus/core": "2.0.0-beta.18", 21 | "@docusaurus/preset-classic": "2.0.0-beta.18", 22 | "@mdx-js/react": "^1.6.22", 23 | "ace-builds": "^1.4.14", 24 | "ansi_up": "^5.1.0", 25 | "clsx": "^1.1.1", 26 | "graphviz-react": "^1.2.0", 27 | "prism-react-renderer": "^1.3.1", 28 | "react": "^17.0.2", 29 | "react-ace": "^9.5.0", 30 | "react-bootstrap": "^2.2.0", 31 | "react-dom": "^17.0.2" 32 | }, 33 | "devDependencies": { 34 | "@docusaurus/module-type-aliases": "2.0.0-beta.18", 35 | "@tsconfig/docusaurus": "^1.0.5", 36 | "prettier": "^2.7.1", 37 | "typescript": "^4.6.3" 38 | }, 39 | "browserslist": { 40 | "production": [ 41 | ">0.5%", 42 | "not dead", 43 | "not op_mini all" 44 | ], 45 | "development": [ 46 | "last 1 chrome version", 47 | "last 1 firefox version", 48 | "last 1 safari version" 49 | ] 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /book/sidebars.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Creating a sidebar enables you to: 3 | - create an ordered group of docs 4 | - render a sidebar for each doc of that group 5 | - provide next/previous navigation 6 | 7 | The sidebars can be generated from the filesystem, or explicitly defined here. 8 | 9 | Create as many sidebars as you want. 10 | */ 11 | 12 | // @ts-check 13 | 14 | /** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ 15 | const sidebars = { 16 | // By default, Docusaurus generates a sidebar from the docs folder structure 17 | // tutorialSidebar: [{type: 'autogenerated', dirName: '.'}], 18 | 19 | // But you can create a sidebar manually 20 | tutorialSidebar: [ 21 | { 22 | type: "category", 23 | label: "Tutorials", 24 | link: { type: "doc", id: "tutorials" }, 25 | items: [ 26 | { 27 | type: "category", 28 | label: "Dynamic Dada", 29 | items: [ 30 | "dyn_tutorial/index", 31 | "dyn_tutorial/class", 32 | "dyn_tutorial/labeled_arguments", 33 | "dyn_tutorial/permissions", 34 | "dyn_tutorial/my", 35 | "dyn_tutorial/our", 36 | "dyn_tutorial/sharing_xor_mutation", 37 | "dyn_tutorial/field_permissions", 38 | "dyn_tutorial/any", 39 | "dyn_tutorial/lease", 40 | "dyn_tutorial/sublease", 41 | "dyn_tutorial/shlease", 42 | "dyn_tutorial/house_party", 43 | "dyn_tutorial/atomic", 44 | ], 45 | }, 46 | { 47 | type: "category", 48 | label: "Typed Dada", 49 | items: ["typed_tutorial"], 50 | }, 51 | ], 52 | }, 53 | ], 54 | 55 | designDocsSidebar: [ 56 | { 57 | type: "category", 58 | label: "Design Docs", 59 | link: { type: "doc", id: "design_docs" }, 60 | items: [ 61 | "design_docs/goals", 62 | "design_docs/experience", 63 | "design_docs/calculus", 64 | "design_docs/sketchy_ideas", 65 | "design_docs/sharing_synthesized_values", 66 | "design_docs/why_not_explicit_end_for_lease", 67 | "reference", 68 | ], 69 | }, 70 | ], 71 | 72 | referenceSidebar: [ 73 | { 74 | type: "category", 75 | label: "Reference", 76 | link: { type: "doc", id: "reference" }, 77 | items: [ 78 | { 79 | type: "autogenerated", 80 | dirName: "reference", 81 | }, 82 | ], 83 | }, 84 | ], 85 | 86 | contributingSidebar: [ 87 | { 88 | type: "category", 89 | label: "Contributing", 90 | link: { type: "doc", id: "contributing" }, 91 | items: [ 92 | "contributing/coc", 93 | "contributing/guidelines", 94 | "contributing/build", 95 | ], 96 | }, 97 | ], 98 | 99 | aboutSidebar: [ 100 | { 101 | type: "category", 102 | label: "About", 103 | link: { type: "doc", id: "about" }, 104 | items: ["about/faq"], 105 | }, 106 | ], 107 | }; 108 | 109 
| module.exports = sidebars; 110 | -------------------------------------------------------------------------------- /book/src/components/HomepageFeatures/index.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import clsx from "clsx"; 3 | import styles from "./styles.module.css"; 4 | 5 | type FeatureItem = { 6 | title: string; 7 | Svg: React.ComponentType>; 8 | description: JSX.Element; 9 | }; 10 | 11 | const FeatureList: FeatureItem[] = [ 12 | { 13 | title: "Easy to Use", 14 | Svg: require("@site/static/img/undraw_docusaurus_mountain.svg").default, 15 | description: ( 16 | <> 17 | Docusaurus was designed from the ground up to be easily installed and 18 | used to get your website up and running quickly. 19 | 20 | ), 21 | }, 22 | { 23 | title: "Focus on What Matters", 24 | Svg: require("@site/static/img/undraw_docusaurus_tree.svg").default, 25 | description: ( 26 | <> 27 | Docusaurus lets you focus on your docs, and we'll do the chores. Go 28 | ahead and move your docs into the docs directory. 29 | 30 | ), 31 | }, 32 | { 33 | title: "Powered by React", 34 | Svg: require("@site/static/img/undraw_docusaurus_react.svg").default, 35 | description: ( 36 | <> 37 | Extend or customize your website layout by reusing React. Docusaurus can 38 | be extended while reusing the same header and footer. 39 | 40 | ), 41 | }, 42 | ]; 43 | 44 | function Feature({ title, Svg, description }: FeatureItem) { 45 | return ( 46 |
47 | <div className="text--center">
48 | <Svg className={styles.featureSvg} role="img" />
49 | </div>
50 | <div className="text--center padding-horiz--md">
51 | <h3>{title}</h3>
52 | <p>{description}</p>
53 | </div>
54 | </div>
55 | );
56 | }
57 | 
58 | export default function HomepageFeatures(): JSX.Element {
59 | return (
60 | <section className={styles.features}>
61 | <div className="container">
62 | <div className="row">
63 | {FeatureList.map((props, idx) => (
64 | <Feature key={idx} {...props} />
65 | ))}
66 | </div>
67 | </div>
68 | </section>
69 | ); 70 | } 71 | -------------------------------------------------------------------------------- /book/src/components/HomepageFeatures/styles.module.css: -------------------------------------------------------------------------------- 1 | .features { 2 | display: flex; 3 | align-items: center; 4 | padding: 2rem 0; 5 | width: 100%; 6 | } 7 | 8 | .featureSvg { 9 | height: 200px; 10 | width: 200px; 11 | } 12 | -------------------------------------------------------------------------------- /book/src/css/custom.css: -------------------------------------------------------------------------------- 1 | /** 2 | * Any CSS included here will be global. The classic template 3 | * bundles Infima by default. Infima is a CSS framework designed to 4 | * work well for content-centric websites. 5 | */ 6 | 7 | /* You can override the default Infima variables here. */ 8 | :root { 9 | --ifm-color-primary: #cdc2eb; 10 | --ifm-color-primary-dark: #9575d5; 11 | --ifm-color-primary-darker: #6f3dd3; 12 | /* fixme */ 13 | --ifm-color-primary-darkest: #4a20e1; 14 | /* fixme */ 15 | --ifm-color-primary-light: #beb3de; 16 | --ifm-color-primary-lighter: #cbc3e1; 17 | --ifm-color-primary-lightest: #dad6e3; 18 | --ifm-code-font-size: 95%; 19 | } 20 | 21 | /* For readability concerns, you should choose a lighter palette in dark mode. */ 22 | [data-theme="dark"] { 23 | --ifm-color-primary: #25c2a0; 24 | --ifm-color-primary-dark: #21af90; 25 | --ifm-color-primary-darker: #1fa588; 26 | --ifm-color-primary-darkest: #1a8870; 27 | --ifm-color-primary-light: #29d5b0; 28 | --ifm-color-primary-lighter: #32d8b4; 29 | --ifm-color-primary-lightest: #4fddbf; 30 | } 31 | 32 | .docusaurus-highlight-code-line { 33 | background-color: rgba(0, 0, 0, 0.1); 34 | display: block; 35 | margin: 0 calc(-1 * var(--ifm-pre-padding)); 36 | padding: 0 var(--ifm-pre-padding); 37 | } 38 | 39 | [data-theme="dark"] .docusaurus-highlight-code-line { 40 | background-color: rgba(0, 0, 0, 0.3); 41 | } 42 | 43 | .dada-left-justify { 44 | text-align: left; 45 | } 46 | -------------------------------------------------------------------------------- /book/src/css/ide.css: -------------------------------------------------------------------------------- 1 | .dada-editor { 2 | background-color: #eeeeee; 3 | } 4 | 5 | .dada-output { 6 | font-family: "Source Code Pro", monospace; 7 | height: 100%; 8 | width: 100%; 9 | white-space: pre-wrap; 10 | overflow: auto; 11 | background-color: var(--ifm-color-emphasis-100); 12 | } 13 | 14 | .dada-ir-output { 15 | font-family: "Source Code Pro", monospace; 16 | width: 100%; 17 | background-color: var(--ifm-color-emphasis-100); 18 | border: 4px solid var(--ifm-color-emphasis-500); 19 | white-space: pre-wrap; 20 | overflow: auto; 21 | } 22 | 23 | .ide { 24 | display: grid; 25 | grid-template-columns: 1fr 1fr; 26 | gap: 20px; 27 | } 28 | 29 | .heap-cell { 30 | margin-top: 20px; 31 | width: 100%; 32 | } 33 | 34 | .ide-header { 35 | font-family: "Amatic SC", cursive; 36 | background-color: var(--ifm-color-primary-dark); 37 | color: var(--ifm-color-primary-lightest); 38 | width: 100%; 39 | } 40 | -------------------------------------------------------------------------------- /book/src/pages/index.module.css: -------------------------------------------------------------------------------- 1 | /** 2 | * CSS files with the .module.css suffix will be treated as CSS modules 3 | * and scoped locally. 
4 | */ 5 | 6 | .heroBanner { 7 | padding: 4rem 0; 8 | text-align: center; 9 | position: relative; 10 | overflow: hidden; 11 | } 12 | 13 | @media screen and (max-width: 996px) { 14 | .heroBanner { 15 | padding: 2rem; 16 | } 17 | } 18 | 19 | .buttons { 20 | display: flex; 21 | align-items: center; 22 | justify-content: center; 23 | } 24 | -------------------------------------------------------------------------------- /book/src/pages/index.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import clsx from "clsx"; 3 | import Layout from "@theme/Layout"; 4 | import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; 5 | import styles from "./index.module.css"; 6 | // import HomepageFeatures from '@site/src/components/HomepageFeatures'; 7 | import Col from "react-bootstrap/Col"; 8 | import Row from "react-bootstrap/Row"; 9 | 10 | function HomepageHeader() { 11 | const { siteConfig } = useDocusaurusContext(); 12 | return ( 13 |
14 |
15 | 16 | 17 | 21 | 22 | 23 |
24 |

25 | Welcome to Dada, an experimental new programming language 26 | for building WebAssembly components! 27 |

28 | 29 |

30 | You've come at a bit of an awkward time. We're in the midst of 31 | renovating the place. More info coming soon! 32 |

33 |
34 | 35 |
36 |
37 |
38 | ); 39 | } 40 | 41 | export default function Home(): JSX.Element { 42 | const { siteConfig } = useDocusaurusContext(); 43 | return ( 44 | 48 | 49 | 50 |
51 |
52 | ); 53 | } 54 | -------------------------------------------------------------------------------- /book/src/pages/markdown-page.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Markdown page example 3 | --- 4 | 5 | # Markdown page example 6 | 7 | You don't need React to write simple standalone pages. 8 | -------------------------------------------------------------------------------- /book/static/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dada-lang/dada/8bdf2b1cec0192d0ad1f67c35e2cb8472d1a3e1d/book/static/.nojekyll -------------------------------------------------------------------------------- /book/static/img/dada.svg: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /book/static/img/docusaurus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dada-lang/dada/8bdf2b1cec0192d0ad1f67c35e2cb8472d1a3e1d/book/static/img/docusaurus.png -------------------------------------------------------------------------------- /book/static/img/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dada-lang/dada/8bdf2b1cec0192d0ad1f67c35e2cb8472d1a3e1d/book/static/img/favicon.ico -------------------------------------------------------------------------------- /book/static/img/tutorial/docsVersionDropdown.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dada-lang/dada/8bdf2b1cec0192d0ad1f67c35e2cb8472d1a3e1d/book/static/img/tutorial/docsVersionDropdown.png -------------------------------------------------------------------------------- /book/static/img/tutorial/localeDropdown.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dada-lang/dada/8bdf2b1cec0192d0ad1f67c35e2cb8472d1a3e1d/book/static/img/tutorial/localeDropdown.png -------------------------------------------------------------------------------- /book/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@tsconfig/docusaurus/tsconfig.json", 3 | "compilerOptions": { 4 | "baseUrl": ".", 5 | "target": "es2016", 6 | "moduleResolution": "node", 7 | "strict": true, 8 | "noImplicitAny": true, 9 | "noFallthroughCasesInSwitch": true, 10 | "isolatedModules": true, 11 | "skipLibCheck": true 12 | }, 13 | "include": ["src"] 14 | } 15 | -------------------------------------------------------------------------------- /components/dada-check/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-check" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [lib] 7 | doctest = false 8 | 9 | [dependencies] 10 | dada-ir-ast = { version = "0.1.0", path = "../dada-ir-ast" } 11 | dada-ir-sym = { version = "0.1.0", path = "../dada-ir-sym" } 12 | dada-parser = { version = "0.1.0", path = "../dada-parser" } 13 | dada-util = { version = "0.1.0", path = "../dada-util" } 14 | salsa = { workspace = true } 15 | -------------------------------------------------------------------------------- /components/dada-check/docs/overview.md: 
-------------------------------------------------------------------------------- 1 | # Type Checking Orchestration 2 | 3 | This crate provides the high-level orchestration for Dada's type checking process. It implements the [`Check`] trait that defines what it means for a Dada program to successfully compile. 4 | 5 | ## Purpose 6 | 7 | While the detailed type checking logic lives in [`dada_ir_sym::check`], this crate provides: 8 | - **Top-level entry points** for type checking entire programs 9 | - **Orchestration logic** that coordinates different phases of checking 10 | - **The [`Check`] trait** that unifies type checking across different AST nodes 11 | 12 | ## The Check Trait 13 | 14 | The core abstraction is the [`Check`] trait: 15 | 16 | ```rust 17 | pub trait Check<'db> { 18 | fn check(&self, db: &'db dyn crate::Db); 19 | } 20 | ``` 21 | 22 | This trait is implemented for all major AST and IR nodes: 23 | - **[`SourceFile`]** - Check an entire source file 24 | - **[`SymModule`]** - Check a module and all its items 25 | - **[`SymFunction`]** - Check a function signature and body 26 | - **[`SymAggregate`]** - Check class definitions and members 27 | 28 | ## Checking Pipeline 29 | 30 | When you call `.check()` on a source file, it triggers a cascading validation: 31 | 32 | 1. **Module checking** - Validates module structure and use statements 33 | 2. **Item checking** - Validates each top-level item (classes, functions) 34 | 3. **Signature checking** - Validates function signatures and generic parameters 35 | 4. **Body checking** - Validates function implementations 36 | 5. **Field checking** - Validates class field types 37 | 38 | ## Error Accumulation 39 | 40 | The checking process accumulates errors rather than failing fast. This allows the compiler to report multiple issues at once. 41 | 42 | ## Integration with Symbolic IR 43 | 44 | This crate serves as a bridge between the parsed AST and the detailed type checking in [`dada_ir_sym`]. It: 45 | - Converts AST nodes to symbolic IR 46 | - Invokes the appropriate type checking logic 47 | - Ensures all necessary validations are performed 48 | 49 | ## Usage 50 | 51 | Typically, you'll check an entire program like this: 52 | 53 | ```rust 54 | use dada_check::Check; 55 | 56 | // Check a source file 57 | source_file.check(db); 58 | 59 | // Or check a specific function 60 | sym_function.check(db); 61 | ``` 62 | 63 | The actual type checking algorithms and detailed analysis are implemented in [`dada_ir_sym::check`]. 
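64 | 
65 | ## Sketch of the cascading pattern
66 | 
67 | To make the cascade concrete, here is a minimal, self-contained sketch. It is **not** the real crate API -- the `Db`, `Module`, and `Function` types below are illustrative stand-ins, and the real trait is implemented for salsa-tracked IR nodes -- but it shows the shape of the pattern: a container node implements `Check` by delegating to each of its children, and diagnostics accumulate rather than aborting the walk.
68 | 
69 | ```rust
70 | use std::cell::RefCell;
71 | 
72 | // Stand-in "database" that only collects diagnostics (illustration only).
73 | struct Db {
74 |     diagnostics: RefCell<Vec<String>>,
75 | }
76 | 
77 | trait Check {
78 |     fn check(&self, db: &Db);
79 | }
80 | 
81 | struct Function { name: &'static str, ok: bool }
82 | struct Module { functions: Vec<Function> }
83 | 
84 | impl Check for Function {
85 |     fn check(&self, db: &Db) {
86 |         // Report a problem but keep going: errors are accumulated, not fatal.
87 |         if !self.ok {
88 |             db.diagnostics.borrow_mut().push(format!("error in `{}`", self.name));
89 |         }
90 |     }
91 | }
92 | 
93 | impl Check for Module {
94 |     fn check(&self, db: &Db) {
95 |         // A container node simply cascades into each of its items.
96 |         for f in &self.functions {
97 |             f.check(db);
98 |         }
99 |     }
100 | }
101 | 
102 | fn main() {
103 |     let db = Db { diagnostics: RefCell::new(Vec::new()) };
104 |     let module = Module {
105 |         functions: vec![
106 |             Function { name: "main", ok: true },
107 |             Function { name: "broken", ok: false },
108 |         ],
109 |     };
110 |     module.check(&db);
111 |     assert_eq!(db.diagnostics.borrow().len(), 1);
112 | }
113 | ```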
-------------------------------------------------------------------------------- /components/dada-codegen/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-codegen" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | dada-ir-sym = { version = "0.1.0", path = "../dada-ir-sym" } 8 | wasm-encoder = "0.220.0" 9 | salsa = { workspace = true } 10 | dada-util = { version = "0.1.0", path = "../dada-util" } 11 | dada-ir-ast = { version = "0.1.0", path = "../dada-ir-ast" } 12 | -------------------------------------------------------------------------------- /components/dada-codegen/src/cx.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_sym::{ir::functions::SymFunction, ir::types::SymGenericTerm}; 2 | use dada_util::{FromImpls, Map}; 3 | use salsa::Update; 4 | use wasm_encoder::{CodeSection, FunctionSection, TypeSection}; 5 | 6 | mod generate_expr; 7 | mod generate_fn; 8 | mod wasm_fn_type; 9 | mod wasm_repr; 10 | 11 | /// Core codegen context. 12 | pub(crate) struct Cx<'db> { 13 | db: &'db dyn crate::Db, 14 | function_section: FunctionSection, 15 | type_section: TypeSection, 16 | code_section: CodeSection, 17 | functions: Map, FnIndex>, 18 | codegen_queue: Vec>, 19 | } 20 | 21 | impl<'db> Cx<'db> { 22 | pub fn new(db: &'db dyn crate::Db) -> Self { 23 | Self { 24 | db, 25 | function_section: Default::default(), 26 | type_section: Default::default(), 27 | code_section: Default::default(), 28 | functions: Default::default(), 29 | codegen_queue: Default::default(), 30 | } 31 | } 32 | 33 | /// Generates all code reachable from the given fn instantiated with the given arguments. 34 | pub fn generate_from_fn( 35 | mut self, 36 | function: SymFunction<'db>, 37 | generics: Vec>, 38 | ) -> wasm_encoder::Module { 39 | self.declare_fn(function, generics); 40 | while let Some(item) = self.codegen_queue.pop() { 41 | match item { 42 | CodegenQueueItem::Function(fn_key) => self.codegen_fn(fn_key), 43 | } 44 | } 45 | 46 | let mut module = wasm_encoder::Module::new(); 47 | module.section(&self.type_section); 48 | module.section(&self.function_section); 49 | module.section(&self.code_section); 50 | 51 | module 52 | } 53 | } 54 | 55 | #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Update)] 56 | pub(crate) struct FnKey<'db>(SymFunction<'db>, Vec>); 57 | 58 | #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Update)] 59 | pub(crate) struct FnIndex(u32); 60 | 61 | #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Update, FromImpls)] 62 | enum CodegenQueueItem<'db> { 63 | Function(FnKey<'db>), 64 | } 65 | -------------------------------------------------------------------------------- /components/dada-codegen/src/cx/wasm_fn_type.rs: -------------------------------------------------------------------------------- 1 | use wasm_encoder::ValType; 2 | 3 | use super::Cx; 4 | 5 | pub(crate) struct FnTypeIndex(u32); 6 | 7 | impl From for u32 { 8 | fn from(value: FnTypeIndex) -> Self { 9 | value.0 10 | } 11 | } 12 | 13 | impl Cx<'_> { 14 | /// Declares an instantiation of a function with a given set of arguments and returns its index. 15 | /// If the function is already declared, nothing happens. 16 | /// If the function is not already declared, it is enqueued for code-generation. 
17 | pub(crate) fn declare_fn_type( 18 | &mut self, 19 | inputs: Vec, 20 | outputs: Vec, 21 | ) -> FnTypeIndex { 22 | let index = self.type_section.len(); 23 | self.type_section.ty().function(inputs, outputs); 24 | FnTypeIndex(index) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /components/dada-codegen/src/lib.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::{ast::Identifier, diagnostic::Diagnostic, inputs::SourceFile}; 2 | use dada_ir_sym::{ 3 | Db, 4 | ir::{functions::SymFunction, types::SymGenericTerm}, 5 | prelude::Symbol, 6 | }; 7 | 8 | mod cx; 9 | 10 | #[salsa::tracked(return_ref)] 11 | pub fn codegen_main_fn<'db>(db: &'db dyn Db, source_file: SourceFile) -> Option> { 12 | let main = Identifier::main(db); 13 | let module = source_file.symbol(db); 14 | let main_fn = module.function_named(db, main)?; 15 | 16 | if !main_fn.symbols(db).has_generics_of_kind(db, &[]) { 17 | let error = Diagnostic::error( 18 | db, 19 | main_fn.name_span(db), 20 | "main function must have no generics", 21 | ); 22 | error.report(db); 23 | return None; 24 | } 25 | 26 | Some(codegen(db, main_fn, vec![]).clone()) 27 | } 28 | 29 | /// Generate a self-contained wasm module from a starting function. 30 | #[salsa::tracked(return_ref)] 31 | pub fn codegen<'db>( 32 | db: &'db dyn crate::Db, 33 | function: SymFunction<'db>, 34 | generics: Vec>, 35 | ) -> Vec { 36 | cx::Cx::new(db) 37 | .generate_from_fn(function, generics) 38 | .finish() 39 | } 40 | -------------------------------------------------------------------------------- /components/dada-compiler/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-compiler" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | dada-check = { version = "0.1.0", path = "../dada-check" } 8 | dada-ir-ast = { version = "0.1.0", path = "../dada-ir-ast" } 9 | dada-parser = { version = "0.1.0", path = "../dada-parser" } 10 | dada-util = { version = "0.1.0", path = "../dada-util" } 11 | salsa = { workspace = true } 12 | rust-embed = "8.5.0" 13 | extension-trait = "1.0.2" 14 | url = { workspace = true } 15 | dada-codegen = { version = "0.1.0", path = "../dada-codegen" } 16 | dada-ir-sym = { version = "0.1.0", path = "../dada-ir-sym" } 17 | dada-probe = { version = "0.1.0", path = "../dada-probe" } 18 | -------------------------------------------------------------------------------- /components/dada-compiler/src/fork.rs: -------------------------------------------------------------------------------- 1 | use std::ops::Deref; 2 | 3 | pub struct Fork { 4 | compiler: C, 5 | } 6 | 7 | impl Deref for Fork { 8 | type Target = C; 9 | 10 | fn deref(&self) -> &Self::Target { 11 | &self.compiler 12 | } 13 | } 14 | 15 | impl From for Fork { 16 | fn from(value: C) -> Self { 17 | Fork { compiler: value } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /components/dada-compiler/src/realfs.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | 3 | use crate::VirtualFileSystem; 4 | use dada_util::{Fallible, anyhow, bail}; 5 | use url::Url; 6 | 7 | pub struct RealFs { 8 | base_dir: Option, 9 | } 10 | 11 | impl Default for RealFs { 12 | fn default() -> Self { 13 | Self::new() 14 | } 15 | } 16 | 17 | impl RealFs { 18 | pub fn new() -> Self { 19 | Self { 20 | base_dir: 
std::env::current_dir().ok(), 21 | } 22 | } 23 | 24 | fn validate_scheme(url: &Url) -> Fallible { 25 | if url.scheme() != "file" { 26 | bail!("unsupported scheme: {}", url.scheme()); 27 | } 28 | url.to_file_path() 29 | .map_err(|()| anyhow!("not a file path: {url}")) 30 | } 31 | } 32 | 33 | impl VirtualFileSystem for RealFs { 34 | fn contents(&self, url: &Url) -> Fallible { 35 | let path = Self::validate_scheme(url)?; 36 | Ok(std::fs::read_to_string(&path)?) 37 | } 38 | 39 | fn exists(&self, url: &Url) -> bool { 40 | match Self::validate_scheme(url) { 41 | Ok(path) => path.exists(), 42 | Err(_) => false, 43 | } 44 | } 45 | 46 | fn path_url(&self, path: &Path) -> Fallible { 47 | let path = if let Some(base_dir) = &self.base_dir { 48 | base_dir.join(path) 49 | } else { 50 | path.to_path_buf() 51 | }; 52 | 53 | Url::from_file_path(&path) 54 | .map_err(|()| anyhow!("unable to construct URL from `{}`", path.display())) 55 | } 56 | 57 | fn url_display(&self, url: &Url) -> String { 58 | match url.scheme() { 59 | "file" => match url.to_file_path() { 60 | Ok(path) => { 61 | if let Some(base_dir) = &self.base_dir { 62 | if let Ok(suffix) = path.strip_prefix(base_dir) { 63 | return suffix.display().to_string(); 64 | } 65 | } 66 | path.display().to_string() 67 | } 68 | 69 | Err(()) => url.to_string(), 70 | }, 71 | 72 | "libdada" => format!("[libdada] {}", url.path()), 73 | 74 | _ => url.to_string(), 75 | } 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /components/dada-debug/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-debug" 3 | version.workspace = true 4 | repository.workspace = true 5 | edition.workspace = true 6 | 7 | [lints.clippy] 8 | result_large_err = "allow" 9 | 10 | [dependencies] 11 | anyhow.workspace = true 12 | axum = "0.8.1" 13 | camino = "1.1.9" 14 | dada-ir-ast = { version = "0.1.0", path = "../dada-ir-ast" } 15 | handlebars = { version = "6.3.1", features = ["rust-embed"] } 16 | html-escape = "0.2.13" 17 | notify = "8.0.0" 18 | regex = "1.11.1" 19 | rust-embed = "8.6.0" 20 | serde = { version = "1.0.219", features = ["derive"] } 21 | serde_json = "1.0.140" 22 | structopt = "0.3.26" 23 | tokio = { version = "1.44.0", features = ["full"] } 24 | tracing-subscriber = "0.3.19" 25 | url.workspace = true 26 | walkdir = "2.5.0" 27 | -------------------------------------------------------------------------------- /components/dada-debug/assets/README.md: -------------------------------------------------------------------------------- 1 | * `jsontree.css` and `jsontree.js` are from https://github.com/lmenezes/json-tree 2 | * [MIT license](https://github.com/lmenezes/json-tree/blob/058fbaedecf7cdf84a58b0f51cfa95b569731a9e/LICENSE) -------------------------------------------------------------------------------- /components/dada-debug/assets/github.css: -------------------------------------------------------------------------------- 1 | pre code.hljs { 2 | display: block; 3 | overflow-x: auto; 4 | padding: 1em 5 | } 6 | code.hljs { 7 | padding: 3px 5px 8 | } 9 | /*! 
10 | Theme: GitHub 11 | Description: Light theme as seen on github.com 12 | Author: github.com 13 | Maintainer: @Hirse 14 | Updated: 2021-05-15 15 | 16 | Outdated base version: https://github.com/primer/github-syntax-light 17 | Current colors taken from GitHub's CSS 18 | */ 19 | .hljs { 20 | color: #24292e; 21 | background: #ffffff 22 | } 23 | .hljs-doctag, 24 | .hljs-keyword, 25 | .hljs-meta .hljs-keyword, 26 | .hljs-template-tag, 27 | .hljs-template-variable, 28 | .hljs-type, 29 | .hljs-variable.language_ { 30 | /* prettylights-syntax-keyword */ 31 | color: #d73a49 32 | } 33 | .hljs-title, 34 | .hljs-title.class_, 35 | .hljs-title.class_.inherited__, 36 | .hljs-title.function_ { 37 | /* prettylights-syntax-entity */ 38 | color: #6f42c1 39 | } 40 | .hljs-attr, 41 | .hljs-attribute, 42 | .hljs-literal, 43 | .hljs-meta, 44 | .hljs-number, 45 | .hljs-operator, 46 | .hljs-variable, 47 | .hljs-selector-attr, 48 | .hljs-selector-class, 49 | .hljs-selector-id { 50 | /* prettylights-syntax-constant */ 51 | color: #005cc5 52 | } 53 | .hljs-regexp, 54 | .hljs-string, 55 | .hljs-meta .hljs-string { 56 | /* prettylights-syntax-string */ 57 | color: #032f62 58 | } 59 | .hljs-built_in, 60 | .hljs-symbol { 61 | /* prettylights-syntax-variable */ 62 | color: #e36209 63 | } 64 | .hljs-comment, 65 | .hljs-code, 66 | .hljs-formula { 67 | /* prettylights-syntax-comment */ 68 | color: #6a737d 69 | } 70 | .hljs-name, 71 | .hljs-quote, 72 | .hljs-selector-tag, 73 | .hljs-selector-pseudo { 74 | /* prettylights-syntax-entity-tag */ 75 | color: #22863a 76 | } 77 | .hljs-subst { 78 | /* prettylights-syntax-storage-modifier-import */ 79 | color: #24292e 80 | } 81 | .hljs-section { 82 | /* prettylights-syntax-markup-heading */ 83 | color: #005cc5; 84 | font-weight: bold 85 | } 86 | .hljs-bullet { 87 | /* prettylights-syntax-markup-list */ 88 | color: #735c0f 89 | } 90 | .hljs-emphasis { 91 | /* prettylights-syntax-markup-italic */ 92 | color: #24292e; 93 | font-style: italic 94 | } 95 | .hljs-strong { 96 | /* prettylights-syntax-markup-bold */ 97 | color: #24292e; 98 | font-weight: bold 99 | } 100 | .hljs-addition { 101 | /* prettylights-syntax-markup-inserted */ 102 | color: #22863a; 103 | background-color: #f0fff4 104 | } 105 | .hljs-deletion { 106 | /* prettylights-syntax-markup-deleted */ 107 | color: #b31d28; 108 | background-color: #ffeef0 109 | } 110 | .hljs-char.escape_, 111 | .hljs-link, 112 | .hljs-params, 113 | .hljs-property, 114 | .hljs-punctuation, 115 | .hljs-tag { 116 | /* purposely ignored */ 117 | 118 | } -------------------------------------------------------------------------------- /components/dada-debug/assets/highlightjs-line-numbers.min.js: -------------------------------------------------------------------------------- 1 | !function(r,o){"use strict";var e,a="hljs-ln",l="hljs-ln-line",h="hljs-ln-code",s="hljs-ln-numbers",c="hljs-ln-n",m="data-line-number",i=/\r\n|\r|\n/g;function u(e){for(var n=e.toString(),t=e.anchorNode;"TD"!==t.nodeName;)t=t.parentNode;for(var r=e.focusNode;"TD"!==r.nodeName;)r=r.parentNode;var o=parseInt(t.dataset.lineNumber),i=parseInt(r.dataset.lineNumber);if(o==i)return n;var a,l=t.textContent,s=r.textContent;for(i
{6}',[l,s,c,m,h,o+n.startFrom,0{1}',[a,r])}return e}(e.innerHTML,o)}function v(e){var n=e.className;if(/hljs-/.test(n)){for(var t=g(e.innerHTML),r=0,o="";r{1}\n',[n,0 anyhow::Result { 11 | let result = Assets::get(path).ok_or_else(|| anyhow::anyhow!("no asset `{path}` found"))?; 12 | let s = String::from_utf8(result.data.to_vec())?; 13 | Ok(s) 14 | } 15 | -------------------------------------------------------------------------------- /components/dada-debug/src/error.rs: -------------------------------------------------------------------------------- 1 | pub fn error(e: anyhow::Error) -> String { 2 | format!("

Oh geez

{e}

") 3 | } 4 | -------------------------------------------------------------------------------- /components/dada-debug/src/events.rs: -------------------------------------------------------------------------------- 1 | use axum::http::header::ACCEPT; 2 | 3 | use crate::server::State; 4 | 5 | fn check_accept_header(headers: &axum::http::header::HeaderMap) -> anyhow::Result<()> { 6 | // Check the request mime type 7 | let Some(value) = headers.get(&ACCEPT) else { 8 | anyhow::bail!("header `{ACCEPT}` required"); 9 | }; 10 | 11 | if value.to_str()? != "application/json" { 12 | anyhow::bail!("this endpoint only returns `application/json`"); 13 | } 14 | 15 | Ok(()) 16 | } 17 | 18 | pub async fn events( 19 | headers: &axum::http::header::HeaderMap, 20 | state: &State, 21 | ) -> anyhow::Result> { 22 | check_accept_header(headers)?; 23 | crate::root::root_data(state).await 24 | } 25 | 26 | pub async fn try_event_data( 27 | headers: &axum::http::header::HeaderMap, 28 | event_index: usize, 29 | state: &State, 30 | ) -> anyhow::Result { 31 | check_accept_header(headers)?; 32 | crate::view::try_view_data(event_index, state).await 33 | } 34 | -------------------------------------------------------------------------------- /components/dada-debug/src/hbs.rs: -------------------------------------------------------------------------------- 1 | use camino::Utf8Path; 2 | use handlebars::handlebars_helper; 3 | use html_escape::encode_safe; 4 | use rust_embed::Embed; 5 | use serde::Serialize; 6 | 7 | #[derive(Embed)] 8 | #[folder = "templates"] 9 | struct Assets; 10 | 11 | handlebars_helper!(index: |events: array, i: usize| { 12 | events[i].clone() 13 | }); 14 | 15 | handlebars_helper!(is_type: |actual: str, expected: str| { 16 | actual == expected 17 | }); 18 | 19 | handlebars_helper!(source_snippet: |file: str, line: usize, column: usize| { 20 | file_line_col(file, line, column) 21 | }); 22 | 23 | pub(crate) fn render(name: &str, data: &impl Serialize) -> anyhow::Result { 24 | let mut handlers = handlebars::Handlebars::new(); 25 | handlers.register_embed_templates_with_extension::(".hbs")?; 26 | handlers.register_helper("index", Box::new(index)); 27 | handlers.register_helper("is_type", Box::new(is_type)); 28 | handlers.register_helper("source_snippet", Box::new(source_snippet)); 29 | Ok(handlers.render(name, data)?) 
30 | } 31 | 32 | fn file_line_col(file: &str, line: usize, column: usize) -> String { 33 | let path = Utf8Path::new(file); 34 | let file_name = path.file_name().unwrap_or("rust"); 35 | format!( 36 | "badge {file} {line} {column}", 40 | href = file_line_col_href(file, line, column), 41 | file = encode_safe(file_name).replace("-", "%2D"), 42 | ) 43 | } 44 | 45 | fn file_line_col_href(file: &str, line: usize, column: usize) -> String { 46 | format!("/source/{file}?line={line}&column={column}") 47 | } 48 | -------------------------------------------------------------------------------- /components/dada-debug/src/lib.rs: -------------------------------------------------------------------------------- 1 | use std::sync::mpsc::Sender; 2 | 3 | use dada_ir_ast::DebugEvent; 4 | use structopt::StructOpt; 5 | 6 | mod assets; 7 | mod error; 8 | mod events; 9 | mod hbs; 10 | mod root; 11 | mod server; 12 | mod source; 13 | mod view; 14 | 15 | /// Command line options for the debug server 16 | #[derive(Debug, StructOpt)] 17 | pub struct DebugOptions { 18 | #[structopt(long, default_value = "2222")] 19 | pub port: u32, 20 | } 21 | 22 | impl DebugOptions { 23 | /// Create a debug server from the options 24 | pub fn to_server(&self) -> DebugServer { 25 | DebugServer { 26 | port: self.port, 27 | thread: None, 28 | } 29 | } 30 | } 31 | 32 | /// Debug server that monitors 33 | pub struct DebugServer { 34 | port: u32, 35 | thread: Option>>, 36 | } 37 | 38 | impl DebugServer { 39 | /// Start the debug server, panicking if already launched. 40 | /// 41 | /// Returns a port where you should send debug events. 42 | pub fn launch(&mut self) -> Sender { 43 | assert!(self.thread.is_none()); 44 | let (debug_tx, debug_rx) = std::sync::mpsc::channel(); 45 | let port = self.port; 46 | self.thread = Some(std::thread::spawn(move || server::main(port, debug_rx))); 47 | debug_tx 48 | } 49 | 50 | /// Block on the debug server thread (if it has been launched) 51 | pub fn block_on(self) -> anyhow::Result<()> { 52 | if let Some(thread) = self.thread { 53 | thread.join().unwrap()?; 54 | } 55 | Ok(()) 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /components/dada-debug/src/source.rs: -------------------------------------------------------------------------------- 1 | use serde::Serialize; 2 | 3 | pub fn try_source(path: &str, line: u32, _column: u32) -> anyhow::Result { 4 | const WINDOW: u32 = 10; 5 | let source = std::fs::read_to_string(path)?; 6 | let start_from = line.saturating_sub(WINDOW); 7 | let excerpt = source 8 | .lines() 9 | .skip(start_from as usize) 10 | .take((WINDOW * 2) as usize) 11 | .collect::>() 12 | .join("\n"); 13 | crate::hbs::render( 14 | "source", 15 | &SourceArgs { 16 | path, 17 | source: excerpt, 18 | line, 19 | start_from, 20 | }, 21 | ) 22 | } 23 | 24 | #[derive(Serialize)] 25 | struct SourceArgs<'a> { 26 | path: &'a str, 27 | source: String, 28 | line: u32, 29 | 30 | #[serde(rename = "startFrom")] 31 | start_from: u32, 32 | } 33 | -------------------------------------------------------------------------------- /components/dada-debug/src/view.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::{DebugEvent, DebugEventPayload}; 2 | use handlebars::handlebars_helper; 3 | use rust_embed::Embed; 4 | 5 | use crate::server::State; 6 | 7 | pub async fn try_view(event_index: usize, state: &State) -> anyhow::Result { 8 | let Some(event_data) = state.debug_events.lock().unwrap().get(event_index).cloned() else 
{ 9 | anyhow::bail!("Event not found"); 10 | }; 11 | 12 | let DebugEvent { payload, .. } = &*event_data; 13 | match payload { 14 | DebugEventPayload::CheckLog(log) => Ok(crate::hbs::render("log", &log)?), 15 | DebugEventPayload::Diagnostic(_) => { 16 | anyhow::bail!("not implemented: view diagnostics") 17 | } 18 | } 19 | } 20 | 21 | pub async fn try_view_data(event_index: usize, state: &State) -> anyhow::Result { 22 | let Some(event_data) = state.debug_events.lock().unwrap().get(event_index).cloned() else { 23 | anyhow::bail!("Event not found"); 24 | }; 25 | 26 | let DebugEvent { payload, .. } = &*event_data; 27 | match payload { 28 | DebugEventPayload::CheckLog(log) => Ok(log.clone()), 29 | DebugEventPayload::Diagnostic(_) => { 30 | anyhow::bail!("not implemented: view diagnostics") 31 | } 32 | } 33 | } 34 | 35 | #[derive(Embed)] 36 | #[folder = "templates"] 37 | struct Assets; 38 | 39 | handlebars_helper!(index: |events: array, i: usize| events[i].clone()); 40 | -------------------------------------------------------------------------------- /components/dada-debug/templates/header.hbs: -------------------------------------------------------------------------------- 1 | 2 | 3 | {{title}} 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /components/dada-debug/templates/index.hbs: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | {{> header title="Dada Debug Log"}} 5 | 16 | 17 | 18 |

Dada Debug Log

19 | 53 | 54 | 55 | 56 | -------------------------------------------------------------------------------- /components/dada-debug/templates/render_event.hbs: -------------------------------------------------------------------------------- 1 | {{kind}} 2 | {{{(source_snippet compiler_location.file compiler_location.line 3 | compiler_location.column)}}} 4 | 5 | 6 | 7 | {{#if infer}} 8 | 10 | {{/if}} 11 | 12 | {{#if value}} 13 | {{value}} 14 | {{/if}} 15 | 16 | {{#if spawns}} 17 | 18 | {{/if}} -------------------------------------------------------------------------------- /components/dada-debug/templates/render_nested_event.hbs: -------------------------------------------------------------------------------- 1 |
  • 2 |
    3 | {{#with (index events_flat timestamp.index)}} 4 | {{> render_event this=this}} 5 | {{/with}} 6 | 7 | {{#if children}} 8 | 9 |
      10 | {{#each children}} 11 | {{>render_nested_event this=this events_flat=../events_flat}} 12 | {{/each}} 13 |
    14 | {{/if}} 15 |
    16 |
  • -------------------------------------------------------------------------------- /components/dada-debug/templates/source.hbs: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | {{> header title="Dada source: {{path}}"}} 6 | 7 | 8 | 9 | 14 | 15 | 16 | 17 |

    {{path}}

    18 |
    {{source}}
    19 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /components/dada-debug/templates/task_children.hbs: -------------------------------------------------------------------------------- 1 | {{#each children}} 2 |
  • 3 | {{> render_event this=(index events_flat this.timestamp.index)}} 4 | {{#if children}} 5 | 6 |
      7 | {{> task_children this=this events_flat=../events_flat}} 8 |
    9 | {{/if}} 10 |
  • 11 | {{/each}} 12 | -------------------------------------------------------------------------------- /components/dada-ir-ast/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-ir-ast" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | annotate-snippets = { workspace = true } 8 | dada-util = { version = "0.1.0", path = "../dada-util" } 9 | salsa = { workspace = true } 10 | serde = { version = "1.0.219", features = ["derive"] } 11 | serde_json = "1.0.140" 12 | url = { workspace = true, features = ["serde"] } 13 | -------------------------------------------------------------------------------- /components/dada-ir-ast/src/ast/class_item.rs: -------------------------------------------------------------------------------- 1 | use dada_util::SalsaSerialize; 2 | use serde::Serialize; 3 | 4 | use crate::{ 5 | ast::{AstFieldDecl, AstVisibility, DeferredParse}, 6 | span::{Span, Spanned}, 7 | }; 8 | 9 | use super::{AstGenericDecl, AstWhereClauses, Identifier, SpanVec}; 10 | 11 | /// Some kind of aggregate, like a class, struct, etc. 12 | /// 13 | /// `class $name[$generics] { ... }` or `class $name[$generics](...) { ... }` 14 | #[derive(SalsaSerialize)] 15 | #[salsa::tracked(debug)] 16 | pub struct AstAggregate<'db> { 17 | pub span: Span<'db>, 18 | 19 | /// Visibility of the class 20 | pub visibility: Option>, 21 | 22 | pub kind: AstAggregateKind, 23 | 24 | pub name: Identifier<'db>, 25 | 26 | pub name_span: Span<'db>, 27 | 28 | #[return_ref] 29 | pub generics: Option>>, 30 | 31 | /// If a `()` section is present... 32 | #[return_ref] 33 | pub inputs: Option>>, 34 | 35 | #[return_ref] 36 | pub where_clauses: Option>, 37 | 38 | /// The unparsed contents of the class. 39 | /// This can be parsed via the `members` 40 | /// method defined in `dada_parser::prelude`. 
41 | #[return_ref] 42 | pub contents: Option>, 43 | } 44 | 45 | impl<'db> Spanned<'db> for AstAggregate<'db> { 46 | fn span(&self, db: &'db dyn crate::Db) -> Span<'db> { 47 | AstAggregate::span(*self, db) 48 | } 49 | } 50 | 51 | #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)] 52 | pub enum AstAggregateKind { 53 | Class, 54 | Struct, 55 | } 56 | -------------------------------------------------------------------------------- /components/dada-ir-ast/src/ast/member.rs: -------------------------------------------------------------------------------- 1 | use dada_util::{FromImpls, SalsaSerialize}; 2 | use salsa::Update; 3 | use serde::Serialize; 4 | 5 | use crate::{ 6 | ast::AstVisibility, 7 | span::{Span, Spanned}, 8 | }; 9 | 10 | use super::{AstFunction, VariableDecl}; 11 | 12 | #[derive( 13 | Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Update, Debug, FromImpls, Serialize, 14 | )] 15 | pub enum AstMember<'db> { 16 | Field(AstFieldDecl<'db>), 17 | Function(AstFunction<'db>), 18 | } 19 | 20 | impl<'db> Spanned<'db> for AstMember<'db> { 21 | fn span(&self, db: &'db dyn crate::Db) -> Span<'db> { 22 | match self { 23 | AstMember::Field(field) => field.span(db), 24 | AstMember::Function(function) => function.span(db), 25 | } 26 | } 27 | } 28 | 29 | #[derive(SalsaSerialize)] 30 | #[salsa::tracked(debug)] 31 | pub struct AstFieldDecl<'db> { 32 | pub span: Span<'db>, 33 | pub visibility: Option>, 34 | pub variable: VariableDecl<'db>, 35 | } 36 | 37 | impl<'db> Spanned<'db> for AstFieldDecl<'db> { 38 | fn span(&self, db: &'db dyn crate::Db) -> Span<'db> { 39 | AstFieldDecl::span(*self, db) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /components/dada-ir-ast/src/ast/use_item.rs: -------------------------------------------------------------------------------- 1 | use dada_util::SalsaSerialize; 2 | 3 | use crate::span::{Span, Spanned}; 4 | 5 | use super::{AstPath, SpannedIdentifier}; 6 | 7 | /// `use $crate.$path [as $id]` 8 | #[derive(SalsaSerialize)] 9 | #[salsa::tracked(debug)] 10 | pub struct AstUse<'db> { 11 | pub span: Span<'db>, 12 | pub crate_name: SpannedIdentifier<'db>, 13 | #[return_ref] 14 | pub path: AstPath<'db>, 15 | pub as_id: Option>, 16 | } 17 | 18 | impl<'db> Spanned<'db> for AstUse<'db> { 19 | fn span(&self, db: &'db dyn crate::Db) -> Span<'db> { 20 | AstUse::span(*self, db) 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /components/dada-ir-ast/src/ast/util.rs: -------------------------------------------------------------------------------- 1 | use std::ops::Deref; 2 | 3 | use salsa::Update; 4 | use serde::Serialize; 5 | 6 | use crate::span::Span; 7 | 8 | #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Update, Debug, Serialize)] 9 | pub struct SpanVec<'db, T: Update> { 10 | // ------ FIXME: Bug in the derive? 
11 | pub span: Span<'db>, 12 | pub values: Vec, 13 | } 14 | 15 | impl Deref for SpanVec<'_, T> { 16 | type Target = Vec; 17 | 18 | fn deref(&self) -> &Self::Target { 19 | &self.values 20 | } 21 | } 22 | 23 | impl<'db, T> IntoIterator for &'db SpanVec<'db, T> 24 | where 25 | T: Update, 26 | { 27 | type Item = &'db T; 28 | 29 | type IntoIter = std::slice::Iter<'db, T>; 30 | 31 | fn into_iter(self) -> Self::IntoIter { 32 | self.values.iter() 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /components/dada-ir-ast/src/diagnostic/render.rs: -------------------------------------------------------------------------------- 1 | use crate::{diagnostic::Diagnostic, span::AbsoluteSpan}; 2 | use annotate_snippets::{Message, Renderer, Snippet}; 3 | use dada_util::arena::Arena; 4 | 5 | use super::RenderOptions; 6 | 7 | pub(super) fn render(db: &dyn crate::Db, opts: &RenderOptions, diagnostic: &Diagnostic) -> String { 8 | let arena = Arena::new(); 9 | let message = to_message(db, diagnostic, &arena); 10 | renderer(opts).render(message).to_string() 11 | } 12 | 13 | fn renderer(opts: &RenderOptions) -> Renderer { 14 | if opts.no_color { 15 | Renderer::plain() 16 | } else { 17 | Renderer::styled() 18 | } 19 | } 20 | 21 | fn to_level(level: crate::diagnostic::Level) -> annotate_snippets::Level { 22 | match level { 23 | crate::diagnostic::Level::Note => annotate_snippets::Level::Note, 24 | crate::diagnostic::Level::Warning => annotate_snippets::Level::Warning, 25 | crate::diagnostic::Level::Info => annotate_snippets::Level::Info, 26 | crate::diagnostic::Level::Help => annotate_snippets::Level::Help, 27 | crate::diagnostic::Level::Error => annotate_snippets::Level::Error, 28 | } 29 | } 30 | 31 | fn to_message<'a>( 32 | db: &'a dyn crate::Db, 33 | diagnostic: &'a Diagnostic, 34 | arena: &'a Arena, 35 | ) -> Message<'a> { 36 | to_level(diagnostic.level) 37 | .title(&diagnostic.message) 38 | .snippet(to_snippet(db, diagnostic, arena)) 39 | .footers(diagnostic.children.iter().map(|d| to_message(db, d, arena))) 40 | } 41 | 42 | fn to_snippet<'a>( 43 | db: &'a dyn crate::Db, 44 | diagnostic: &'a Diagnostic, 45 | arena: &'a Arena, 46 | ) -> Snippet<'a> { 47 | let source_file = diagnostic.span.source_file; 48 | 49 | let default_label = if !diagnostic.labels.is_empty() { 50 | None 51 | } else { 52 | Some( 53 | to_level(diagnostic.level) 54 | .span(to_span(diagnostic.span)) 55 | .label("here"), 56 | ) 57 | }; 58 | 59 | let url = source_file.url(db); 60 | let origin = arena.insert(db.url_display(url)); 61 | 62 | Snippet::source(source_file.contents_if_ok(db)) 63 | .line_start(1) 64 | .origin(origin) 65 | .fold(true) 66 | .annotations( 67 | diagnostic 68 | .labels 69 | .iter() 70 | .map(|label| { 71 | assert!(label.span.source_file == source_file); 72 | to_level(label.level) 73 | .span(to_span(label.span)) 74 | .label(&label.message) 75 | }) 76 | .chain(default_label), 77 | ) 78 | } 79 | 80 | fn to_span(span: AbsoluteSpan) -> std::ops::Range { 81 | span.start.as_usize()..span.end.as_usize() 82 | } 83 | -------------------------------------------------------------------------------- /components/dada-ir-ast/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::unused_unit)] // FIXME: salsa bug it seems 2 | 3 | use std::sync::mpsc::Sender; 4 | 5 | use ast::Identifier; 6 | use diagnostic::Diagnostic; 7 | use inputs::{CompilationRoot, Krate, SourceFile}; 8 | use span::AbsoluteOffset; 9 | use url::Url; 10 | 11 | 
#[macro_use] 12 | mod macro_rules; 13 | 14 | pub mod ast; 15 | pub mod diagnostic; 16 | pub mod inputs; 17 | pub mod span; 18 | 19 | #[salsa::db] 20 | pub trait Db: salsa::Database { 21 | /// Access the [`CompilationRoot`], from which all crates and sources can be reached. 22 | fn root(&self) -> CompilationRoot; 23 | 24 | /// Load a source-file from the given directory. 25 | /// The modules is a list of parent modules that translates to a file path. 26 | fn source_file<'db>(&'db self, krate: Krate, modules: &[Identifier<'db>]) -> SourceFile; 27 | 28 | /// Convert the url into a string suitable for showing the user. 29 | fn url_display(&self, url: &Url) -> String; 30 | 31 | /// Controls whether type-checking and other parts of the compiler will dump debug logs. 32 | /// If `None` is returned, no debugging output is emitted. 33 | /// If `Some` is returned, it should supply a directory where `.json` files will be created. 34 | /// The `dada_debug` crate will monitor this directory 35 | /// and serve up the information for use in debugging. 36 | fn debug_tx(&self) -> Option>; 37 | } 38 | 39 | /// A debug event 40 | pub struct DebugEvent { 41 | /// URL from the source code the event is associated with 42 | pub url: Url, 43 | 44 | /// Start of span from the source code the event is associated with 45 | pub start: AbsoluteOffset, 46 | 47 | /// End of span from the source code the event is associated with 48 | pub end: AbsoluteOffset, 49 | 50 | /// Data associated with the event 51 | pub payload: DebugEventPayload, 52 | } 53 | 54 | /// ata associated with debug events 55 | pub enum DebugEventPayload { 56 | /// A diagnostic was reported 57 | Diagnostic(Diagnostic), 58 | 59 | /// A log of the results from type-checking the code at the given url. 60 | /// The payload will be a `dada_ir_sym::check::debug::export::Log`. 61 | CheckLog(serde_json::Value), 62 | } 63 | -------------------------------------------------------------------------------- /components/dada-ir-ast/src/macro_rules.rs: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /components/dada-ir-sym/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-ir-sym" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [lints.clippy] 7 | needless_lifetimes = "allow" 8 | too_many_arguments = "allow" 9 | wrong_self_convention = "allow" 10 | format_in_format_args = "allow" 11 | 12 | [lib] 13 | doctest = false 14 | 15 | [dependencies] 16 | dada-ir-ast = { version = "0.1.0", path = "../dada-ir-ast" } 17 | dada-parser = { version = "0.1.0", path = "../dada-parser" } 18 | dada-util = { version = "0.1.0", path = "../dada-util" } 19 | either = "1.13.0" 20 | erased-serde = "0.4.6" 21 | futures = "0.3.31" 22 | itertools = "0.14.0" 23 | ordered-float = { version = "4.5.0", features = ["serde"] } 24 | salsa = { workspace = true } 25 | serde = { version = "1.0.219", features = ["derive"] } 26 | serde_json = "1.0.140" 27 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check.rs: -------------------------------------------------------------------------------- 1 | //! Defines the type-checking and name-resolution logic. This is what creates the symbolic IR. 
2 | #![doc = include_str!("../docs/type_checking.md")] 3 | 4 | use env::Env; 5 | use live_places::LivePlaces; 6 | use runtime::Runtime; 7 | 8 | use crate::ir::types::SymTy; 9 | 10 | pub(crate) mod blocks; 11 | mod debug; 12 | mod env; 13 | mod exprs; 14 | pub(crate) mod fields; 15 | pub(crate) mod functions; 16 | mod generics; 17 | pub(crate) mod inference; 18 | mod live_places; 19 | mod member_lookup; 20 | mod modules; 21 | mod places; 22 | pub(crate) mod predicates; 23 | pub(crate) mod red; 24 | pub(crate) mod report; 25 | mod resolve; 26 | mod runtime; 27 | pub(crate) mod scope; 28 | pub(crate) mod scope_tree; 29 | pub(crate) mod signature; 30 | mod statements; 31 | mod stream; 32 | mod subst_impls; 33 | pub(crate) mod subtype; 34 | mod temporaries; 35 | mod to_red; 36 | mod types; 37 | mod universe; 38 | 39 | /// Check an expression in a full environment. 40 | /// This is an async operation -- it may block if insufficient inference data is available. 41 | trait CheckTyInEnv<'db> { 42 | type Output; 43 | 44 | async fn check_in_env(&self, env: &mut Env<'db>) -> Self::Output; 45 | } 46 | 47 | trait CheckExprInEnv<'db> { 48 | type Output; 49 | 50 | async fn check_in_env(&self, env: &mut Env<'db>, live_after: LivePlaces) -> Self::Output; 51 | } 52 | 53 | impl<'db> CheckTyInEnv<'db> for SymTy<'db> { 54 | type Output = SymTy<'db>; 55 | 56 | async fn check_in_env(&self, _env: &mut Env<'db>) -> Self::Output { 57 | *self 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/blocks.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::ast::AstBlock; 2 | 3 | use crate::{check::env::Env, check::statements::check_block_statements, ir::exprs::SymExpr}; 4 | 5 | use super::{CheckExprInEnv, live_places::LivePlaces}; 6 | 7 | impl<'db> CheckExprInEnv<'db> for AstBlock<'db> { 8 | type Output = SymExpr<'db>; 9 | 10 | async fn check_in_env(&self, env: &mut Env<'db>, live_after: LivePlaces) -> Self::Output { 11 | let db = env.db(); 12 | 13 | let statements = self.statements(db); 14 | check_block_statements(env, live_after, statements.span, statements).await 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/debug/export.rs: -------------------------------------------------------------------------------- 1 | //! Prepare the debug log for export as JSON. 2 | 3 | use std::{borrow::Cow, panic::Location}; 4 | 5 | use serde::Serialize; 6 | 7 | use crate::ir::indices::InferVarIndex; 8 | 9 | #[derive(Serialize, Debug)] 10 | pub struct Log<'a> { 11 | pub events_flat: Vec>, 12 | pub nested_event: NestedEvent, 13 | pub infers: Vec, 14 | pub tasks: Vec, 15 | // New fields 16 | pub root_event_info: RootEventInfo<'a>, 17 | pub total_events: usize, 18 | } 19 | 20 | // New structure to hold detailed root event information 21 | #[derive(Serialize, Debug)] 22 | pub struct RootEventInfo<'a> { 23 | pub compiler_location: CompilerLocation<'a>, 24 | pub description: String, 25 | } 26 | 27 | #[derive(Serialize, Debug)] 28 | pub struct Event<'a> { 29 | /// Where in the Rust source... 30 | pub compiler_location: CompilerLocation<'a>, 31 | 32 | /// Task in which this event occurred. 33 | pub task: TaskId, 34 | 35 | /// Kind of event. 36 | pub kind: &'a str, 37 | 38 | /// Embedded JSON containing the value. 39 | pub value: Cow<'a, str>, 40 | 41 | /// If this event spawns a task, this is its id. 
42 | pub spawns: Option, 43 | 44 | /// If this event describes creation/change to an inference variable, this is its id. 45 | pub infer: Option, 46 | } 47 | 48 | #[derive(Serialize, Debug)] 49 | pub struct CompilerLocation<'a> { 50 | pub file: &'a str, 51 | pub line: u32, 52 | pub column: u32, 53 | } 54 | 55 | impl<'a> From<&'a Location<'a>> for CompilerLocation<'a> { 56 | fn from(location: &'a Location<'a>) -> Self { 57 | Self { 58 | file: location.file(), 59 | line: location.line(), 60 | column: location.column(), 61 | } 62 | } 63 | } 64 | 65 | #[derive(Copy, Clone, Serialize, Debug)] 66 | pub struct TimeStamp { 67 | pub index: usize, 68 | } 69 | 70 | #[derive(Serialize, Debug)] 71 | pub struct Task { 72 | pub spawned_at: TimeStamp, 73 | pub description: String, 74 | pub events: Vec, 75 | } 76 | 77 | #[derive(Copy, Clone, Debug, Serialize)] 78 | pub struct TaskId { 79 | pub index: usize, 80 | } 81 | 82 | #[derive(Serialize, Debug)] 83 | pub struct NestedEvent { 84 | /// Index for this event in the "event by time" list 85 | pub timestamp: TimeStamp, 86 | 87 | /// "Children" events are either (a) the indented events, 88 | /// if this is an indent, or (b) the events from the 89 | /// spawned task, if this is a spawn. 90 | pub children: Vec, 91 | } 92 | 93 | #[derive(Copy, Clone, Serialize, Debug)] 94 | pub struct InferId { 95 | pub index: usize, 96 | } 97 | 98 | /// Information about an inference variable 99 | #[derive(Serialize, Debug)] 100 | pub struct Infer { 101 | /// Location of the event that created the value of the variable 102 | pub created_at: TimeStamp, 103 | 104 | /// Location of each event that modified the value of the variable 105 | pub events: Vec, 106 | } 107 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/fields.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::diagnostic::{Diagnostic, Err, Errors, Level, Reported}; 2 | 3 | use crate::{ 4 | check::{env::Env, runtime::Runtime}, 5 | ir::{ 6 | binder::Binder, classes::SymField, populate::variable_decl_requires_default_perm, 7 | types::SymTy, 8 | }, 9 | }; 10 | 11 | use super::CheckTyInEnv; 12 | 13 | /// Check the type of a field. 14 | /// The returned type has two binders, the outer binder is the class, 15 | /// the inner binder is the `self` place. 16 | pub(crate) fn check_field<'db>( 17 | db: &'db dyn crate::Db, 18 | field: SymField<'db>, 19 | ) -> Errors>>> { 20 | Runtime::execute( 21 | db, 22 | field.name_span(db), 23 | "check_field", 24 | &[&field], 25 | async move |runtime| -> Errors>>> { 26 | let scope = field.into_scope(db); 27 | let mut env = Env::new(runtime, scope); 28 | 29 | let decl = field.source(db).variable(db); 30 | 31 | // In fields, we don't permit something like `x: String`, 32 | // user must write `x: my String`. 
33 | if variable_decl_requires_default_perm(db, decl, &env.scope) { 34 | Diagnostic::new( 35 | db, 36 | Level::Error, 37 | decl.base_ty(db).span(db), 38 | "explicit permission required", 39 | ) 40 | .report(db); 41 | } 42 | 43 | let ast_base_ty = decl.base_ty(db); 44 | let sym_base_ty = ast_base_ty.check_in_env(&mut env).await; 45 | let sym_ty = if let Some(ast_perm) = decl.perm(db) { 46 | let sym_perm = ast_perm.check_in_env(&mut env).await; 47 | SymTy::perm(db, sym_perm, sym_base_ty) 48 | } else { 49 | sym_base_ty 50 | }; 51 | 52 | let bound_ty = env.into_scope().into_bound_value(db, sym_ty); 53 | Ok(bound_ty) 54 | }, 55 | |bound_ty| bound_ty, 56 | ) 57 | } 58 | 59 | pub(crate) fn field_err_ty<'db>( 60 | db: &'db dyn crate::Db, 61 | field: SymField<'db>, 62 | reported: Reported, 63 | ) -> Binder<'db, Binder<'db, SymTy<'db>>> { 64 | let scope = field.into_scope(db); 65 | scope.into_bound_value(db, SymTy::err(db, reported)) 66 | } 67 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/generics.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::ast::{AstWhereClause, AstWhereClauseKind}; 2 | 3 | use crate::ir::generics::{SymWhereClause, SymWhereClauseKind}; 4 | 5 | use super::{CheckTyInEnv, env::Env}; 6 | 7 | pub async fn symbolify_ast_where_clause<'db>( 8 | env: &mut Env<'db>, 9 | ast_where_clause: AstWhereClause<'db>, 10 | output: &mut Vec>, 11 | ) { 12 | let db = env.db(); 13 | let subject = ast_where_clause.subject(db).check_in_env(env).await; 14 | let mut push_kind = 15 | |kind: SymWhereClauseKind| output.push(SymWhereClause::new(db, subject, kind)); 16 | 17 | for kind in ast_where_clause.kinds(db) { 18 | match kind { 19 | AstWhereClauseKind::Reference(_) => { 20 | push_kind(SymWhereClauseKind::Shared); 21 | push_kind(SymWhereClauseKind::Lent); 22 | } 23 | AstWhereClauseKind::Mutable(_) => { 24 | push_kind(SymWhereClauseKind::Unique); 25 | push_kind(SymWhereClauseKind::Lent); 26 | } 27 | AstWhereClauseKind::Shared(_) => { 28 | push_kind(SymWhereClauseKind::Shared); 29 | } 30 | AstWhereClauseKind::Owned(_) => { 31 | push_kind(SymWhereClauseKind::Owned); 32 | } 33 | AstWhereClauseKind::Lent(_) => { 34 | push_kind(SymWhereClauseKind::Lent); 35 | } 36 | AstWhereClauseKind::Unique(_) => { 37 | push_kind(SymWhereClauseKind::Unique); 38 | } 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/inference/reconcile.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::diagnostic::Errors; 2 | use dada_util::vecset::VecSet; 3 | 4 | use crate::{ 5 | check::{env::Env, inference::InferVarKind, red::{Chain, RedTerm, RedTy}, subtype::terms::require_sub_terms}, 6 | ir::{indices::{FromInfer, InferVarIndex}, types::{SymTy, SymTyName}}, 7 | }; 8 | 9 | 10 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/inference/serialize.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::span::Span; 2 | use serde::Serialize; 3 | 4 | use crate::{ 5 | check::red::{RedPerm, RedTy}, 6 | ir::indices::InferVarIndex, 7 | }; 8 | 9 | use super::{InferenceVarBounds, InferenceVarData}; 10 | 11 | // Stripped down version of `InferenceVarData` that excludes `ArcOrElse` objects. 12 | // Suitable for serialization and debugging. 
13 | 14 | #[derive(Serialize)] 15 | struct InferenceVarDataExport<'a, 'db> { 16 | span: Span<'db>, 17 | is: Vec, 18 | bounds: InferenceVarBoundsExport<'a, 'db>, 19 | } 20 | 21 | #[derive(Serialize)] 22 | enum InferenceVarBoundsExport<'a, 'db> { 23 | Perm { 24 | lower: Vec<&'a RedPerm<'db>>, 25 | upper: Vec<&'a RedPerm<'db>>, 26 | }, 27 | 28 | Ty { 29 | perm: InferVarIndex, 30 | lower: Option<&'a RedTy<'db>>, 31 | upper: Option<&'a RedTy<'db>>, 32 | }, 33 | } 34 | 35 | impl Serialize for InferenceVarData<'_> { 36 | fn serialize(&self, serializer: S) -> Result 37 | where 38 | S: serde::Serializer, 39 | { 40 | let Self { span, is, bounds } = self; 41 | 42 | let bounds = match bounds { 43 | InferenceVarBounds::Perm { lower, upper } => InferenceVarBoundsExport::Perm { 44 | lower: lower.iter().map(|pair| &pair.0).collect(), 45 | upper: upper.iter().map(|pair| &pair.0).collect(), 46 | }, 47 | InferenceVarBounds::Ty { perm, lower, upper } => InferenceVarBoundsExport::Ty { 48 | perm: *perm, 49 | lower: lower.as_ref().map(|pair| &pair.0), 50 | upper: upper.as_ref().map(|pair| &pair.0), 51 | }, 52 | }; 53 | 54 | let export = InferenceVarDataExport { 55 | span: *span, 56 | is: is.iter().map(|option| option.is_some()).collect(), 57 | bounds, 58 | }; 59 | 60 | Serialize::serialize(&export, serializer) 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/live_places.rs: -------------------------------------------------------------------------------- 1 | use crate::ir::types::SymPlace; 2 | 3 | use super::env::Env; 4 | 5 | /// Placeholder for the liveness computation we will be doing 6 | #[derive(Copy, Clone)] 7 | pub struct LivePlaces {} 8 | 9 | #[expect(unused_variables)] 10 | impl LivePlaces { 11 | /// Assume no places are live. 12 | pub fn none<'db>(env: &Env<'db>) -> Self { 13 | Self {} 14 | } 15 | 16 | /// Special placeholder for when we relate bounds on inference variables. 17 | /// For permissions, these bounds are [`RedPerm`](`crate::check::red::RedPerm`) 18 | /// values and already contain liveness information. 19 | pub fn infer_bounds() -> Self { 20 | Self {} 21 | } 22 | 23 | /// Used where we have to think about the right value 24 | pub fn fixme() -> Self { 25 | Self {} 26 | } 27 | 28 | pub fn is_live<'db>(&self, env: &Env<'db>, place: SymPlace<'db>) -> bool { 29 | true // FIXME 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/modules.rs: -------------------------------------------------------------------------------- 1 | use crate::{ir::module::SymModule, prelude::CheckUseItems}; 2 | use dada_ir_ast::{diagnostic::Errors, span::Spanned}; 3 | 4 | use super::{Env, Runtime, scope::Resolve}; 5 | 6 | /// Resolve all use items found in this module. 7 | /// This is executed by `dada-ir-check` crate 8 | /// simply to force errors to be reported. 
9 | #[salsa::tracked] 10 | impl<'db> CheckUseItems<'db> for SymModule<'db> { 11 | #[salsa::tracked] 12 | fn check_use_items(self, db: &'db dyn crate::Db) { 13 | let _: Errors<()> = Runtime::execute( 14 | db, 15 | self.span(db), 16 | "check_use_items", 17 | &[&self], 18 | async move |runtime| { 19 | let mut env = Env::new(runtime, self.mod_scope(db)); 20 | for item in self.ast_use_map(db).values() { 21 | let _ = item.path(db).resolve_in(&mut env).await; 22 | } 23 | Ok(()) 24 | }, 25 | |v| v, 26 | ); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/places.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::diagnostic::Err; 2 | use dada_util::boxed_async_fn; 3 | 4 | use crate::{ 5 | check::env::Env, 6 | ir::{ 7 | classes::SymField, 8 | types::{SymGenericTerm, SymPerm, SymPlace, SymPlaceKind, SymTy}, 9 | }, 10 | prelude::CheckedFieldTy, 11 | }; 12 | 13 | use super::{inference::Direction, red::RedTy, to_red::ToRedTy}; 14 | 15 | pub trait PlaceTy<'db> { 16 | async fn place_ty(&self, env: &mut Env<'db>) -> SymTy<'db>; 17 | } 18 | 19 | impl<'db> PlaceTy<'db> for SymPlace<'db> { 20 | #[boxed_async_fn] 21 | async fn place_ty(&self, env: &mut Env<'db>) -> SymTy<'db> { 22 | match *self.kind(env.db()) { 23 | SymPlaceKind::Var(sym_variable) => env.variable_ty(sym_variable).await, 24 | SymPlaceKind::Field(owner_place, sym_field) => { 25 | let owner_ty = owner_place.place_ty(env).await; 26 | let (owner_red_ty, owner_perm) = owner_ty.to_red_ty(env); 27 | field_ty(env, owner_place, owner_perm, owner_red_ty, sym_field) 28 | } 29 | SymPlaceKind::Index(_sym_place) => { 30 | todo!() 31 | } 32 | SymPlaceKind::Error(reported) => SymTy::err(env.db(), reported), 33 | SymPlaceKind::Erased => panic!("cannot compute type of an erased place"), 34 | } 35 | } 36 | } 37 | 38 | fn field_ty<'db>( 39 | env: &mut Env<'db>, 40 | owner_place: SymPlace<'db>, 41 | owner_perm: SymPerm<'db>, 42 | owner_red_ty: RedTy<'db>, 43 | sym_field: SymField<'db>, 44 | ) -> SymTy<'db> { 45 | let db = env.db(); 46 | match owner_red_ty { 47 | RedTy::Error(reported) => SymTy::err(db, reported), 48 | 49 | RedTy::Named(_name, generics) => { 50 | // FIXME: eventually we probably want to upcast here 51 | let field_ty = sym_field.checked_field_ty(db); 52 | let field_ty = field_ty 53 | .substitute(env.db(), &generics) 54 | .substitute(env.db(), &[SymGenericTerm::Place(owner_place)]); 55 | 56 | owner_perm.apply_to(db, field_ty) 57 | } 58 | 59 | RedTy::Infer(infer) => { 60 | // To have constructed this place there must have been a valid inference bound already 61 | let (infer_red_ty, _) = env 62 | .red_bound(infer, Direction::FromBelow) 63 | .peek_ty() 64 | .unwrap(); 65 | field_ty(env, owner_place, owner_perm, infer_red_ty, sym_field) 66 | } 67 | 68 | RedTy::Perm | RedTy::Var(_) | RedTy::Never => { 69 | unreachable!("no fields on a {owner_red_ty:?}") 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/predicates/require_where_clause.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::diagnostic::Errors; 2 | 3 | use crate::{ 4 | check::{env::Env, report::OrElse}, 5 | ir::generics::{SymWhereClause, SymWhereClauseKind}, 6 | }; 7 | 8 | use super::{ 9 | require_lent::require_term_is_lent, require_owned::require_term_is_owned, 10 | require_shared::require_term_is_shared, 
require_unique::require_term_is_unique, 11 | }; 12 | 13 | pub async fn require_where_clause<'db>( 14 | env: &mut Env<'db>, 15 | where_clause: SymWhereClause<'db>, 16 | or_else: &dyn OrElse<'db>, 17 | ) -> Errors<()> { 18 | let db = env.db(); 19 | let subject = where_clause.subject(db); 20 | match where_clause.kind(db) { 21 | SymWhereClauseKind::Unique => require_term_is_unique(env, subject, or_else).await, 22 | SymWhereClauseKind::Shared => require_term_is_shared(env, subject, or_else).await, 23 | SymWhereClauseKind::Owned => require_term_is_owned(env, subject, or_else).await, 24 | SymWhereClauseKind::Lent => require_term_is_lent(env, subject, or_else).await, 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/red/sub.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::diagnostic::Errors; 2 | 3 | use crate::check::env::Env; 4 | 5 | use super::{RedChain, RedLink}; 6 | 7 | pub fn chain_sub_chain<'db>( 8 | env: &Env<'db>, 9 | lower_chain: RedChain<'db>, 10 | upper_chain: RedChain<'db>, 11 | ) -> Errors { 12 | let db = env.db(); 13 | links_sub_links(env, lower_chain.links(db), upper_chain.links(db)) 14 | } 15 | 16 | fn links_sub_links<'db>( 17 | env: &Env<'db>, 18 | lower_links: &[RedLink<'db>], 19 | upper_links: &[RedLink<'db>], 20 | ) -> Errors { 21 | macro_rules! rules { 22 | ($($pat:pat => $cond:expr,)*) => { 23 | match (lower_links, upper_links) { 24 | $( 25 | $pat if $cond => Ok(true), 26 | )* 27 | _ => Ok(false), 28 | } 29 | }; 30 | } 31 | 32 | rules! { 33 | ([], []) => true, 34 | 35 | ([RedLink::Our], links_u) => RedLink::are_copy(env, links_u)?, 36 | 37 | ([RedLink::Our, tail_l @ ..], [head_u, tail_u @ ..]) => { 38 | head_u.is_copy(env)? 39 | && links_sub_links(env, tail_l, tail_u)? 40 | }, 41 | 42 | ( 43 | [ 44 | RedLink::Ref(_, place_l), 45 | tail_l @ .., 46 | ], 47 | [ 48 | RedLink::Ref(_, place_u), 49 | tail_u @ .., 50 | ], 51 | ) 52 | | ( 53 | [ 54 | RedLink::Mut(_, place_l), 55 | tail_l @ .., 56 | ], 57 | [ 58 | RedLink::Mut(_, place_u), 59 | tail_u @ .., 60 | ], 61 | ) 62 | | ( 63 | [ 64 | RedLink::Ref(_, place_l), 65 | tail_l @ .., 66 | ], 67 | [ 68 | RedLink::Our, 69 | RedLink::Mut(_, place_u), 70 | tail_u @ .., 71 | ], 72 | ) => { 73 | place_u.is_prefix_of(env.db(), *place_l) 74 | && links_sub_links(env, tail_l, tail_u)? 75 | }, 76 | 77 | ([RedLink::Var(var_l), tail_l @ ..], [RedLink::Var(var_u), tail_u @ ..]) => { 78 | var_l == var_u && links_sub_links(env, tail_l, tail_u)? 79 | }, 80 | 81 | 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/stream.rs: -------------------------------------------------------------------------------- 1 | use std::pin::Pin; 2 | 3 | use super::env::Env; 4 | 5 | pub struct Consumer<'c, 'db, A, R> 6 | where 7 | A: 'c, 8 | R: 'c, 9 | { 10 | op: Box + 'c>, 11 | } 12 | 13 | impl<'c, 'db, A, R> Consumer<'c, 'db, A, R> 14 | where 15 | A: 'c, 16 | R: 'c, 17 | { 18 | pub fn new(op: impl AsyncFnMut(&mut Env<'db>, A) -> R + 'c) -> Self { 19 | Consumer { op: Box::new(op) } 20 | } 21 | 22 | pub async fn consume(&mut self, env: &mut Env<'db>, arg: A) -> R { 23 | self.op.consume(env, arg).await 24 | } 25 | } 26 | 27 | /// Dyn-safe wrapper around a closure. 
28 | trait ErasedConsumer<'db, A, R> { 29 | fn consume<'a>( 30 | &'a mut self, 31 | env: &'a mut Env<'db>, 32 | arg: A, 33 | ) -> Pin + 'a>> 34 | where 35 | A: 'a; 36 | } 37 | 38 | impl<'db, F, A, R> ErasedConsumer<'db, A, R> for F 39 | where 40 | F: AsyncFnMut(&mut Env<'db>, A) -> R, 41 | { 42 | fn consume<'a>( 43 | &'a mut self, 44 | env: &'a mut Env<'db>, 45 | arg: A, 46 | ) -> Pin + 'a>> 47 | where 48 | A: 'a, 49 | { 50 | Box::pin(self(env, arg)) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/subtype.rs: -------------------------------------------------------------------------------- 1 | //! Subtyping relations and type conversions. 2 | #![doc = include_str!("../../docs/subtyping.md")] 3 | 4 | pub mod is_future; 5 | pub mod is_numeric; 6 | mod perms; 7 | pub mod relate_infer_bounds; 8 | pub mod terms; 9 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/subtype/is_future.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::diagnostic::Errors; 2 | use dada_util::boxed_async_fn; 3 | 4 | use crate::{ 5 | check::{ 6 | env::Env, 7 | inference::Direction, 8 | live_places::LivePlaces, 9 | red::RedTy, 10 | report::{Because, OrElse, OrElseHelper}, 11 | to_red::ToRedTy, 12 | }, 13 | ir::types::{SymTy, SymTyName}, 14 | }; 15 | 16 | use super::terms::require_sub_terms; 17 | 18 | /// Requires that `ty` resolves to a future type 19 | /// that awaits a value of type `awaited_ty`. 20 | pub async fn require_future_type<'db>( 21 | env: &mut Env<'db>, 22 | live_after: LivePlaces, 23 | ty: SymTy<'db>, 24 | awaited_ty: SymTy<'db>, 25 | or_else: &dyn OrElse<'db>, 26 | ) -> Errors<()> { 27 | let (red_ty, _) = ty.to_red_ty(env); 28 | require_future_red_type(env, live_after, red_ty, awaited_ty, or_else).await 29 | } 30 | 31 | #[boxed_async_fn] 32 | async fn require_future_red_type<'db>( 33 | env: &mut Env<'db>, 34 | live_after: LivePlaces, 35 | red_ty: RedTy<'db>, 36 | awaited_ty: SymTy<'db>, 37 | or_else: &dyn OrElse<'db>, 38 | ) -> Errors<()> { 39 | let db = env.db(); 40 | match red_ty { 41 | RedTy::Error(reported) => Err(reported), 42 | 43 | RedTy::Named(sym_ty_name, generic_args) => match sym_ty_name { 44 | SymTyName::Future => { 45 | let future_ty_arg = generic_args[0].assert_type(db); 46 | require_sub_terms( 47 | env, 48 | live_after, 49 | future_ty_arg.into(), 50 | awaited_ty.into(), 51 | or_else, 52 | ) 53 | .await 54 | } 55 | SymTyName::Primitive(_) | SymTyName::Aggregate(_) | SymTyName::Tuple { arity: _ } => { 56 | Err(or_else.report(env, Because::JustSo)) 57 | } 58 | }, 59 | 60 | RedTy::Var(_) | RedTy::Never => Err(or_else.report(env, Because::JustSo)), 61 | 62 | RedTy::Infer(infer) => { 63 | // For inference variables: find the current lower bound 64 | // and check if it is numeric. Since the bound can only get tighter, 65 | // that is sufficient (indeed, numeric types have no subtypes). 
66 | let Some((lower_red_ty, arc_or_else)) = 67 | env.red_bound(infer, Direction::FromBelow).ty().await 68 | else { 69 | return Err( 70 | or_else.report(env, Because::UnconstrainedInfer(env.infer_var_span(infer))) 71 | ); 72 | }; 73 | require_future_red_type( 74 | env, 75 | live_after, 76 | lower_red_ty.clone(), 77 | awaited_ty, 78 | &or_else.map_because(move |_| { 79 | Because::InferredLowerBound(lower_red_ty.clone(), arc_or_else.clone()) 80 | }), 81 | ) 82 | .await 83 | } 84 | 85 | RedTy::Perm => unreachable!("SymTy had a red ty of SymPerm"), 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/subtype/relate_infer_bounds.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::diagnostic::Errors; 2 | 3 | use crate::{ 4 | check::{env::Env, inference::Direction, live_places::LivePlaces, report}, 5 | ir::{indices::InferVarIndex, types::SymPerm}, 6 | }; 7 | 8 | use super::terms::require_sub_terms; 9 | 10 | /// A task that runs for each type inference variable. It awaits any upper/lower bounds 11 | /// and propagates a corresponding bound. 12 | pub async fn relate_infer_bounds<'db>(env: &mut Env<'db>, infer: InferVarIndex) -> Errors<()> { 13 | let mut lower_bound = None; 14 | let mut upper_bound = None; 15 | 16 | let mut bounds = env.term_bounds(SymPerm::my(env.db()), infer, None); 17 | while let Some((direction, new_bound)) = bounds.next(env).await { 18 | match direction { 19 | Direction::FromBelow => lower_bound = Some(new_bound), 20 | Direction::FromAbove => upper_bound = Some(new_bound), 21 | } 22 | 23 | if let (Some(lower), Some(upper)) = (lower_bound, upper_bound) { 24 | // FIXME: the iterator should be yielding up ArcOrElse values 25 | require_sub_terms( 26 | env, 27 | LivePlaces::infer_bounds(), 28 | lower, 29 | upper, 30 | &report::BadSubtermError::new(env.infer_var_span(infer), lower, upper), 31 | ) 32 | .await?; 33 | } 34 | } 35 | 36 | Ok(()) 37 | } 38 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/temporaries.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::span::Span; 2 | use serde::Serialize; 3 | 4 | use crate::ir::{ 5 | exprs::{SymExpr, SymPlaceExpr, SymPlaceExprKind}, 6 | types::{SymGenericKind, SymTy}, 7 | variables::SymVariable, 8 | }; 9 | 10 | /// Translating an expression can result in the creation of 11 | /// anonymous local temporaries that are injected into the 12 | /// surrounding scope. These are returned alongside the result 13 | /// and will eventually be translated into `let-in` expressions 14 | /// when we reach the surrounding statement, block, or other 15 | /// terminating context. 16 | #[derive(Clone, Serialize)] 17 | pub(crate) struct Temporary<'db> { 18 | pub lv: SymVariable<'db>, 19 | pub ty: SymTy<'db>, 20 | pub initializer: Option>, 21 | } 22 | 23 | impl<'db> Temporary<'db> { 24 | pub fn new( 25 | db: &'db dyn crate::Db, 26 | span: Span<'db>, 27 | ty: SymTy<'db>, 28 | initializer: Option>, 29 | ) -> Self { 30 | let lv = SymVariable::new(db, SymGenericKind::Place, None, span); 31 | Self { 32 | lv, 33 | ty, 34 | initializer, 35 | } 36 | } 37 | } 38 | 39 | impl<'db> SymExpr<'db> { 40 | /// Create a temporary to store the result of this expression. 41 | /// 42 | /// Returns a reference to the temporary as a place expression. 
43 | pub(crate) fn into_temporary( 44 | self, 45 | db: &'db dyn crate::Db, 46 | temporaries: &mut Vec>, 47 | ) -> SymPlaceExpr<'db> { 48 | let ty = self.ty(db); 49 | let lv = self.into_temporary_var(db, temporaries); 50 | SymPlaceExpr::new(db, self.span(db), ty, SymPlaceExprKind::Var(lv)) 51 | } 52 | 53 | /// Create a temporary to store the result of this expression. 54 | /// 55 | /// Returns a reference to the temporary as a variable. 56 | pub(crate) fn into_temporary_var( 57 | self, 58 | db: &'db dyn crate::Db, 59 | temporaries: &mut Vec>, 60 | ) -> SymVariable<'db> { 61 | let ty = self.ty(db); 62 | 63 | // Create a temporary to store the result of this expression. 64 | let temporary = Temporary::new(db, self.span(db), ty, Some(self)); 65 | let lv = temporary.lv; 66 | temporaries.push(temporary); 67 | 68 | // The result will be a reference to that temporary. 69 | lv 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/check/universe.rs: -------------------------------------------------------------------------------- 1 | use std::num::NonZeroU32; 2 | 3 | use serde::Serialize; 4 | 5 | /// A "universe" defines the set of all terms (types/permissions/etc) in a program. 6 | /// The root universe [`Universe::ROOT`][] consists of the terms the user wrote. 7 | /// We create other universes synthetically to create free universal variables. 8 | /// For example, in a closure body, we are in a distinct universe, which allows us to 9 | /// define closures that can reference a (generic) type `T` that doesn't exist in the parent universe. 10 | /// 11 | /// Universes are ordered. `U1 < U2` means that `U2` can contain strictly more terms than `U1`. 12 | /// `Universe::ROOT <= U` for all `U`. 13 | #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)] 14 | pub(crate) struct Universe(NonZeroU32); 15 | 16 | impl Universe { 17 | /// The universe containing the things the user themselves wrote. 18 | /// `ROOT` <= all other universes. 19 | pub const ROOT: Universe = Universe(NonZeroU32::new(1).unwrap()); 20 | 21 | /// Create a universe one larger than the current universe. 22 | pub fn next(self) -> Universe { 23 | Universe(self.0.checked_add(1).unwrap()) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/ir.rs: -------------------------------------------------------------------------------- 1 | //! Defines the symbolic intermediate representation. 2 | //! This is a type-checked, name-resolved version of the AST. 3 | //! Also defines methods to create symbols (and the symbol tree) for functions, types, parameters, etc. 
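As a quick illustration of the ordering described for `Universe` in `check/universe.rs` above: `ROOT` is the least universe and `next()` always produces a strictly larger one. A hedged, crate-internal test sketch (not present in the repository):

```rust
#[cfg(test)]
mod universe_ordering {
    use super::Universe;

    #[test]
    fn root_is_smallest() {
        let root = Universe::ROOT;
        let closure_universe = root.next(); // e.g. entering a closure body
        assert!(root < closure_universe);
        assert!(Universe::ROOT <= closure_universe.next());
    }
}
```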
4 | 5 | pub mod binder; 6 | pub mod classes; 7 | pub mod exprs; 8 | pub mod functions; 9 | pub mod generics; 10 | pub mod indices; 11 | pub mod module; 12 | pub(crate) mod populate; 13 | pub mod primitive; 14 | pub mod subst; 15 | pub mod types; 16 | pub mod variables; 17 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/ir/generics.rs: -------------------------------------------------------------------------------- 1 | use dada_util::SalsaSerialize; 2 | use salsa::Update; 3 | use serde::Serialize; 4 | 5 | use super::types::SymGenericTerm; 6 | 7 | #[derive(SalsaSerialize)] 8 | #[salsa::interned(debug)] 9 | pub struct SymWhereClause<'db> { 10 | pub subject: SymGenericTerm<'db>, 11 | pub kind: SymWhereClauseKind, 12 | } 13 | 14 | #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Update, Debug, Serialize)] 15 | pub enum SymWhereClauseKind { 16 | Unique, 17 | Shared, 18 | Owned, 19 | Lent, 20 | } 21 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/ir/indices.rs: -------------------------------------------------------------------------------- 1 | use salsa::Update; 2 | use serde::Serialize; 3 | 4 | use crate::ir::types::SymGenericKind; 5 | 6 | /// Identifies a particular inference variable during type checking. 7 | #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Update, Debug, Serialize)] 8 | pub struct InferVarIndex(usize); 9 | 10 | /// Create an instance of `Self` from an inference variable 11 | pub trait FromInfer<'db> { 12 | fn infer(db: &'db dyn crate::Db, var: InferVarIndex) -> Self; 13 | } 14 | 15 | impl InferVarIndex { 16 | pub fn as_usize(self) -> usize { 17 | self.0 18 | } 19 | 20 | pub fn range(max: InferVarIndex) -> impl Iterator { 21 | (0..max.0).map(InferVarIndex) 22 | } 23 | } 24 | 25 | impl From for InferVarIndex { 26 | fn from(value: usize) -> Self { 27 | InferVarIndex(value) 28 | } 29 | } 30 | 31 | impl std::ops::Add for InferVarIndex { 32 | type Output = InferVarIndex; 33 | 34 | fn add(self, value: usize) -> Self { 35 | Self::from(self.as_usize().checked_add(value).unwrap()) 36 | } 37 | } 38 | 39 | impl std::ops::Sub for InferVarIndex { 40 | type Output = usize; 41 | 42 | fn sub(self, value: InferVarIndex) -> usize { 43 | self.as_usize().checked_sub(value.as_usize()).unwrap() 44 | } 45 | } 46 | 47 | /// Many of our types can be created from a variable 48 | pub trait FromInferVar<'db> { 49 | fn infer(db: &'db dyn crate::Db, kind: SymGenericKind, var: InferVarIndex) -> Self; 50 | } 51 | -------------------------------------------------------------------------------- /components/dada-ir-sym/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! "Symbolic IR": High-level, type checked representaton. Derived from the AST. 2 | #![doc = include_str!("../docs/overview.md")] 3 | 4 | pub use dada_ir_ast::Db; 5 | 6 | pub mod check; 7 | pub mod ir; 8 | pub mod well_known; 9 | 10 | pub mod prelude { 11 | use crate::ir::binder::Binder; 12 | use crate::ir::classes::SymField; 13 | use crate::ir::exprs::SymExpr; 14 | use crate::ir::functions::{SymFunction, SymFunctionSignature}; 15 | use crate::ir::types::SymTy; 16 | use dada_ir_ast::diagnostic::Errors; 17 | 18 | /// Return the symbol corresponding to the AST node. 19 | /// Implementations are memoized so that this can be called many times and will always yield the same symbol. 
20 | pub trait Symbol<'db>: Copy { 21 | type Output; 22 | 23 | fn symbol(self, db: &'db dyn crate::Db) -> Self::Output; 24 | } 25 | 26 | pub trait CheckUseItems<'db> { 27 | fn check_use_items(self, db: &'db dyn crate::Db); 28 | } 29 | 30 | pub trait CheckedBody<'db> { 31 | fn checked_body(self, db: &'db dyn crate::Db) -> Option>; 32 | } 33 | 34 | #[salsa::tracked] 35 | impl<'db> CheckedBody<'db> for SymFunction<'db> { 36 | #[salsa::tracked] 37 | fn checked_body(self, db: &'db dyn crate::Db) -> Option> { 38 | crate::check::functions::check_function_body(db, self) 39 | } 40 | } 41 | 42 | pub trait CheckedFieldTy<'db> { 43 | /// See [`crate::check::fields::check_field`][] 44 | fn checked_field_ty(self, db: &'db dyn crate::Db) -> Binder<'db, Binder<'db, SymTy<'db>>>; 45 | } 46 | 47 | #[salsa::tracked] 48 | impl<'db> CheckedFieldTy<'db> for SymField<'db> { 49 | #[salsa::tracked] 50 | fn checked_field_ty(self, db: &'db dyn crate::Db) -> Binder<'db, Binder<'db, SymTy<'db>>> { 51 | match crate::check::fields::check_field(db, self) { 52 | Ok(v) => v, 53 | Err(reported) => crate::check::fields::field_err_ty(db, self, reported), 54 | } 55 | } 56 | } 57 | pub trait CheckedSignature<'db> { 58 | fn checked_signature(self, db: &'db dyn crate::Db) -> Errors>; 59 | } 60 | 61 | #[salsa::tracked] 62 | impl<'db> CheckedSignature<'db> for SymFunction<'db> { 63 | #[salsa::tracked] 64 | fn checked_signature(self, db: &'db dyn crate::Db) -> Errors> { 65 | match crate::check::signature::check_function_signature(db, self) { 66 | Ok(s) => Ok(s), 67 | Err(e) => Err(e), 68 | } 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /components/dada-lang/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-lang" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [lib] 7 | doctest = false 8 | 9 | [dependencies] 10 | annotate-snippets = { workspace = true } 11 | dada-check = { version = "0.1.0", path = "../dada-check" } 12 | dada-compiler = { version = "0.1.0", path = "../dada-compiler" } 13 | dada-debug = { version = "0.1.0", path = "../dada-debug" } 14 | dada-ir-ast = { version = "0.1.0", path = "../dada-ir-ast" } 15 | dada-parser = { version = "0.1.0", path = "../dada-parser" } 16 | dada-util = { version = "0.1.0", path = "../dada-util" } 17 | indicatif = "0.17.8" 18 | lazy_static = "1.5.0" 19 | prettydiff = "0.7.0" 20 | rayon = "1.10.0" 21 | regex = "1.10.6" 22 | salsa = { workspace = true } 23 | structopt = "0.3.26" # derive doesn't work without this 24 | thiserror = "1.0.63" 25 | url = "2.5.3" 26 | walkdir = "2.5.0" 27 | -------------------------------------------------------------------------------- /components/dada-lang/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![feature(panic_payload_as_str)] 2 | #![doc = include_str!("../docs/overview.md")] 3 | 4 | use dada_debug::DebugOptions; 5 | use dada_ir_ast::diagnostic::RenderOptions; 6 | use dada_util::Fallible; 7 | use structopt::StructOpt; 8 | 9 | mod main_lib; 10 | 11 | use dada_compiler::Db; 12 | 13 | #[derive(Debug, StructOpt)] 14 | pub struct Options { 15 | #[structopt(flatten)] 16 | global_options: GlobalOptions, 17 | 18 | #[structopt(subcommand)] 19 | command: Command, 20 | } 21 | 22 | #[derive(Debug, StructOpt)] 23 | pub struct GlobalOptions { 24 | #[structopt(long)] 25 | no_color: bool, 26 | } 27 | 28 | impl GlobalOptions { 29 | pub(crate) fn test_options() -> Self { 30 
| Self { no_color: false } 31 | } 32 | 33 | pub(crate) fn render_opts(&self) -> RenderOptions { 34 | RenderOptions { 35 | no_color: self.no_color, 36 | } 37 | } 38 | } 39 | 40 | #[derive(Debug, StructOpt)] 41 | pub enum Command { 42 | Compile { 43 | #[structopt(flatten)] 44 | compile_options: CompileOptions, 45 | }, 46 | 47 | Run { 48 | #[structopt(flatten)] 49 | run_options: RunOptions, 50 | }, 51 | 52 | Test { 53 | #[structopt(flatten)] 54 | test_options: TestOptions, 55 | }, 56 | 57 | Debug { 58 | #[structopt(flatten)] 59 | debug_options: DebugOptions, 60 | 61 | #[structopt(flatten)] 62 | compile_options: CompileOptions, 63 | }, 64 | } 65 | 66 | #[derive(Debug, StructOpt)] 67 | pub struct CompileOptions { 68 | /// Main source file to compile. 69 | input: String, 70 | } 71 | 72 | #[derive(Debug, StructOpt)] 73 | pub struct RunOptions { 74 | #[structopt(flatten)] 75 | compile_options: CompileOptions, 76 | } 77 | 78 | #[derive(Debug, StructOpt)] 79 | pub struct TestOptions { 80 | /// Print each test as we run it 81 | #[structopt(long, short)] 82 | verbose: bool, 83 | 84 | /// Test file(s) or directory 85 | inputs: Vec, 86 | } 87 | 88 | impl Options { 89 | pub fn main(self) -> Fallible<()> { 90 | main_lib::Main::new(self.global_options).run(self.command) 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /components/dada-lang/src/main_lib.rs: -------------------------------------------------------------------------------- 1 | use dada_util::Fallible; 2 | 3 | use crate::{Command, GlobalOptions}; 4 | 5 | mod compile; 6 | mod run; 7 | mod test; 8 | 9 | pub struct Main { 10 | #[allow(dead_code)] 11 | global_options: GlobalOptions, 12 | } 13 | 14 | impl Main { 15 | pub fn new(global_options: GlobalOptions) -> Self { 16 | Self { global_options } 17 | } 18 | 19 | pub fn run(mut self, command: Command) -> Fallible<()> { 20 | match command { 21 | Command::Compile { compile_options } => self.compile(&compile_options, None)?, 22 | Command::Test { test_options } => self.test(test_options)?, 23 | Command::Run { run_options } => self.run_command(&run_options)?, 24 | Command::Debug { 25 | debug_options, 26 | compile_options, 27 | } => { 28 | let mut debug_server = debug_options.to_server(); 29 | let debug_tx = debug_server.launch(); 30 | eprintln!( 31 | "serving debug results on http://localhost:{port}/", 32 | port = debug_options.port 33 | ); 34 | self.compile(&compile_options, Some(debug_tx))?; 35 | eprintln!( 36 | "compilation complete. 
Debug at http://localhost:{port}/", 37 | port = debug_options.port 38 | ); 39 | debug_server.block_on()?; 40 | } 41 | } 42 | Ok(()) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /components/dada-lang/src/main_lib/compile.rs: -------------------------------------------------------------------------------- 1 | use std::{path::Path, sync::mpsc::Sender}; 2 | 3 | use dada_compiler::{Compiler, RealFs}; 4 | use dada_ir_ast::{DebugEvent, diagnostic::Level}; 5 | use dada_util::{Fallible, bail}; 6 | 7 | use crate::CompileOptions; 8 | 9 | use super::Main; 10 | 11 | impl Main { 12 | pub(super) fn compile( 13 | &mut self, 14 | compile_options: &CompileOptions, 15 | debug_tx: Option>, 16 | ) -> Fallible<()> { 17 | let debug_mode = debug_tx.is_some(); 18 | let mut compiler = Compiler::new(RealFs::default(), debug_tx); 19 | let source_url = Path::new(&compile_options.input); 20 | let source_file = compiler.load_source_file(source_url)?; 21 | let diagnostics = compiler.check_all(source_file); 22 | 23 | for diagnostic in &diagnostics { 24 | eprintln!( 25 | "{}", 26 | diagnostic.render(&compiler, &self.global_options.render_opts()) 27 | ); 28 | } 29 | 30 | // In debug mode, diagnostics get reported to the `debug_tx` and aren't considered errors. 31 | if !debug_mode && diagnostics.iter().any(|d| d.level >= Level::Error) { 32 | bail!("compilation failed due to errors"); 33 | } 34 | 35 | Ok(()) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /components/dada-lang/src/main_lib/run.rs: -------------------------------------------------------------------------------- 1 | use std::path::Path; 2 | 3 | use dada_compiler::{Compiler, RealFs}; 4 | use dada_util::Fallible; 5 | 6 | use crate::RunOptions; 7 | 8 | use super::Main; 9 | 10 | impl Main { 11 | pub(super) fn run_command(&mut self, run_options: &RunOptions) -> Fallible<()> { 12 | let mut compiler = Compiler::new(RealFs::default(), None); 13 | let source_url = Path::new(&run_options.compile_options.input); 14 | let source_file = compiler.load_source_file(source_url)?; 15 | let bytes = compiler.codegen_main_fn(source_file); 16 | let diagnostics = compiler.check_all(source_file); 17 | 18 | for diagnostic in &diagnostics { 19 | eprintln!( 20 | "{}", 21 | diagnostic.render(&compiler, &self.global_options.render_opts()) 22 | ); 23 | } 24 | 25 | let _ = bytes; 26 | 27 | Ok(()) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /components/dada-lang/src/main_lib/test/timeout_warning.rs: -------------------------------------------------------------------------------- 1 | use std::{path::Path, time::Duration}; 2 | 3 | pub(super) fn timeout_warning(test_path: &Path, op: impl FnOnce() -> R) -> R { 4 | let mut sec = 5; 5 | std::thread::scope(|scope| { 6 | let (tx, rx) = std::sync::mpsc::channel(); 7 | scope.spawn(move || { 8 | loop { 9 | match rx.recv_timeout(Duration::from_secs(sec)) { 10 | Ok(()) => return, 11 | Err(_) => { 12 | eprintln!("test `{test_path:?}` has been running for over {sec} seconds"); 13 | sec = (sec * 2).max(120); 14 | } 15 | } 16 | } 17 | }); 18 | 19 | let r = op(); 20 | tx.send(()).unwrap(); 21 | r 22 | }) 23 | } 24 | -------------------------------------------------------------------------------- /components/dada-lsp-server/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-lsp-server" 3 | version.workspace = true 4 | 
edition.workspace = true 5 | 6 | [dependencies] 7 | crossbeam-channel = "0.5.13" 8 | dada-compiler = { version = "0.1.0", path = "../dada-compiler" } 9 | dada-ir-ast = { version = "0.1.0", path = "../dada-ir-ast" } 10 | dada-ir-sym = { version = "0.1.0", path = "../dada-ir-sym" } 11 | dada-lang = { version = "0.1.0", path = "../dada-lang" } 12 | dada-probe = { version = "0.1.0", path = "../dada-probe" } 13 | dada-util = { version = "0.1.0", path = "../dada-util" } 14 | futures = "0.3.31" 15 | lsp-server = "0.7.7" 16 | lsp-types = "0.97.0" 17 | salsa = { workspace = true } 18 | serde = { version = "1.0.215", features = ["derive"] } 19 | serde_json = "1.0.132" 20 | url = "2.5.3" 21 | -------------------------------------------------------------------------------- /components/dada-parser/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-parser" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | dada-ir-ast = { version = "0.1.0", path = "../dada-ir-ast" } 8 | dada-util = { version = "0.1.0", path = "../dada-util" } 9 | salsa = { workspace = true } 10 | -------------------------------------------------------------------------------- /components/dada-parser/src/miscellaneous.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::ast::{AstGenericTerm, AstPath, AstPathKind}; 2 | 3 | use crate::tokenizer::operator; 4 | 5 | use super::{Expected, Parse, ParseFail, Parser}; 6 | 7 | impl<'db> Parse<'db> for AstPath<'db> { 8 | type Output = Self; 9 | 10 | fn opt_parse( 11 | db: &'db dyn crate::Db, 12 | parser: &mut Parser<'_, 'db>, 13 | ) -> Result, ParseFail<'db>> { 14 | let Ok(id) = parser.eat_id() else { 15 | return Ok(None); 16 | }; 17 | let mut path = AstPath::new(db, AstPathKind::Identifier(id)); 18 | 19 | loop { 20 | if parser.eat_op(operator::DOT).is_ok() { 21 | let id = parser.eat_id()?; 22 | path = AstPath::new(db, AstPathKind::Member { path, id }); 23 | continue; 24 | } 25 | 26 | if let Some(args) = AstGenericTerm::opt_parse_delimited( 27 | db, 28 | parser, 29 | crate::tokenizer::Delimiter::SquareBrackets, 30 | AstGenericTerm::eat_comma, 31 | )? 
{ 32 | path = AstPath::new(db, AstPathKind::GenericArgs { path, args }); 33 | continue; 34 | } 35 | 36 | return Ok(Some(path)); 37 | } 38 | } 39 | 40 | fn expected() -> Expected { 41 | Expected::Nonterminal("path") 42 | } 43 | } 44 | 45 | pub trait OrOptParse<'db, Variant1> { 46 | fn or_opt_parse( 47 | self, 48 | db: &'db dyn crate::Db, 49 | parser: &mut Parser<'_, 'db>, 50 | ) -> Result, ParseFail<'db>> 51 | where 52 | Variant1: Into, 53 | Variant2: Parse<'db, Output: Into>; 54 | } 55 | 56 | impl<'db, Variant1> OrOptParse<'db, Variant1> for Result, ParseFail<'db>> { 57 | fn or_opt_parse( 58 | self, 59 | db: &'db dyn crate::Db, 60 | parser: &mut Parser<'_, 'db>, 61 | ) -> Result, ParseFail<'db>> 62 | where 63 | Variant1: Into, 64 | Variant2: Parse<'db, Output: Into>, 65 | { 66 | match self { 67 | Ok(Some(v1)) => Ok(Some(v1.into())), 68 | Ok(None) => match Variant2::opt_parse(db, parser) { 69 | Ok(Some(v2)) => Ok(Some(v2.into())), 70 | Ok(None) => Ok(None), 71 | Err(err) => Err(err), 72 | }, 73 | Err(err) => Err(err), 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /components/dada-parser/src/prelude.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::ast::{AstBlock, AstGenericTerm, AstMember}; 2 | 3 | use super::*; 4 | 5 | /// Given a [`SourceFile`], parse its members 6 | pub trait SourceFileParse { 7 | fn parse(self, db: &dyn crate::Db) -> AstModule<'_>; 8 | } 9 | 10 | /// Given a [`dada_ir_ast::ast::AstAggregate`], parse its members 11 | pub trait ClassItemMembers<'db> { 12 | fn members(self, db: &'db dyn crate::Db) -> &'db SpanVec<'db, AstMember<'db>>; 13 | } 14 | 15 | /// Given a [`dada_ir_ast::ast::AstFunction`], parse its associated body into a block 16 | pub trait FunctionBlock<'db> { 17 | fn body_block(self, db: &'db dyn crate::Db) -> Option>; 18 | } 19 | 20 | /// Given a [`SquareBracketArgs`], parse its associated body into a block 21 | pub trait SquareBracketArgs<'db> { 22 | fn parse_as_generics(self, db: &'db dyn crate::Db) -> SpanVec<'db, AstGenericTerm<'db>>; 23 | } 24 | -------------------------------------------------------------------------------- /components/dada-parser/src/square_bracket_args.rs: -------------------------------------------------------------------------------- 1 | use dada_ir_ast::ast::{AstGenericTerm, SpanVec, SquareBracketArgs}; 2 | 3 | use crate::Parser; 4 | 5 | #[salsa::tracked] 6 | impl<'db> crate::prelude::SquareBracketArgs<'db> for SquareBracketArgs<'db> { 7 | #[salsa::tracked] 8 | fn parse_as_generics( 9 | self, 10 | db: &'db dyn crate::Db, 11 | ) -> SpanVec<'db, dada_ir_ast::ast::AstGenericTerm<'db>> { 12 | let deferred = self.deferred(db); 13 | let anchor = deferred.span.anchor; 14 | Parser::deferred(db, anchor, deferred, |parser| { 15 | parser.parse_many_and_report_diagnostics::>(db) 16 | }) 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /components/dada-probe/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-probe" 3 | version.workspace = true 4 | repository.workspace = true 5 | edition.workspace = true 6 | 7 | [lints.clippy] 8 | needless_lifetimes = "allow" 9 | 10 | [dependencies] 11 | dada-ir-ast = { version = "0.1.0", path = "../dada-ir-ast" } 12 | dada-ir-sym = { version = "0.1.0", path = "../dada-ir-sym" } 13 | dada-parser = { version = "0.1.0", path = "../dada-parser" } 14 | dada-util = { version = 
"0.1.0", path = "../dada-util" } 15 | salsa = { workspace = true } 16 | -------------------------------------------------------------------------------- /components/dada-util-procmacro/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-util-procmacro" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | proc-macro2 = "1.0" 8 | quote = "1.0" 9 | syn = { version = "2.0", features = ["full", "visit", "visit-mut"] } 10 | synstructure = "0.13" 11 | 12 | [lib] 13 | proc-macro = true 14 | -------------------------------------------------------------------------------- /components/dada-util-procmacro/src/boxed_async_fn.rs: -------------------------------------------------------------------------------- 1 | mod parse; 2 | 3 | use proc_macro::TokenStream; 4 | use quote::quote; 5 | use syn::parse_macro_input; 6 | 7 | /// Transforms an async fn to return a `Box>`. 8 | /// 9 | /// Originally based on the [`async_recursion`](https://crates.io/crates/async-recursion) crate 10 | /// authored by Robert Usher and licensed under MIT/APACHE-2.0. 11 | pub fn boxed_async_fn(args: TokenStream, input: TokenStream) -> TokenStream { 12 | let parse::AsyncItem(mut item) = parse_macro_input!(input as parse::AsyncItem); 13 | let _args = parse_macro_input!(args as syn::parse::Nothing); 14 | 15 | let block = item.block; 16 | item.block = syn::parse2(quote!({Box::pin(async move #block).await})).unwrap(); 17 | 18 | TokenStream::from(quote!(#item)) 19 | } 20 | -------------------------------------------------------------------------------- /components/dada-util-procmacro/src/boxed_async_fn/parse.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::Span; 2 | use syn::{ 3 | ItemFn, 4 | parse::{Error, Parse, ParseStream, Result}, 5 | }; 6 | 7 | pub struct AsyncItem(pub ItemFn); 8 | 9 | impl Parse for AsyncItem { 10 | fn parse(input: ParseStream) -> Result { 11 | let item: ItemFn = input.parse()?; 12 | 13 | // Check that this is an async function 14 | if item.sig.asyncness.is_none() { 15 | return Err(Error::new(Span::call_site(), "expected an async function")); 16 | } 17 | 18 | Ok(AsyncItem(item)) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /components/dada-util-procmacro/src/lib.rs: -------------------------------------------------------------------------------- 1 | use proc_macro::TokenStream; 2 | use quote::quote; 3 | use synstructure::decl_derive; 4 | 5 | mod boxed_async_fn; 6 | mod salsa_serialize; 7 | 8 | decl_derive!([FromImpls, attributes(no_from_impl)] => from_impls_derive); 9 | decl_derive!([SalsaSerialize] => salsa_serialize::salsa_serialize_derive); 10 | 11 | fn from_impls_derive(s: synstructure::Structure) -> TokenStream { 12 | let result = s 13 | .variants() 14 | .iter() 15 | .map(|variant| { 16 | let variant_name = &variant.ast().ident; 17 | let fields = &variant.ast().fields; 18 | 19 | for attr in variant 20 | .ast() 21 | .attrs 22 | .iter() 23 | .filter(|a| a.meta.path().is_ident("no_from_impl")) 24 | { 25 | if attr.meta.require_path_only().is_err() { 26 | return Err(syn::Error::new_spanned( 27 | attr, 28 | "`no_from_impl` does not accept arguments", 29 | )); 30 | } 31 | } 32 | 33 | if variant 34 | .ast() 35 | .attrs 36 | .iter() 37 | .any(|a| a.meta.path().is_ident("no_from_impl")) 38 | { 39 | return Ok(quote!()); 40 | } 41 | 42 | if fields.len() != 1 { 43 | return Err(syn::Error::new_spanned( 44 | 
variant.ast().ident, 45 | "each variant must have exactly one field", 46 | )); 47 | } 48 | 49 | let field_ty = &fields.iter().next().unwrap().ty; 50 | Ok(s.gen_impl(quote! { 51 | gen impl From<#field_ty> for @Self { 52 | fn from(value: #field_ty) -> Self { 53 | Self::#variant_name(value) 54 | } 55 | } 56 | 57 | })) 58 | }) 59 | .collect::>(); 60 | 61 | match result { 62 | Ok(tokens) => tokens.into(), 63 | Err(err) => err.into_compile_error().into(), 64 | } 65 | } 66 | 67 | /// Transforms an async fn to return a `Box>`. 68 | /// 69 | /// Adapted from the [`async_recursion`](https://crates.io/crates/async-recursion) crate authored by 70 | /// Robert Usher and licensed under MIT/APACHE-2.0. 71 | #[proc_macro_attribute] 72 | pub fn boxed_async_fn(args: TokenStream, input: TokenStream) -> TokenStream { 73 | boxed_async_fn::boxed_async_fn(args, input) 74 | } 75 | -------------------------------------------------------------------------------- /components/dada-util/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dada-util" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | [dependencies] 7 | anyhow = { workspace = true } 8 | dada-util-procmacro = { version = "0.1.0", path = "../dada-util-procmacro" } 9 | erased-serde = "0.4.5" 10 | futures = "0.3.31" 11 | fxhash = "0.2.1" 12 | imstr = "0.2.0" 13 | indexmap = "2.6.0" 14 | lazy_static = "1.5.0" 15 | salsa = { workspace = true } 16 | serde = { version = "1.0.216", features = ["derive"] } 17 | serde_json = "1.0.133" 18 | 19 | -------------------------------------------------------------------------------- /components/dada-util/src/arena.rs: -------------------------------------------------------------------------------- 1 | use std::{any::Any, cell::RefCell, pin::Pin}; 2 | 3 | /// A really dumb arena implementation intended not for efficiency 4 | /// but rather to prolong lifetimes. 5 | pub struct Arena { 6 | /// List of values inserted into the arena. 7 | /// They cannot be moved out from the box or dropped until the arena is dropped. 8 | /// 9 | /// The use of Box is needed to ensure the address of the value is stable. 10 | /// The `Pin` and `dyn Any` parts are just for fun and/or convenience. 11 | /// The pin is expressing the "don't move" constraint but is neither necessary 12 | /// nor sufficient for soundness (it doesn't prevent drops), 13 | /// and the `dyn Any` is just to capture the destructor but we don't do 14 | /// any downcasting. 15 | data: RefCell>>>, 16 | } 17 | 18 | impl Default for Arena { 19 | fn default() -> Self { 20 | Self::new() 21 | } 22 | } 23 | 24 | impl Arena { 25 | pub fn new() -> Self { 26 | Self { 27 | data: Default::default(), 28 | } 29 | } 30 | 31 | pub fn insert(&self, value: T) -> &T 32 | where 33 | T: Any, 34 | { 35 | let data = Box::pin(value); 36 | let ptr: *const T = &*data; 37 | self.data.borrow_mut().push(data); 38 | 39 | // UNSAFE: WE don't ever remove anything from `self.data` until self is dropped. 40 | // UNSAFE: The value is guaranteed to be valid for the lifetime of `self` 41 | unsafe { &*ptr } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /components/dada-util/src/fixed_depth_json_README.md: -------------------------------------------------------------------------------- 1 | # Debug Field Deny List 2 | 3 | This feature allows you to exclude specific fields from the JSON debug output by adding them to a global deny list. 
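For reference, the `FromImpls` derive from `dada-util-procmacro` above generates a `From` impl for every single-field variant, while `#[no_from_impl]` opts a variant out. A hedged sketch of a downstream enum (this particular enum is invented for illustration and is not in the codebase):

```rust
use dada_util::FromImpls;

// Hedged sketch: each single-field variant below gets `impl From<FieldTy> for Term<'db>`,
// except the one marked `#[no_from_impl]`.
#[derive(FromImpls)]
enum Term<'db> {
    Ty(SymTy<'db>),
    Perm(SymPerm<'db>),
    #[no_from_impl] // conversion from `Reported` stays explicit
    Error(Reported),
}
```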
4 | 5 | ## How It Works 6 | 7 | The `DEBUG_FIELD_DENY_LIST` in `fixed_depth_json.rs` is a global list of field names that will be excluded from serialization. When a field with a name in this list is encountered during serialization, it will be skipped entirely, reducing noise in the debug output. 8 | 9 | ## Usage 10 | 11 | To exclude fields from the debug output, simply add their names to the `DEBUG_FIELD_DENY_LIST` in `components/dada-util/src/fixed_depth_json.rs`: 12 | 13 | ```rust 14 | lazy_static! { 15 | pub static ref DEBUG_FIELD_DENY_LIST: HashSet<&'static str> = { 16 | let mut set = HashSet::new(); 17 | 18 | // Add field names to exclude here 19 | set.insert("compiler_location"); // Exclude entire location info 20 | set.insert("file"); // Or just exclude file paths 21 | set.insert("line"); // Or just exclude line numbers 22 | set.insert("column"); // Or just exclude column numbers 23 | 24 | // Add any other fields you want to exclude 25 | set.insert("noisy_field_1"); 26 | set.insert("noisy_field_2"); 27 | 28 | set 29 | }; 30 | } 31 | ``` 32 | 33 | ## Common Fields to Exclude 34 | 35 | Here are some fields you might want to consider excluding: 36 | 37 | 1. **Location Information**: 38 | - `compiler_location` - The entire location struct 39 | - `file` - Just the file path 40 | - `line` - Just the line number 41 | - `column` - Just the column number 42 | 43 | 2. **Internal Details**: 44 | - Fields that contain internal implementation details that aren't useful for debugging 45 | 46 | 3. **Large Data Structures**: 47 | - Fields that contain large collections or deeply nested structures that make the output hard to read 48 | 49 | ## Benefits 50 | 51 | - **Cleaner Output**: Reduces noise in the debug output, making it easier to focus on relevant information 52 | - **Centralized Configuration**: All excluded fields are defined in one place 53 | - **No Struct Modifications**: You don't need to modify any struct definitions with `#[serde(skip)]` attributes 54 | - **Easy Maintenance**: Simple to add or remove fields from the deny list as needed 55 | -------------------------------------------------------------------------------- /components/dada-util/src/lib.rs: -------------------------------------------------------------------------------- 1 | use std::ops::AsyncFnOnce; 2 | 3 | pub use fxhash::FxHashMap as Map; 4 | pub use fxhash::FxHashSet as Set; 5 | pub use imstr::ImString as Text; 6 | pub type IndexMap = indexmap::IndexMap; 7 | 8 | pub type Fallible = anyhow::Result; 9 | 10 | pub use anyhow::Context; 11 | pub use anyhow::Error; 12 | pub use anyhow::anyhow; 13 | pub use anyhow::bail; 14 | 15 | pub use dada_util_procmacro::*; 16 | 17 | pub mod typedvec; 18 | pub mod vecset; 19 | 20 | pub mod fixed_depth_json; 21 | 22 | #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] 23 | pub enum Never {} 24 | 25 | unsafe impl salsa::Update for Never { 26 | unsafe fn maybe_update(_old_pointer: *mut Self, _new_value: Self) -> bool { 27 | unreachable!() 28 | } 29 | } 30 | 31 | pub mod arena; 32 | 33 | pub mod log; 34 | 35 | pub async fn indirect(op: impl AsyncFnOnce() -> T) -> T { 36 | let boxed_future = futures::future::FutureExt::boxed_local(op()); 37 | boxed_future.await 38 | } 39 | 40 | pub mod vecext; 41 | -------------------------------------------------------------------------------- /components/dada-util/src/typedvec.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | marker::PhantomData, 3 | ops::{Deref, DerefMut}, 4 | }; 5 | 
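The `indirect` helper in `dada-util/src/lib.rs` above boxes the future produced by its closure, which keeps a caller's own future small when it calls into a deep async call chain. A hedged usage sketch (both functions are invented for illustration):

```rust
use dada_util::indirect;

async fn heavy_analysis(input: u32) -> u32 {
    // stand-in for a deeply nested async state machine
    input * 2
}

async fn analyze_all(inputs: &[u32]) -> u32 {
    let mut total = 0;
    for &i in inputs {
        // Boxing through `indirect` keeps `analyze_all`'s future size independent
        // of `heavy_analysis`'s state machine.
        total += indirect(async move || heavy_analysis(i).await).await;
    }
    total
}
```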
6 | pub struct TypedVec<I, T> { 7 | data: Vec<T>, 8 | phantom: PhantomData<I>, 9 | } 10 | 11 | pub trait TypedVecIndex { 12 | fn into_usize(self) -> usize; 13 | fn from_usize(v: usize) -> Self; 14 | } 15 | 16 | impl<I, T> Default for TypedVec<I, T> { 17 | fn default() -> Self { 18 | TypedVec { 19 | data: Vec::new(), 20 | phantom: PhantomData, 21 | } 22 | } 23 | } 24 | 25 | impl<I, T> From<Vec<T>> for TypedVec<I, T> { 26 | fn from(data: Vec<T>) -> Self { 27 | TypedVec { 28 | data, 29 | phantom: PhantomData, 30 | } 31 | } 32 | } 33 | 34 | impl<I, T> From<TypedVec<I, T>> for Vec<T> { 35 | fn from(data: TypedVec<I, T>) -> Self { 36 | data.data 37 | } 38 | } 39 | 40 | impl<I, T> TypedVec<I, T> { 41 | pub fn new() -> Self { 42 | Self::default() 43 | } 44 | 45 | pub fn into_data(self) -> Vec<T> { 46 | self.data 47 | } 48 | } 49 | 50 | impl<I: TypedVecIndex, T> std::ops::Index<I> for TypedVec<I, T> { 51 | type Output = T; 52 | 53 | fn index(&self, index: I) -> &Self::Output { 54 | &self.data[index.into_usize()] 55 | } 56 | } 57 | 58 | impl<I: TypedVecIndex, T> std::ops::IndexMut<I> for TypedVec<I, T> { 59 | fn index_mut(&mut self, index: I) -> &mut Self::Output { 60 | &mut self.data[index.into_usize()] 61 | } 62 | } 63 | 64 | impl<I, T> Deref for TypedVec<I, T> { 65 | type Target = Vec<T>; 66 | 67 | fn deref(&self) -> &Self::Target { 68 | &self.data 69 | } 70 | } 71 | 72 | impl<I, T> DerefMut for TypedVec<I, T> { 73 | fn deref_mut(&mut self) -> &mut Self::Target { 74 | &mut self.data 75 | } 76 | } 77 | 78 | impl<I, T> Extend<T> for TypedVec<I, T> { 79 | fn extend<TI: IntoIterator<Item = T>>(&mut self, iter: TI) { 80 | self.data.extend(iter); 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /components/dada-util/src/vecext.rs: -------------------------------------------------------------------------------- 1 | pub trait VecExt<T> { 2 | fn push_if_not_contained(&mut self, element: T) -> bool 3 | where 4 | T: PartialEq; 5 | } 6 | 7 | impl<T> VecExt<T> for Vec<T> { 8 | fn push_if_not_contained(&mut self, element: T) -> bool 9 | where 10 | T: PartialEq, 11 | { 12 | if self.contains(&element) { 13 | false 14 | } else { 15 | self.push(element); 16 | true 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /components/vscode/.github/workflows/build-extension.yml: -------------------------------------------------------------------------------- 1 | name: Build Extension 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | paths: 7 | - 'components/vscode/**' 8 | - 'components/dada-lsp-server/**' 9 | pull_request: 10 | branches: [ main ] 11 | paths: 12 | - 'components/vscode/**' 13 | - 'components/dada-lsp-server/**' 14 | workflow_dispatch: 15 | 16 | jobs: 17 | build-server: 18 | strategy: 19 | matrix: 20 | include: 21 | - os: ubuntu-latest 22 | target: linux-x64 23 | artifact_name: dada-lsp-server 24 | - os: macos-latest 25 | target: darwin-x64 26 | artifact_name: dada-lsp-server 27 | - os: windows-latest 28 | target: win32-x64 29 | artifact_name: dada-lsp-server.exe 30 | 31 | runs-on: ${{ matrix.os }} 32 | 33 | steps: 34 | - uses: actions/checkout@v3 35 | 36 | - name: Set up Rust 37 | uses: actions-rs/toolchain@v1 38 | with: 39 | toolchain: stable 40 | 41 | - name: Build server 42 | run: cargo build --release -p dada-lsp-server 43 | 44 | - name: Create directory 45 | run: mkdir -p components/vscode/bin/${{ matrix.target }} 46 | shell: bash 47 | 48 | - name: Copy binary (Unix) 49 | if: matrix.os != 'windows-latest' 50 | run: | 51 | cp target/release/${{ matrix.artifact_name }} components/vscode/bin/${{ matrix.target }}/ 52 | 53 | - name: Copy binary (Windows) 54 | if: matrix.os == 'windows-latest' 55 | run: | 56 | copy 
target\release\${{ matrix.artifact_name }} components\vscode\bin\${{ matrix.target }}\ 57 | shell: cmd 58 | 59 | - name: Upload binary 60 | uses: actions/upload-artifact@v3 61 | with: 62 | name: ${{ matrix.target }}-binary 63 | path: components/vscode/bin/${{ matrix.target }}/${{ matrix.artifact_name }} 64 | 65 | package-extension: 66 | needs: build-server 67 | runs-on: ubuntu-latest 68 | 69 | steps: 70 | - uses: actions/checkout@v3 71 | 72 | - name: Setup Node.js 73 | uses: actions/setup-node@v3 74 | with: 75 | node-version: '16' 76 | 77 | - name: Download all binaries 78 | uses: actions/download-artifact@v3 79 | with: 80 | path: components/vscode/bin 81 | 82 | - name: Display structure of downloaded files 83 | run: ls -R components/vscode/bin 84 | shell: bash 85 | 86 | - name: Install dependencies 87 | run: cd components/vscode && npm install 88 | 89 | - name: Compile TypeScript 90 | run: cd components/vscode && npm run compile 91 | 92 | - name: Package extension 93 | run: cd components/vscode && npx vsce package 94 | 95 | - name: Upload VSIX 96 | uses: actions/upload-artifact@v3 97 | with: 98 | name: dada-extension 99 | path: components/vscode/*.vsix 100 | -------------------------------------------------------------------------------- /components/vscode/.gitignore: -------------------------------------------------------------------------------- 1 | out 2 | node_modules 3 | .vscode-test/ 4 | *.vsix 5 | -------------------------------------------------------------------------------- /components/vscode/.vscodeignore: -------------------------------------------------------------------------------- 1 | .vscode/** 2 | .vscode-test/** 3 | .github/** 4 | scripts/** 5 | .gitignore 6 | .yarnrc 7 | vsc-extension-quickstart.md 8 | **/tsconfig.json 9 | **/.eslintrc.json 10 | **/*.map 11 | node_modules/** 12 | -------------------------------------------------------------------------------- /components/vscode/README.md: -------------------------------------------------------------------------------- 1 | # Dada Language Support for Visual Studio Code 2 | 3 | This extension provides language support for the Dada programming language. 4 | 5 | ## Features 6 | 7 | - Syntax highlighting for `.dada` files 8 | - Language server integration providing: 9 | - Error checking and diagnostics 10 | - Hover information 11 | - Go to definition 12 | 13 | ## Requirements 14 | 15 | The extension includes pre-compiled binaries of the Dada language server for common platforms (Windows, macOS, and Linux). If a binary for your platform is not included, the extension will attempt to: 16 | 17 | 1. Use a custom path specified in the settings 18 | 2. Build and run the server using Cargo (requires Rust to be installed) 19 | 3. Find the server in your PATH 20 | 21 | ## Extension Settings 22 | 23 | This extension contributes the following settings: 24 | 25 | * `dada.serverPath`: Path to the Dada language server executable (optional) 26 | * `dada.trace.server`: Traces the communication between VS Code and the Dada language server 27 | 28 | ## Commands 29 | 30 | * `Dada: Restart Language Server`: Restarts the language server if it encounters issues 31 | 32 | ## Development 33 | 34 | ### Building the Extension 35 | 36 | 1. Install dependencies: 37 | ``` 38 | npm install 39 | ``` 40 | 41 | 2. Compile TypeScript: 42 | ``` 43 | npm run compile 44 | ``` 45 | 46 | 3. 
Package the extension: 47 | ``` 48 | npx vsce package 49 | ``` 50 | 51 | ### Adding Language Server Binaries 52 | 53 | Pre-compiled binaries for the language server should be placed in the appropriate platform directory under `bin/`: 54 | 55 | - `bin/darwin-x64/` - macOS Intel 56 | - `bin/darwin-arm64/` - macOS Apple Silicon 57 | - `bin/linux-x64/` - Linux 58 | - `bin/win32-x64/` - Windows 59 | 60 | During development, if no binary is found, the extension will attempt to use `cargo run -p dada-lsp-server --` to build and run the server on-demand. 61 | 62 | ## Known Issues 63 | 64 | This is an early version of the extension and may have some limitations. 65 | 66 | ## Release Notes 67 | 68 | ### 0.1.0 69 | 70 | Initial release of the Dada language extension. 71 | -------------------------------------------------------------------------------- /components/vscode/bin/README.md: -------------------------------------------------------------------------------- 1 | # Dada Language Server Binaries 2 | 3 | This directory contains pre-compiled binaries of the Dada Language Server for different platforms. 4 | 5 | ## Directory Structure 6 | 7 | - `darwin-x64/`: macOS Intel binaries 8 | - `darwin-arm64/`: macOS Apple Silicon binaries 9 | - `linux-x64/`: Linux x64 binaries 10 | - `win32-x64/`: Windows x64 binaries 11 | 12 | ## Adding Binaries 13 | 14 | To add a binary for a specific platform: 15 | 16 | 1. Build the Dada Language Server for the target platform: 17 | ``` 18 | cargo build --release -p dada-lsp-server 19 | ``` 20 | 21 | 2. Copy the binary to the appropriate directory: 22 | - For macOS: `cp target/release/dada-lsp-server bin/darwin-x64/` 23 | - For Linux: `cp target/release/dada-lsp-server bin/linux-x64/` 24 | - For Windows: `copy target\release\dada-lsp-server.exe bin\win32-x64\` 25 | 26 | ## Automated Building 27 | 28 | For production releases, it's recommended to set up a CI/CD pipeline to automatically build binaries for all supported platforms. 29 | 30 | ## Development 31 | 32 | During development, if no binary is found, the extension will attempt to use `cargo run -p dada-lsp-server --` to build and run the server on-demand. 
33 | -------------------------------------------------------------------------------- /components/vscode/images/icon.svg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dada-lang/dada/8bdf2b1cec0192d0ad1f67c35e2cb8472d1a3e1d/components/vscode/images/icon.svg -------------------------------------------------------------------------------- /components/vscode/language-configuration.json: -------------------------------------------------------------------------------- 1 | { 2 | "comments": { 3 | "lineComment": "//", 4 | "blockComment": ["/*", "*/"] 5 | }, 6 | "brackets": [ 7 | ["{", "}"], 8 | ["[", "]"], 9 | ["(", ")"] 10 | ], 11 | "autoClosingPairs": [ 12 | { "open": "{", "close": "}" }, 13 | { "open": "[", "close": "]" }, 14 | { "open": "(", "close": ")" }, 15 | { "open": "\"", "close": "\"", "notIn": ["string"] }, 16 | { "open": "'", "close": "'", "notIn": ["string", "comment"] } 17 | ], 18 | "surroundingPairs": [ 19 | ["{", "}"], 20 | ["[", "]"], 21 | ["(", ")"], 22 | ["\"", "\""], 23 | ["'", "'"] 24 | ], 25 | "folding": { 26 | "markers": { 27 | "start": "^\\s*//\\s*#?region\\b", 28 | "end": "^\\s*//\\s*#?endregion\\b" 29 | } 30 | }, 31 | "wordPattern": "(-?\\d*\\.\\d\\w*)|([^\\`\\~\\!\\@\\#\\%\\^\\&\\*\\(\\)\\-\\=\\+\\[\\{\\]\\}\\\\\\|\\;\\:\\'\\\"\\,\\.\\<\\>\\/\\?\\s]+)" 32 | } 33 | -------------------------------------------------------------------------------- /components/vscode/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dada-language", 3 | "displayName": "Dada Language", 4 | "description": "Language support for Dada programming language", 5 | "version": "0.1.0", 6 | "publisher": "dada-lang", 7 | "repository": { 8 | "type": "git", 9 | "url": "https://github.com/dada-lang/dada" 10 | }, 11 | "engines": { 12 | "vscode": "^1.74.0" 13 | }, 14 | "categories": [ 15 | "Programming Languages" 16 | ], 17 | "contributes": { 18 | "commands": [ 19 | { 20 | "command": "dada.restartServer", 21 | "title": "Dada: Restart Language Server" 22 | } 23 | ], 24 | "languages": [ 25 | { 26 | "id": "dada", 27 | "aliases": [ 28 | "Dada", 29 | "dada" 30 | ], 31 | "extensions": [ 32 | ".dada" 33 | ], 34 | "configuration": "./language-configuration.json" 35 | } 36 | ], 37 | "grammars": [ 38 | { 39 | "language": "dada", 40 | "scopeName": "source.dada", 41 | "path": "./syntaxes/dada.tmLanguage.json" 42 | } 43 | ], 44 | "configuration": { 45 | "type": "object", 46 | "title": "Dada", 47 | "properties": { 48 | "dada.serverPath": { 49 | "type": "string", 50 | "default": "", 51 | "description": "Path to the Dada language server executable" 52 | }, 53 | "dada.trace.server": { 54 | "type": "string", 55 | "enum": [ 56 | "off", 57 | "messages", 58 | "verbose" 59 | ], 60 | "default": "off", 61 | "description": "Traces the communication between VS Code and the Dada language server" 62 | } 63 | } 64 | } 65 | }, 66 | "activationEvents": [ 67 | "onLanguage:dada" 68 | ], 69 | "main": "./out/extension.js", 70 | "scripts": { 71 | "vscode:prepublish": "npm run compile && npm run package-server", 72 | "compile": "tsc -p ./", 73 | "watch": "tsc -watch -p ./", 74 | "lint": "eslint src --ext ts", 75 | "package-server": "node ./scripts/package-server.js" 76 | }, 77 | "dependencies": { 78 | "vscode-languageclient": "^8.1.0" 79 | }, 80 | "devDependencies": { 81 | "@types/node": "^16.11.7", 82 | "@types/vscode": "^1.74.0", 83 | "@typescript-eslint/eslint-plugin": "^5.42.0", 84 | 
"@typescript-eslint/parser": "^5.42.0", 85 | "eslint": "^8.26.0", 86 | "typescript": "^4.8.4" 87 | }, 88 | "volta": { 89 | "node": "22.15.0" 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /components/vscode/src/extension.ts: -------------------------------------------------------------------------------- 1 | import * as vscode from 'vscode'; 2 | import { 3 | LanguageClient, 4 | LanguageClientOptions, 5 | ServerOptions, 6 | TransportKind 7 | } from 'vscode-languageclient/node'; 8 | import { getServerPath, getServerOptions } from './utils/serverUtils'; 9 | 10 | let client: LanguageClient | undefined; 11 | 12 | export function activate(context: vscode.ExtensionContext) { 13 | // Get server path using our utility 14 | const serverPath = getServerPath(context); 15 | const { command, args, options } = getServerOptions(context, serverPath); 16 | 17 | // Create output channel for logging 18 | const outputChannel = vscode.window.createOutputChannel('Dada Language Server'); 19 | outputChannel.appendLine(`Using server: ${command} ${args.join(' ')}`); 20 | 21 | // Options to control the language client 22 | const clientOptions: LanguageClientOptions = { 23 | documentSelector: [{ scheme: 'file', language: 'dada' }], 24 | synchronize: { 25 | fileEvents: vscode.workspace.createFileSystemWatcher('**/*.dada') 26 | }, 27 | outputChannel 28 | }; 29 | 30 | // Create the server options 31 | const serverOptions: ServerOptions = { 32 | run: { command, args, options, transport: TransportKind.stdio }, 33 | debug: { command, args, options, transport: TransportKind.stdio } 34 | }; 35 | 36 | // Create and start the client 37 | client = new LanguageClient( 38 | 'dada', 39 | 'Dada Language Server', 40 | serverOptions, 41 | clientOptions 42 | ); 43 | 44 | // Start the client 45 | client.start().catch(error => { 46 | vscode.window.showErrorMessage( 47 | `Failed to start Dada language server: ${error.message}. ` + 48 | 'Please check the Dada Language Server output channel for details.' 
49 | ); 50 | }); 51 | 52 | // Register commands 53 | context.subscriptions.push( 54 | vscode.commands.registerCommand('dada.restartServer', async () => { 55 | if (client) { 56 | await client.stop(); 57 | client.start(); 58 | } 59 | }) 60 | ); 61 | } 62 | 63 | export function deactivate(): Thenable | undefined { 64 | if (!client) { 65 | return undefined; 66 | } 67 | return client.stop(); 68 | } 69 | -------------------------------------------------------------------------------- /components/vscode/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "target": "ES2020", 5 | "outDir": "out", 6 | "lib": ["ES2020", "DOM"], 7 | "sourceMap": true, 8 | "rootDir": "src", 9 | "strict": true, 10 | "esModuleInterop": true, 11 | "skipLibCheck": true, 12 | "forceConsistentCasingInFileNames": true 13 | }, 14 | "include": ["src"], 15 | "exclude": ["node_modules", ".vscode-test"] 16 | } 17 | -------------------------------------------------------------------------------- /components/xtask/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "xtask" 3 | version.workspace = true 4 | edition.workspace = true 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | structopt = "0.3" 10 | tracing = "0.1.36" 11 | tracing-tree = "0.2.1" 12 | tracing-subscriber = { version = "0.3.15", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] } 13 | xshell = "0.1" 14 | anyhow = { workspace = true } 15 | -------------------------------------------------------------------------------- /components/xtask/src/build.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | use structopt::StructOpt; 3 | 4 | #[derive(StructOpt)] 5 | pub struct Build {} 6 | 7 | const DADA_LSP_SERVER_CRATE: &str = "dada-lsp-server"; 8 | const WASM_TRIPLE: &str = "wasm32-wasip1-threads"; 9 | const PROFILE: &str = "debug"; 10 | 11 | impl Build { 12 | pub fn main(&self) -> anyhow::Result<()> { 13 | let xtask_dir = cargo_path("CARGO_MANIFEST_DIR")?; 14 | let manifest_dir = xtask_dir.parent().unwrap().parent().unwrap(); 15 | tracing::debug!("manifest directory: {manifest_dir:?}"); 16 | 17 | // This *should* be part of the Dockerfile, but it doesn't seem to be? 
18 | xshell::Cmd::new("rustup") 19 | .arg("target") 20 | .arg("add") 21 | .arg(WASM_TRIPLE) 22 | .run()?; 23 | 24 | // Start with a default build 25 | xshell::Cmd::new("cargo") 26 | .arg("build") 27 | .arg("-p") 28 | .arg(DADA_LSP_SERVER_CRATE) 29 | .run()?; 30 | 31 | // Then do a wasm build 32 | xshell::Cmd::new("cargo") 33 | .arg("build") 34 | .arg("-p") 35 | .arg(DADA_LSP_SERVER_CRATE) 36 | .arg("--target") 37 | .arg(WASM_TRIPLE) 38 | .run()?; 39 | 40 | // Copy the output into the wasm directory 41 | let wasm_dir = manifest_dir.join("components/vscode/wasm"); 42 | xshell::mkdir_p(&wasm_dir)?; 43 | 44 | let target_dir = manifest_dir.join("target").join(WASM_TRIPLE).join(PROFILE); 45 | let wasm_file = target_dir 46 | .join(DADA_LSP_SERVER_CRATE) 47 | .with_extension("wasm"); 48 | 49 | xshell::cp(&wasm_file, &wasm_dir)?; 50 | 51 | Ok(()) 52 | } 53 | } 54 | 55 | fn cargo_path(env_var: &str) -> anyhow::Result { 56 | match std::env::var(env_var) { 57 | Ok(s) => { 58 | tracing::debug!("cargo_path({env_var}) = {s}"); 59 | Ok(PathBuf::from(s)) 60 | } 61 | Err(_) => anyhow::bail!("`{}` not set", env_var), 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /components/xtask/src/deploy.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | use structopt::StructOpt; 3 | 4 | #[derive(StructOpt)] 5 | pub struct Deploy { 6 | #[structopt(long)] 7 | check: bool, 8 | } 9 | 10 | impl Deploy { 11 | pub fn main(&self) -> anyhow::Result<()> { 12 | let xtask_dir = cargo_path("CARGO_MANIFEST_DIR")?; 13 | let manifest_dir = xtask_dir.parent().unwrap().parent().unwrap(); 14 | tracing::debug!("manifest directory: {manifest_dir:?}"); 15 | let book_dir = manifest_dir.join("book"); 16 | 17 | // Build the Docusaurus site 18 | { 19 | let _directory = xshell::pushd(&book_dir)?; 20 | let npm = if cfg!(target_os = "windows") { 21 | "npm.cmd" 22 | } else { 23 | "npm" 24 | }; 25 | xshell::Cmd::new(npm).arg("install").run()?; 26 | if self.check { 27 | xshell::Cmd::new(npm).arg("run").arg("typecheck").run()?; 28 | xshell::Cmd::new(npm).arg("run").arg("format:check").run()?; 29 | } 30 | xshell::Cmd::new(npm).arg("run").arg("build").run()?; 31 | } 32 | 33 | // Generate rustdocs and copy to book/build/impl 34 | { 35 | let _directory = xshell::pushd(manifest_dir)?; 36 | 37 | // Generate rustdocs 38 | xshell::Cmd::new("cargo") 39 | .arg("doc") 40 | .arg("--workspace") 41 | .arg("--no-deps") 42 | .arg("--document-private-items") 43 | .run()?; 44 | 45 | // Copy rustdocs to book/build/impl 46 | let target_doc_dir = manifest_dir.join("target").join("doc"); 47 | let book_impl_dir = book_dir.join("build").join("impl"); 48 | 49 | // Remove existing impl directory if it exists 50 | if book_impl_dir.exists() { 51 | std::fs::remove_dir_all(&book_impl_dir)?; 52 | } 53 | 54 | // Copy the entire doc directory 55 | copy_dir_recursive(&target_doc_dir, &book_impl_dir)?; 56 | } 57 | 58 | Ok(()) 59 | } 60 | } 61 | 62 | fn cargo_path(env_var: &str) -> anyhow::Result { 63 | match std::env::var(env_var) { 64 | Ok(s) => { 65 | tracing::debug!("cargo_path({env_var}) = {s}"); 66 | Ok(PathBuf::from(s)) 67 | } 68 | Err(_) => anyhow::bail!("`{}` not set", env_var), 69 | } 70 | } 71 | 72 | fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> anyhow::Result<()> { 73 | std::fs::create_dir_all(dst)?; 74 | 75 | for entry in std::fs::read_dir(src)? 
{ 76 | let entry = entry?; 77 | let src_path = entry.path(); 78 | let dst_path = dst.join(entry.file_name()); 79 | 80 | if src_path.is_dir() { 81 | copy_dir_recursive(&src_path, &dst_path)?; 82 | } else { 83 | std::fs::copy(&src_path, &dst_path)?; 84 | } 85 | } 86 | 87 | Ok(()) 88 | } 89 | -------------------------------------------------------------------------------- /components/xtask/src/main.rs: -------------------------------------------------------------------------------- 1 | use structopt::StructOpt; 2 | use tracing_subscriber::{EnvFilter, prelude::*}; 3 | 4 | mod build; 5 | mod deploy; 6 | 7 | fn main() -> anyhow::Result<()> { 8 | Options::from_args().main() 9 | } 10 | 11 | #[derive(StructOpt)] 12 | pub struct Options { 13 | #[structopt(long, default_value = "info")] 14 | log: String, 15 | 16 | #[structopt(subcommand)] // Note that we mark a field as a subcommand 17 | command: Command, 18 | } 19 | 20 | #[derive(StructOpt)] 21 | pub enum Command { 22 | Build { 23 | #[structopt(flatten)] 24 | options: build::Build, 25 | }, 26 | Deploy { 27 | #[structopt(flatten)] 28 | options: deploy::Deploy, 29 | }, 30 | } 31 | 32 | impl Options { 33 | fn main(&self) -> anyhow::Result<()> { 34 | let subscriber = tracing_subscriber::Registry::default() 35 | .with({ 36 | // Configure which modules/level/etc using `DADA_LOG` 37 | // environment variable if present, 38 | // else the `--log` parameter. 39 | match std::env::var("DADA_LOG") { 40 | Ok(env) => EnvFilter::new(env), 41 | Err(_) => EnvFilter::new(&self.log), 42 | } 43 | }) 44 | .with({ 45 | // Configure the hierarchical display. 46 | tracing_tree::HierarchicalLayer::default() 47 | .with_writer(std::io::stderr) 48 | .with_indent_lines(false) 49 | .with_ansi(true) 50 | .with_targets(true) 51 | .with_indent_amount(2) 52 | }); 53 | tracing::subscriber::set_global_default(subscriber).unwrap(); 54 | 55 | match &self.command { 56 | Command::Build { options } => options.main(), 57 | Command::Deploy { options } => options.main(), 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /dada.code-workspace: -------------------------------------------------------------------------------- 1 | { 2 | "folders": [ 3 | { 4 | "path": "." 5 | }, 6 | { 7 | "path": "components/vscode" 8 | } 9 | ], 10 | "settings": {} 11 | } -------------------------------------------------------------------------------- /justfile: -------------------------------------------------------------------------------- 1 | test: 2 | cargo test --all --workspace --all-targets 3 | 4 | # Generate documentation with private items included (recommended) 5 | doc: 6 | cargo doc --workspace --no-deps --document-private-items 7 | 8 | # Generate and open documentation 9 | doc-open: 10 | cargo doc --workspace --no-deps --document-private-items --open 11 | 12 | # Generate documentation and serve it locally on http://localhost:8000 13 | doc-serve: 14 | cargo doc --workspace --no-deps --document-private-items 15 | @echo "Documentation generated. Starting server at http://localhost:8000" 16 | @echo "Visit http://localhost:8000/dada/ to view the main documentation" 17 | cd target/doc && python3 -m http.server 8000 -------------------------------------------------------------------------------- /libdada/prelude.dada: -------------------------------------------------------------------------------- 1 | export class String { 2 | data: Pointer[u8] 3 | length: u32 4 | capacity: u32 5 | 6 | ## Create a string from a statically allocated byte array. 
7 | ## Used to create string literals. 8 | ## 9 | ## # Unsafe 10 | ## 11 | ## The data must be valid indefinitely. 12 | ## The resulting string will not free the data when it is dropped. 13 | export unsafe fn literal(data: Pointer[u8], length: u32) -> String { 14 | String { data: data, length: length, capacity: 0 } 15 | } 16 | 17 | ## Get the length of the string. 18 | export fn len(self) -> u32 { 19 | self.length 20 | } 21 | } 22 | 23 | export struct Pointer[type T] 24 | 25 | export async fn print(s: String) {} 26 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dada", 3 | "lockfileVersion": 2, 4 | "requires": true, 5 | "packages": {} 6 | } 7 | -------------------------------------------------------------------------------- /rust-toolchain: -------------------------------------------------------------------------------- 1 | nightly -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! # Dada Programming Language 2 | //! 3 | //! **👉 For complete documentation and APIs, see the [`dada_lang`](../dada_lang) crate.** 4 | //! 5 | //! This crate provides the main binary entry point for the Dada programming language compiler. 6 | //! The actual compiler implementation, APIs, and comprehensive documentation are located in 7 | //! the [`dada_lang`](../dada_lang) crate. 8 | //! 9 | //! ## Quick Start 10 | //! 11 | //! If you're looking to: 12 | //! 13 | //! - **Use the Dada compiler** - You're in the right place! Install with `cargo install dada` 14 | //! - **Understand the compiler architecture** - See [`dada_lang`](../dada_lang) for the complete overview 15 | //! - **Explore the type system** - Start with [`dada_ir_sym`](../dada_ir_sym) documentation 16 | //! - **Contribute to development** - Check out the [`dada_lang`](../dada_lang) module documentation 17 | //! 18 | //! ## Example Usage 19 | //! 20 | //! ```bash 21 | //! # Compile a Dada source file 22 | //! dada compile my_program.dada 23 | //! 24 | //! # Run a Dada program 25 | //! dada run my_program.dada 26 | //! 27 | //! # Run tests 28 | //! dada test tests/ 29 | //! ``` 30 | //! 31 | //! ## Architecture 32 | //! 33 | //! The Dada compiler is organized as a workspace with several components: 34 | //! 35 | //! - [`dada_lang`](../dada_lang) - Main compiler APIs and CLI (start here!) 36 | //! - [`dada_parser`](../dada_parser) - Lexing and parsing 37 | //! - [`dada_ir_sym`](../dada_ir_sym) - Symbolic IR and type checking 38 | //! - [`dada_check`](../dada_check) - Type checking orchestration 39 | //! - [`dada_codegen`](../dada_codegen) - WebAssembly code generation 40 | //! - [`dada_compiler`](../dada_compiler) - Compilation orchestration 41 | //! 42 | //! For the complete documentation, visit [`dada_lang`](../dada_lang). 
43 | 44 | // Re-export the main API from dada-lang for convenience 45 | pub use dada_lang::*; 46 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use dada_util::Fallible; 2 | use structopt::StructOpt; 3 | 4 | #[tokio::main(flavor = "current_thread")] 5 | async fn main() -> Fallible<()> { 6 | dada_lang::Options::from_args().main() 7 | } 8 | -------------------------------------------------------------------------------- /tests/.gitignore: -------------------------------------------------------------------------------- 1 | *.txt 2 | *.test-report.* -------------------------------------------------------------------------------- /tests/class_inputs.dada: -------------------------------------------------------------------------------- 1 | class Test(input: u32) { 2 | field: u32 3 | #! cannot have both explicit fields and an automatic constructor 4 | } -------------------------------------------------------------------------------- /tests/default_perms/class_field_class_ty.dada: -------------------------------------------------------------------------------- 1 | # Test that a class field that is a class must have an explicit permission. 2 | 3 | class Foo(x: String) {} 4 | #! ^^^^^^ explicit permission required 5 | 6 | class Bar { 7 | x: String 8 | #! ^^^^^^ explicit permission required 9 | } 10 | -------------------------------------------------------------------------------- /tests/default_perms/class_field_primitive_ty.dada: -------------------------------------------------------------------------------- 1 | class Foo(x: u32) {} 2 | 3 | class Bar { 4 | x: u32 5 | } 6 | -------------------------------------------------------------------------------- /tests/default_perms/class_field_struct_ty.dada: -------------------------------------------------------------------------------- 1 | struct Point(x: u32, y: u32) 2 | 3 | class Foo(x: Point) {} 4 | 5 | class Bar { 6 | x: Point 7 | } 8 | -------------------------------------------------------------------------------- /tests/default_perms/class_method_class_ty.dada: -------------------------------------------------------------------------------- 1 | # Test that a class field that is a class must have an explicit permission. 2 | 3 | class Foo { 4 | fn m1(self, s: String) { 5 | self.m2(s.give) 6 | #! ^^^^^^ subtype expected 7 | } 8 | 9 | fn m2(self, s: my String) { 10 | 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /tests/default_perms/class_method_self_ty.dada: -------------------------------------------------------------------------------- 1 | # Test that a class field that is a class must have an explicit permission. 2 | 3 | class Foo { 4 | fn m1(self) { 5 | self.give.m2() 6 | #! 
^^^^^^^^^ subtype expected 7 | } 8 | 9 | fn m2(my self) { 10 | 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /tests/default_perms/class_method_struct_ty.dada: -------------------------------------------------------------------------------- 1 | struct Point(x: u32, y: u32) 2 | 3 | class Foo { 4 | fn m1(self, s: Point) { 5 | self.m2(s.give) 6 | } 7 | 8 | fn m2(self, s: my Point) { 9 | # ^^ permission not relevant to struct type 10 | 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /tests/default_perms/struct_method_self_ty.dada: -------------------------------------------------------------------------------- 1 | # Test that a class field that is a class must have an explicit permission. 2 | 3 | struct Foo { 4 | fn m1(self) { 5 | self.give.m2() 6 | } 7 | 8 | fn m2(my self) { 9 | 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /tests/harness.rs: -------------------------------------------------------------------------------- 1 | fn main() -> dada_util::Fallible<()> { 2 | let status = std::process::Command::new(env!("CARGO_BIN_EXE_dada")) 3 | .arg("test") 4 | .arg("--") 5 | .arg("tests") 6 | .status()?; 7 | if status.success() { 8 | Ok(()) 9 | } else { 10 | match status.code() { 11 | Some(code) => dada_util::bail!("dada test exited with status code: {}", code), 12 | None => dada_util::bail!("dada test terminated by signal"), 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /tests/hello_world.dada: -------------------------------------------------------------------------------- 1 | #:skip_codegen # FIXME: codegen doesn't work yet 2 | 3 | async fn main() { 4 | print("Hello, world").await 5 | } -------------------------------------------------------------------------------- /tests/parser/class_body_ill_formed.dada: -------------------------------------------------------------------------------- 1 | class Foo { 2 | a b c d e f g 3 | #! ^ expected `:` to come next 4 | #! ^ extra input 5 | } -------------------------------------------------------------------------------- /tests/parser/class_body_two_fields.dada: -------------------------------------------------------------------------------- 1 | class Foo { 2 | x: i32 3 | 22 4 | #! ^^ extra input 5 | } -------------------------------------------------------------------------------- /tests/parser/fn_body.dada: -------------------------------------------------------------------------------- 1 | fn just_return() { 2 | return 3 | } 4 | 5 | fn return_identifier(x: u32) -> u32 { 6 | x 7 | } -------------------------------------------------------------------------------- /tests/parser/fn_named_class.dada: -------------------------------------------------------------------------------- 1 | fn class() { 2 | #! ^^ expected an identifier to come next 3 | #! ^^^^^ expected an identifier to come next 4 | #! ^^ expected a module-level item 5 | #! 
expected a module-level item 6 | } -------------------------------------------------------------------------------- /tests/parser/operator_precedence.dada: -------------------------------------------------------------------------------- 1 | #:fn_asts 2 | 3 | fn return_identifier(x: u32) -> u32 { 4 | 44 + 22 * 33 5 | } -------------------------------------------------------------------------------- /tests/parser/pair.dada: -------------------------------------------------------------------------------- 1 | class Pair[type A, type B] { 2 | a: A 3 | b: B 4 | } -------------------------------------------------------------------------------- /tests/parser/perm_ref.dada: -------------------------------------------------------------------------------- 1 | class Data {} 2 | 3 | fn take(d: ref Data) {} -------------------------------------------------------------------------------- /tests/spikes/bank_account.dada: -------------------------------------------------------------------------------- 1 | #:skip_codegen # FIXME: codegen doesn't work yet 2 | 3 | struct Amount( 4 | sign: bool, 5 | cents: u32, 6 | ) { 7 | fn is_positive(self) -> bool { 8 | self.sign 9 | } 10 | 11 | fn is_negative(self) -> bool { 12 | !self.sign 13 | } 14 | 15 | fn negate(self) -> Amount { 16 | Amount(!self.sign, self.cents) 17 | } 18 | 19 | fn greater_than(self, amount: Amount) -> bool { 20 | if self.is_positive() && amount.is_negative() { 21 | true 22 | } else if amount.is_positive() && self.is_negative() { 23 | false 24 | } else if self.is_positive() { 25 | self.cents > amount.cents 26 | } else { 27 | self.cents < amount.cents 28 | } 29 | } 30 | 31 | fn plus(self, amount: Amount) -> Amount { 32 | if self.is_positive() { 33 | if amount.is_positive() { 34 | Amount(true, self.cents + amount.cents) 35 | } else if self.cents >= amount.cents { 36 | Amount(true, self.cents - amount.cents) 37 | } else { 38 | Amount(false, amount.cents - self.cents) 39 | } 40 | } else { 41 | amount.plus(self.negate()) 42 | } 43 | } 44 | 45 | fn minus(self, amount: Amount) -> Amount { 46 | self.plus(amount.negate()) 47 | } 48 | } 49 | 50 | class Account(name: my String, mut amount: Amount) { 51 | fn is_overdrawn(self) -> bool { 52 | self.amount.is_negative() 53 | } 54 | 55 | fn deposit(mut self, amount: Amount) { 56 | self.amount = self.amount.plus(amount) 57 | } 58 | 59 | fn withdraw(mut self, amount: Amount) { 60 | self.amount = self.amount.minus(amount) 61 | } 62 | 63 | fn transfer_to(mut self, account: mut Account, amount: Amount) { 64 | self.mut.withdraw(amount) 65 | account.mut.deposit(amount) 66 | } 67 | } 68 | 69 | async fn main() { 70 | let mut jackson = Account("Jackson", Amount(true, 22)) 71 | let mut jill = Account("Jill", Amount(true, 44)) 72 | let mut jack = Account("Jack", Amount(true, 222)) 73 | 74 | print("Accounts: Jackson {jackson} Jill {jill} Jack {jack}").await 75 | 76 | # First Jackson transfers $22 to Jill -- oh, shoot, he's broke! 77 | jackson.mut.transfer_to(jill.mut, Amount(true, 22)) 78 | print("Accounts: Jackson {jackson} Jill {jill} Jack {jack}").await 79 | 80 | # Then his parent Jack transfers $22 to Jack. Much better! 
81 | jack.mut.transfer_to(jackson.mut, Amount(true, 22)) 82 | print("Accounts: Jackson {jackson} Jill {jill} Jack {jack}").await 83 | } -------------------------------------------------------------------------------- /tests/spikes/class_arguments.dada: -------------------------------------------------------------------------------- 1 | #:skip_codegen # FIXME: codegen doesn't work yet 2 | 3 | class Point(x: u32, y: u32) 4 | 5 | fn main() { 6 | let p = Point(22, 44) 7 | let q = p.x + p.y 8 | } 9 | -------------------------------------------------------------------------------- /tests/spikes/leased_method.dada: -------------------------------------------------------------------------------- 1 | class Account() { 2 | fn transfer_to(mut self) { 3 | } 4 | } 5 | 6 | async fn main() { 7 | let mut jackson = Account() 8 | jackson.mut.transfer_to() 9 | } -------------------------------------------------------------------------------- /tests/symbols/bad_local_variable_in_perm_ref.dada: -------------------------------------------------------------------------------- 1 | class Data {} 2 | 3 | fn take_ok(d: ref[x] Data, x: Data) {} 4 | 5 | fn take_bad1(d: ref[x] Data) {} 6 | #! ^ could not find anything named `x` 7 | 8 | fn take_bad2(d: ref[x, y] Data) {} 9 | #! ^ could not find anything named `x` 10 | #! ^ could not find anything named `y` 11 | -------------------------------------------------------------------------------- /tests/symbols/bad_name.dada: -------------------------------------------------------------------------------- 1 | class Foo { 2 | x: Baz 3 | #! ^^^ could not find anything named `Baz` 4 | } 5 | 6 | -------------------------------------------------------------------------------- /tests/type_check/infer_add_u32s.dada: -------------------------------------------------------------------------------- 1 | fn main() { 2 | let x = 22 + 44 3 | #? ^^ ExprType: u32 4 | #? ^^ ExprType: u32 5 | #? ^^^^^^^ ExprType: u32 6 | #? ^ VariableType: u32 7 | set(x) 8 | } 9 | 10 | fn set(x: u32) { 11 | 12 | } -------------------------------------------------------------------------------- /tests/type_check/infer_conflicting_bounds.dada: -------------------------------------------------------------------------------- 1 | #:FIXME 2 | class Contents { 3 | s: my String 4 | } 5 | 6 | fn test(c: my Contents) { 7 | let s = any_string() 8 | #! subtype expected 9 | s = lower_bound() 10 | upper_bound(s.give) 11 | } 12 | 13 | fn any_string[perm P]() -> P String { 14 | #! invalid return value 15 | } 16 | 17 | fn upper_bound(s: my String) { 18 | } 19 | 20 | fn lower_bound() -> our String { 21 | "Hello, world".share 22 | } -------------------------------------------------------------------------------- /tests/type_check/infer_ref_string.dada: -------------------------------------------------------------------------------- 1 | class Contents { 2 | s: my String 3 | } 4 | 5 | fn test(c: my Contents) { 6 | let x = c.s 7 | #? ^ VariableType: ref[c.s] String 8 | } -------------------------------------------------------------------------------- /tests/type_check/infer_var_u32.dada: -------------------------------------------------------------------------------- 1 | fn main() { 2 | let x = get() 3 | #? 
^ VariableType: u32 4 | } 5 | 6 | fn get() -> u32 { 7 | 22 8 | } -------------------------------------------------------------------------------- /tests/type_check/predicate_mut_string.dada: -------------------------------------------------------------------------------- 1 | #:skip_codegen # FIXME: codegen doesn't work yet 2 | 3 | fn test_shared() { 4 | let x: String = "hello, world" 5 | is_shared(x.mut) #! /where clause.*not satisfied 6 | } 7 | 8 | fn is_shared(t: type T) 9 | where 10 | T is shared, 11 | {} 12 | 13 | fn test_unique() { 14 | let x: String = "hello, world" 15 | is_unique(x.mut) 16 | } 17 | 18 | fn is_unique(t: type T) 19 | where 20 | T is unique, 21 | { 22 | 23 | } 24 | 25 | fn test_lent() { 26 | let x: String = "hello, world" 27 | is_lent(x.mut) 28 | } 29 | 30 | fn is_lent(t: type T) 31 | where 32 | T is lent, 33 | {} 34 | 35 | fn test_owned() { 36 | let x: String = "hello, world" 37 | is_owned(x.mut) #! /where clause.*not satisfied 38 | } 39 | 40 | fn is_owned(t: type T) 41 | where 42 | T is owned, 43 | {} -------------------------------------------------------------------------------- /tests/type_check/predicate_my_string.dada: -------------------------------------------------------------------------------- 1 | #:skip_codegen # FIXME: codegen doesn't work yet 2 | #:FIXME 3 | 4 | fn test_shared() { 5 | let x: String = "hello, world" 6 | is_shared(x.give) #! /where clause.*not satisfied 7 | } 8 | 9 | fn is_shared(t: type T) 10 | where 11 | T is shared, 12 | {} 13 | 14 | fn test_unique() { 15 | let x: String = "hello, world" 16 | is_unique(x.give) 17 | } 18 | 19 | fn is_unique(t: type T) 20 | where 21 | T is unique, 22 | { 23 | 24 | } 25 | 26 | fn test_lent() { 27 | let x: String = "hello, world" 28 | is_lent(x.give) #! /where clause.*not satisfied 29 | } 30 | 31 | fn is_lent(t: type T) 32 | where 33 | T is lent, 34 | {} 35 | 36 | fn test_owned() { 37 | let x: String = "hello, world" 38 | is_owned(x.give) 39 | } 40 | 41 | fn is_owned(t: type T) 42 | where 43 | T is owned, 44 | {} -------------------------------------------------------------------------------- /tests/type_check/predicate_our_string.dada: -------------------------------------------------------------------------------- 1 | #:skip_codegen # FIXME: codegen doesn't work yet 2 | 3 | fn test_shared() { 4 | let x: String = "hello, world" 5 | is_shared(x.share) 6 | } 7 | 8 | fn is_shared(t: type T) 9 | where 10 | T is shared, 11 | {} 12 | 13 | fn test_unique() { 14 | let x: String = "hello, world" 15 | is_unique(x.share) #! /where clause.*not satisfied 16 | } 17 | 18 | fn is_unique(t: type T) 19 | where 20 | T is unique, 21 | { 22 | 23 | } 24 | 25 | fn test_lent() { 26 | let x: String = "hello, world" 27 | 28 | # Interesting example: this is not an error 29 | # because `our` can be upcast to `ref[]`. 30 | is_lent(x.share) 31 | } 32 | 33 | fn is_lent(t: type T) 34 | where 35 | T is lent, 36 | {} 37 | 38 | fn test_lent_identity() { 39 | let x: String = "hello, world" 40 | 41 | # This forces the result to be `our String` 42 | # and so we get an error 43 | let y: our String = is_lent_identity(x.share) #! 
/where clause.*not satisfied 44 | } 45 | 46 | fn is_lent_identity(t: type T) -> T 47 | where 48 | T is lent, 49 | { 50 | t.give 51 | } 52 | 53 | fn test_owned() { 54 | let x: String = "hello, world" 55 | is_owned(x.share) 56 | } 57 | 58 | fn is_owned(t: type T) 59 | where 60 | T is owned, 61 | {} -------------------------------------------------------------------------------- /tests/type_check/predicate_ref_string.dada: -------------------------------------------------------------------------------- 1 | #:skip_codegen # FIXME: codegen doesn't work yet 2 | 3 | fn test_shared() { 4 | let x: String = "hello, world" 5 | is_shared(x) 6 | } 7 | 8 | fn is_shared(t: type T) 9 | where 10 | T is shared, 11 | {} 12 | 13 | fn test_unique() { 14 | let x: String = "hello, world" 15 | is_unique(x) #! /where clause.*not satisfied 16 | } 17 | 18 | fn is_unique(t: type T) 19 | where 20 | T is unique, 21 | { 22 | 23 | } 24 | 25 | fn test_lent() { 26 | let x: String = "hello, world" 27 | is_lent(x) 28 | } 29 | 30 | fn is_lent(t: type T) 31 | where 32 | T is lent, 33 | {} 34 | 35 | fn test_owned() { 36 | let x: String = "hello, world" 37 | is_owned(x) #! /where clause.*not satisfied 38 | } 39 | 40 | fn is_owned(t: type T) 41 | where 42 | T is owned, 43 | {} -------------------------------------------------------------------------------- /tests/type_check/predicate_u32.dada: -------------------------------------------------------------------------------- 1 | #:skip_codegen # FIXME: codegen doesn't work yet 2 | 3 | fn gimme() -> u32 { 22 } 4 | 5 | fn test_shared() { 6 | is_shared(gimme()) 7 | } 8 | 9 | fn is_shared(t: type T) 10 | where 11 | T is shared, 12 | {} 13 | 14 | fn test_unique() { 15 | is_unique(gimme()) #! /where clause.*not satisfied 16 | } 17 | 18 | fn is_unique(t: type T) 19 | where 20 | T is unique, 21 | { 22 | 23 | } 24 | 25 | fn test_lent() { 26 | is_lent(gimme()) #! /where clause.*not satisfied 27 | } 28 | 29 | fn is_lent(t: type T) 30 | where 31 | T is lent, 32 | {} 33 | 34 | fn test_owned() { 35 | is_owned(gimme()) 36 | } 37 | 38 | fn is_owned(t: type T) 39 | where 40 | T is owned, 41 | {} -------------------------------------------------------------------------------- /tests/type_check/predicate_via_spec.dada: -------------------------------------------------------------------------------- 1 | #:skip_codegen # FIXME: codegen doesn't work yet 2 | 3 | ################################################## 4 | ## `our String` 5 | 6 | fn test_our_shared() { is_shared[our String]() } 7 | fn test_our_unique() { is_unique[our String]() } #! /where clause.*not satisfied 8 | fn test_our_lent() { is_lent[our String]() } #! /where clause.*not satisfied 9 | fn test_our_owned() { is_owned[our String]() } 10 | 11 | ################################################## 12 | ## `my String` 13 | 14 | fn test_my_shared() { is_shared[my String]() } #! /where clause.*not satisfied 15 | fn test_my_unique() { is_unique[my String]() } 16 | fn test_my_lent() { is_lent[my String]() } #! /where clause.*not satisfied 17 | fn test_my_owned() { is_owned[my String]() } 18 | 19 | ################################################## 20 | ## `ref[x] String` where `x: my String` 21 | 22 | fn test_refmy_shared(x: my String) { is_shared[ref[x] String]() } 23 | fn test_refmy_unique(x: my String) { is_unique[ref[x] String]() } #! /where clause.*not satisfied 24 | fn test_refmy_lent(x: my String) { is_lent[ref[x] String]() } 25 | fn test_refmy_owned(x: my String) { is_owned[ref[x] String]() } #! 
/where clause.*not satisfied 26 | 27 | ################################################## 28 | ## TEST FUNCTIONS 29 | 30 | fn is_shared[type T]() 31 | where 32 | T is shared, 33 | {} 34 | 35 | fn is_unique[type T]() 36 | where 37 | T is unique, 38 | {} 39 | 40 | fn is_lent[type T]() 41 | where 42 | T is lent, 43 | {} 44 | 45 | fn is_owned[type T]() 46 | where 47 | T is owned, 48 | {} --------------------------------------------------------------------------------
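A note on the `tests/type_check/predicate_*.dada` files above: read together, their expected diagnostics sketch how each permission is classified by the `shared`, `unique`, `lent`, and `owned` where-clauses. The table below is only a summary inferred from those test expectations, not normative language documentation.

| Type under test | shared | unique | lent | owned |
| --- | --- | --- | --- | --- |
| `my String` | no | yes | no | yes |
| `our String` | yes | no | no | yes |
| `ref[x] String` (where `x: my String`) | yes | no | yes | no |
| `mut` lease of a `my String` (`x.mut`) | no | yes | yes | no |
| `u32` (primitive) | yes | no | no | yes |

One subtlety, noted in `predicate_our_string.dada` itself: passing an `our String` value where a `lent` type is expected can still succeed, because `our` can be upcast to `ref[]`; the where-clause only fails when the result is forced back to `our String`, as in `test_lent_identity`.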