├── .gitignore ├── rust-toolchain.toml ├── compiler ├── verde │ ├── src │ │ ├── internal │ │ │ ├── mod.rs │ │ │ ├── storage │ │ │ │ ├── mod.rs │ │ │ │ ├── interned.rs │ │ │ │ ├── query.rs │ │ │ │ ├── tracked.rs │ │ │ │ ├── pushable.rs │ │ │ │ └── routing.rs │ │ │ └── traits.rs │ │ ├── lib.rs │ │ └── test.rs │ ├── derive │ │ ├── Cargo.toml │ │ └── src │ │ │ ├── pushable.rs │ │ │ ├── tracked.rs │ │ │ ├── lib.rs │ │ │ ├── query.rs │ │ │ └── database.rs │ ├── Cargo.toml │ ├── README.md │ └── tests │ │ └── test.rs ├── arena │ ├── Cargo.toml │ └── src │ │ ├── sparse.rs │ │ ├── dense.rs │ │ └── lib.rs ├── lex │ ├── Cargo.toml │ └── src │ │ ├── lib.rs │ │ ├── tests.rs │ │ └── token.rs ├── text │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── thir │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── diagnostics │ ├── Cargo.toml │ └── src │ │ ├── lib.rs │ │ ├── span.rs │ │ └── diag.rs ├── pretty │ └── Cargo.toml ├── tycheck │ ├── Cargo.toml │ └── src │ │ ├── reader.rs │ │ └── decl.rs ├── syntax │ ├── Cargo.toml │ ├── src │ │ ├── lib.rs │ │ ├── builder.rs │ │ └── generated │ │ │ ├── mod.rs │ │ │ └── kind.rs │ └── yam.ungram ├── hir-lower │ ├── Cargo.toml │ └── src │ │ ├── prelude.rs │ │ └── lib.rs ├── parse │ ├── Cargo.toml │ └── src │ │ ├── lib.rs │ │ ├── tests │ │ ├── mod.rs │ │ └── recovery.rs │ │ ├── api.rs │ │ └── parse │ │ ├── recovery.rs │ │ └── mod.rs ├── hir │ ├── Cargo.toml │ └── src │ │ ├── ident.rs │ │ ├── ast.rs │ │ ├── lang_item.rs │ │ └── lib.rs ├── yamc │ ├── Cargo.toml │ └── src │ │ └── main.rs ├── codegen │ └── Cargo.toml ├── yamw │ ├── Cargo.toml │ └── src │ │ └── main.rs └── driver │ ├── Cargo.toml │ └── src │ └── lib.rs ├── core ├── core.yam └── lang_items.yam ├── README.md ├── docs ├── progress.md └── overview.md ├── Cargo.toml ├── rustfmt.toml └── LICENSE /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /out* 3 | 4 | .idea 5 | 
-------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "nightly" 3 | -------------------------------------------------------------------------------- /compiler/verde/src/internal/mod.rs: -------------------------------------------------------------------------------- 1 | mod db; 2 | pub mod storage; 3 | mod traits; 4 | 5 | pub use db::*; 6 | pub use traits::*; 7 | -------------------------------------------------------------------------------- /compiler/arena/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "arena" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | rustc-hash = { workspace = true } 8 | -------------------------------------------------------------------------------- /core/core.yam: -------------------------------------------------------------------------------- 1 | @prelude 2 | pub import .root.lang_items.{ 3 | bool, 4 | char, 5 | u8, u16, u32, u64, u128, 6 | i8, i16, i32, i64, i128, 7 | f32, f64 8 | }; 9 | -------------------------------------------------------------------------------- /compiler/lex/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "lex" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | diagnostics = { path = "../diagnostics" } 8 | 9 | logos = { workspace = true } 10 | -------------------------------------------------------------------------------- /compiler/text/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "text" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | lasso = { workspace = true, features = ["multi-threaded", "ahasher"] } 8 | once_cell = { workspace = true } 9 | 
-------------------------------------------------------------------------------- /compiler/thir/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "thir" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | arena = { path = "../arena" } 8 | hir = { path = "../hir" } 9 | verde = { path = "../verde" } 10 | 11 | rustc-hash = { workspace = true } 12 | -------------------------------------------------------------------------------- /compiler/diagnostics/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "diagnostics" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | text = { path = "../text" } 8 | verde = { path = "../verde" } 9 | 10 | ariadne = { workspace = true } 11 | rustc-hash = { workspace = true } 12 | -------------------------------------------------------------------------------- /compiler/pretty/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pretty" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | arena = { path = "../arena" } 8 | hir = { path = "../hir" } 9 | thir = { path = "../thir" } 10 | verde = { path = "../verde" } 11 | 12 | pretty = { workspace = true } 13 | -------------------------------------------------------------------------------- /compiler/tycheck/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "tycheck" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | arena = { path = "../arena" } 8 | diagnostics = { path = "../diagnostics" } 9 | hir = { path = "../hir" } 10 | thir = { path = "../thir" } 11 | verde = { path = "../verde" } 12 | 13 | rustc-hash = { workspace = true } 14 | tracing = { workspace = true } 15 | -------------------------------------------------------------------------------- 
/compiler/syntax/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "syntax" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [build-dependencies] 7 | ungrammar = "1.16" 8 | quote = "1.0" 9 | 10 | [dependencies] 11 | diagnostics = { path = "../diagnostics" } 12 | lex = { path = "../lex" } 13 | text = { path = "../text" } 14 | 15 | cstree = { workspace = true, features = ["derive", "multi_threaded_interning"] } 16 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Yam 2 | 3 | An experimental language with a few goals: 4 | 1. First-class IDE support - because IDEs are just as important as plain-text syntax and batch compiling. 5 | 2. A powerful type system - with support for compile-time metaprogramming. 6 | 3. A focus on low-level, but safe programming, for games and graphics. 7 | 8 | Since yam is heavily in development, none of these goals have been realized yet. 
9 | -------------------------------------------------------------------------------- /compiler/hir-lower/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hir-lower" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | arena = { path = "../arena" } 8 | diagnostics = { path = "../diagnostics" } 9 | hir = { path = "../hir" } 10 | syntax = { path = "../syntax" } 11 | text = { path = "../text" } 12 | verde = { path = "../verde" } 13 | 14 | rustc-hash = { workspace = true } 15 | tracing = { workspace = true } 16 | -------------------------------------------------------------------------------- /compiler/parse/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "parse" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | diagnostics = { path = "../diagnostics" } 8 | lex = { path = "../lex" } 9 | syntax = { path = "../syntax" } 10 | text = { path = "../text" } 11 | 12 | rustc-hash = { workspace = true } 13 | tracing = { workspace = true } 14 | 15 | [dev-dependencies] 16 | expect-test = { workspace = true } 17 | pretty_assertions = { workspace = true } 18 | -------------------------------------------------------------------------------- /compiler/hir/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hir" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | arena = { path = "../arena" } 8 | diagnostics = { path = "../diagnostics" } 9 | syntax = { path = "../syntax" } 10 | text = { path = "../text" } 11 | verde = { path = "../verde" } 12 | 13 | rustc-hash = { workspace = true } 14 | tracing = { workspace = true } 15 | 16 | [target.test.dependencies] 17 | verde = { path = "../verde", features = ["test"] } 18 | -------------------------------------------------------------------------------- /compiler/yamc/Cargo.toml: 
-------------------------------------------------------------------------------- 1 | [package] 2 | name = "yamc" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | diagnostics = { path = "../diagnostics" } 8 | driver = { path = "../driver" } 9 | 10 | clap = { workspace = true, features = ["derive"] } 11 | tracing = { workspace = true } 12 | tracing-forest = { workspace = true, features = ["ansi", "env-filter"] } 13 | tracing-subscriber = { workspace = true, features = ["env-filter"] } 14 | walkdir = { workspace = true } 15 | -------------------------------------------------------------------------------- /compiler/codegen/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "codegen" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | arena = { path = "../arena" } 8 | hir = { path = "../hir" } 9 | thir = { path = "../thir" } 10 | verde = { path = "../verde" } 11 | 12 | cranelift = { workspace = true } 13 | cranelift-module = { workspace = true } 14 | cranelift-object = { workspace = true } 15 | parking_lot = { workspace = true } 16 | rustc-hash = { workspace = true } 17 | target-lexicon = { workspace = true } 18 | -------------------------------------------------------------------------------- /compiler/yamw/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "yamw" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | diagnostics = { path = "../diagnostics" } 8 | parse = { path = "../parse" } 9 | # hir = { path = "../hir" } 10 | # hir-lower = { path = "../hir-lower" } 11 | verde = { path = "../verde", features = ["tracing"] } 12 | 13 | clap = { version = "4.3", features = ["derive"] } 14 | crossbeam-channel = "0.5" 15 | notify-debouncer-mini = "0.3" 16 | rustc-hash = "1.1" 17 | tracing = "0.1" 18 | tracing-subscriber = { version = "0.3", features = ["env-filter"] } 19 | walkdir = 
"2.3" 20 | -------------------------------------------------------------------------------- /compiler/driver/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "driver" 3 | version = "0.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | codegen = { path = "../codegen" } 8 | diagnostics = { path = "../diagnostics" } 9 | hir = { path = "../hir" } 10 | hir-lower = { path = "../hir-lower" } 11 | parse = { path = "../parse" } 12 | pretty = { path = "../pretty" } 13 | text = { path = "../text" } 14 | thir = { path = "../thir" } 15 | tycheck = { path = "../tycheck" } 16 | verde = { path = "../verde", features = ["tracing", "debug"] } 17 | 18 | rayon = { workspace = true } 19 | rustc-hash = { workspace = true } 20 | tracing = { workspace = true } 21 | -------------------------------------------------------------------------------- /core/lang_items.yam: -------------------------------------------------------------------------------- 1 | @lang(u8) 2 | pub type u8 = u8; 3 | @lang(u16) 4 | pub type u16 = u16; 5 | @lang(u32) 6 | pub type u32 = u32; 7 | @lang(u64) 8 | pub type u64 = u64; 9 | @lang(u128) 10 | pub type u128 = u128; 11 | 12 | @lang(i8) 13 | pub type i8 = i8; 14 | @lang(i16) 15 | pub type i16 = i16; 16 | @lang(i32) 17 | pub type i32 = i32; 18 | @lang(i64) 19 | pub type i64 = i64; 20 | @lang(i128) 21 | pub type i128 = i128; 22 | 23 | @lang(char) 24 | pub type char = char; 25 | 26 | @lang(f32) 27 | pub type f32 = f32; 28 | @lang(f64) 29 | pub type f64 = f64; 30 | 31 | // TODO: Switch bool to an enum. 
32 | @lang(bool) 33 | pub type bool = bool; 34 | -------------------------------------------------------------------------------- /compiler/verde/derive/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "verde-derive" 3 | version = "0.1.0" 4 | authors = ["SparkyPotato"] 5 | license = "MIT" 6 | edition = "2021" 7 | 8 | repository = "https://github.com/SparkyPotato/yam" 9 | documentation = "https://docs.rs/verde-derive" 10 | description = "Derive macros for verde" 11 | 12 | [package.metadata.docs.rs] 13 | all-features = true 14 | 15 | [features] 16 | serde = [] 17 | 18 | [lib] 19 | proc-macro = true 20 | 21 | [dependencies] 22 | proc-macro2 = { workspace = true } 23 | quote = { workspace = true } 24 | rustc-hash = { workspace = true } 25 | syn = { workspace = true, features = ["full", "extra-traits"] } 26 | -------------------------------------------------------------------------------- /docs/progress.md: -------------------------------------------------------------------------------- 1 | # Progress and quality of the compiler infrastructure 2 | 3 | ## Compiler 4 | 5 | - [x] Lexer 6 | - [x] Parser 7 | - [x] Recovery 8 | - [ ] Package and library support 9 | - [ ] Resolve names to paths from other packages. 10 | - [ ] Use both source-trees and library files to access other packages. 11 | - [ ] HIR and lowering 12 | - [x] Basic lowering 13 | - [ ] Resilience from CST 14 | - [ ] Proper name resolution 15 | - [ ] Type inference 16 | - [x] Basic inference 17 | - [ ] Full operator support 18 | - [ ] Good error reporting 19 | - [ ] Code generation 20 | - [x] Terrible Cranelift codegen. 21 | - [ ] Support for all features and types and control flow. 22 | - [ ] Compile to library files with metadata for importing. 
23 | 24 | ## LSP 25 | - [ ] Nothing :) 26 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["compiler/*", "compiler/verde/derive"] 3 | exclude = ["compiler/yamw"] 4 | 5 | resolver = "2" 6 | 7 | [profile.release] 8 | debug = true 9 | 10 | [workspace.dependencies] 11 | ariadne = "0.3" 12 | clap = "4.3" 13 | cranelift = "0.103.0" 14 | cranelift-module = "0.103.0" 15 | cranelift-object = "0.103.0" 16 | cstree = "0.12" 17 | dashmap = "5.5" 18 | expect-test = "1.4" 19 | lasso = "0.7.2" 20 | logos = "0.13" 21 | once_cell = "1.18" 22 | parking_lot = "0.12.1" 23 | pretty = "0.12.3" 24 | pretty_assertions = "1.4" 25 | proc-macro2 = "1.0" 26 | rayon = "1.7" 27 | rustc-hash = "1.1.0" 28 | serde = "1.0" 29 | syn = "2.0" 30 | target-lexicon = "0.12.12" 31 | tracing = "0.1.40" 32 | tracing-forest = "0.1" 33 | tracing-subscriber = "0.3" 34 | walkdir = "2.3" 35 | quote = "1.0" 36 | -------------------------------------------------------------------------------- /compiler/arena/src/sparse.rs: -------------------------------------------------------------------------------- 1 | use rustc_hash::FxHashMap; 2 | 3 | use crate::Ix; 4 | 5 | /// A map from `Ix` to `V` that expects to have values sparsely populated for all possible `Ix` values. 
6 | #[derive(Clone, Eq, PartialEq)] 7 | pub struct SparseMap { 8 | map: FxHashMap, V>, 9 | } 10 | 11 | impl SparseMap { 12 | pub fn new() -> Self { 13 | Self { 14 | map: FxHashMap::default(), 15 | } 16 | } 17 | 18 | pub fn insert(&mut self, key: Ix, value: V) -> Option { self.map.insert(key, value) } 19 | 20 | pub fn get(&self, key: Ix) -> Option<&V> { self.map.get(&key) } 21 | 22 | pub fn get_mut(&mut self, key: Ix) -> Option<&mut V> { self.map.get_mut(&key) } 23 | } 24 | 25 | impl Default for SparseMap { 26 | fn default() -> Self { Self::new() } 27 | } 28 | -------------------------------------------------------------------------------- /compiler/text/src/lib.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Debug; 2 | 3 | use once_cell::sync::Lazy; 4 | 5 | pub type Interner = lasso::ThreadedRodeo; 6 | 7 | static INTERNER: Lazy = Lazy::new(Interner::new); 8 | 9 | // Do not impl `Deref` to avoid excessive contention of `INTERNER`. 10 | #[derive(Copy, Clone, Eq, PartialEq, Hash, Default)] 11 | pub struct Text(lasso::Spur); 12 | 13 | impl Debug for Text { 14 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str(self.as_str()) } 15 | } 16 | 17 | impl Text { 18 | pub fn new(text: &str) -> Self { Self(INTERNER.get_or_intern(text)) } 19 | 20 | pub fn as_str(&self) -> &'static str { INTERNER.resolve(&self.0) } 21 | } 22 | 23 | impl From for Text { 24 | fn from(spur: lasso::Spur) -> Self { Self(spur) } 25 | } 26 | 27 | pub fn get_interner() -> &'static Interner { &INTERNER } 28 | -------------------------------------------------------------------------------- /compiler/parse/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![feature(try_trait_v2)] 2 | 3 | use diagnostics::FileDiagnostic; 4 | pub use syntax; 5 | use syntax::{ast::File, builder::TreeBuilderContext, AstElement, SyntaxNode}; 6 | use tracing::{span, Level}; 7 | 8 | use 
crate::parse::Parser; 9 | 10 | mod api; 11 | mod parse; 12 | #[cfg(test)] 13 | mod tests; 14 | 15 | #[derive(Default)] 16 | pub struct ParseContext { 17 | ctx: TreeBuilderContext, 18 | } 19 | 20 | impl ParseContext { 21 | pub fn new() -> Self { Self::default() } 22 | 23 | pub fn parse_file(&mut self, source: &str) -> (File, Vec) { 24 | let s = span!(Level::TRACE, "parse_file"); 25 | let _e = s.enter(); 26 | 27 | let (builder, diagnostics) = Parser::new(source, &mut self.ctx).parse(); 28 | let root = builder.finish(); 29 | let file = File::cast(SyntaxNode::new_root(root).into()).unwrap(); 30 | (file, diagnostics) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /compiler/syntax/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub use cstree::{ 2 | green::{GreenNode, GreenToken}, 3 | text::{TextRange, TextSize}, 4 | util::NodeOrToken, 5 | }; 6 | 7 | pub use crate::generated::{ast, kind::SyntaxKind, token, AstElement, AstNode, AstToken, OptionNameExt}; 8 | pub mod builder; 9 | mod generated; 10 | 11 | pub type SyntaxNode = cstree::syntax::SyntaxNode; 12 | pub type SyntaxToken = cstree::syntax::SyntaxToken; 13 | pub type SyntaxElement = cstree::syntax::SyntaxElement; 14 | pub type SyntaxElementRef<'a> = cstree::syntax::SyntaxElementRef<'a, SyntaxKind>; 15 | pub type SyntaxNodeChildren<'a> = cstree::syntax::SyntaxNodeChildren<'a, SyntaxKind>; 16 | 17 | pub type ResolvedNode = cstree::syntax::ResolvedNode; 18 | pub type ResolvedToken = cstree::syntax::ResolvedToken; 19 | pub type ResolvedElement = cstree::syntax::ResolvedElement; 20 | -------------------------------------------------------------------------------- /compiler/parse/src/tests/mod.rs: -------------------------------------------------------------------------------- 1 | use diagnostics::{test::emit_test, FilePath}; 2 | use expect_test::Expect; 3 | use pretty_assertions::assert_eq; 4 | use 
syntax::{builder::TreeBuilderContext, ResolvedNode}; 5 | 6 | use crate::Parser; 7 | 8 | mod happy; 9 | mod recovery; 10 | 11 | fn harness(source: &str, ast: Expect, diagnostics: Expect) { 12 | let mut ctx = TreeBuilderContext::new(); 13 | let (builder, out) = Parser::new(source, &mut ctx).parse(); 14 | let node = builder.finish(); 15 | 16 | let resolved = ResolvedNode::new_root_with_resolver(node, text::get_interner()); 17 | 18 | let text = resolved.text(); 19 | assert_eq!(text, source, "CST is not lossless"); 20 | 21 | let debug = fmt(&resolved); 22 | ast.assert_eq(&debug); 23 | 24 | let diags = emit_test(source, out, &FilePath::new("test")); 25 | diagnostics.assert_eq(&diags); 26 | } 27 | 28 | fn fmt(node: &ResolvedNode) -> String { 29 | let mut s = node.debug(node.resolver().as_ref(), true); 30 | s.pop(); 31 | s 32 | } 33 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | binop_separator = "Front" 2 | blank_lines_lower_bound = 0 3 | blank_lines_upper_bound = 1 4 | combine_control_expr = true 5 | comment_width = 120 6 | condense_wildcard_suffixes = true 7 | empty_item_single_line = true 8 | fn_params_layout = "Compressed" 9 | fn_single_line = true 10 | force_explicit_abi = true 11 | format_code_in_doc_comments = true 12 | format_macro_matchers = true 13 | format_macro_bodies = true 14 | format_strings = true 15 | hard_tabs = true 16 | imports_indent = "Block" 17 | imports_granularity = "Crate" 18 | imports_layout = "HorizontalVertical" 19 | indent_style = "Block" 20 | match_block_trailing_comma = true 21 | max_width = 120 22 | merge_derives = true 23 | newline_style = "Native" 24 | normalize_comments = true 25 | normalize_doc_attributes = true 26 | reorder_impl_items = true 27 | reorder_imports = true 28 | group_imports = "StdExternalCrate" 29 | trailing_semicolon = true 30 | use_field_init_shorthand = true 31 | use_try_shorthand = 
true 32 | wrap_comments = true 33 | -------------------------------------------------------------------------------- /compiler/verde/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "verde" 3 | version = "0.2.1" 4 | authors = ["SparkyPotato"] 5 | license = "MIT" 6 | edition = "2021" 7 | 8 | readme = "README.md" 9 | repository = "https://github.com/SparkyPotato/yam" 10 | documentation = "https://docs.rs/verde" 11 | description = "A refreshingly simple incremental computation library" 12 | 13 | [package.metadata.docs.rs] 14 | all-features = true 15 | 16 | [features] 17 | serde = [ 18 | "dep:serde", 19 | "dashmap/serde", 20 | "verde-derive/serde", 21 | "parking_lot/serde", 22 | ] 23 | test = [] 24 | tracing = ["dep:tracing"] 25 | debug = ["parking_lot/deadlock_detection"] 26 | 27 | [dependencies] 28 | verde-derive = { version = "0.1.0", path = "derive" } 29 | 30 | dashmap = { workspace = true } 31 | parking_lot = { workspace = true, features = ["hardware-lock-elision"] } 32 | rustc-hash = { workspace = true } 33 | serde = { workspace = true, optional = true, features = ["derive"] } 34 | tracing = { workspace = true, optional = true } 35 | -------------------------------------------------------------------------------- /compiler/lex/src/lib.rs: -------------------------------------------------------------------------------- 1 | use diagnostics::FileSpan; 2 | 3 | use crate::token::{Token, TokenKind}; 4 | 5 | #[cfg(test)] 6 | mod tests; 7 | pub mod token; 8 | 9 | pub struct Lexer<'s> { 10 | inner: logos::Lexer<'s, TokenKind>, 11 | } 12 | 13 | impl<'s> Lexer<'s> { 14 | pub fn new(source: &'s str) -> Self { 15 | Self { 16 | inner: logos::Lexer::new(source), 17 | } 18 | } 19 | 20 | pub fn source(&self) -> &'s str { self.inner.source() } 21 | } 22 | 23 | impl Lexer<'_> { 24 | pub fn next(&mut self) -> Token { 25 | let token = self.inner.next(); 26 | let span = self.inner.span(); 27 | match token { 28 | 
Some(token) => Token { 29 | kind: token.unwrap_or(TokenKind::Error), 30 | span: FileSpan { 31 | start: span.start as _, 32 | end: span.end as _, 33 | relative: (), 34 | }, 35 | }, 36 | None => Token { 37 | kind: T![eof], 38 | span: self.eof_span(), 39 | }, 40 | } 41 | } 42 | 43 | pub fn eof_span(&self) -> FileSpan { 44 | FileSpan { 45 | start: self.source().len() as u32 - 1, 46 | end: self.source().len() as _, 47 | relative: (), 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Shaye Garg 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /compiler/hir/src/ident.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Debug; 2 | 3 | use text::Text; 4 | use verde::{Db, Id, Interned}; 5 | 6 | #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] 7 | pub struct PackageId(pub u32); 8 | 9 | /// An absolute path. 10 | #[derive(Interned, Copy, Clone, PartialEq, Eq, Hash)] 11 | pub enum AbsPath { 12 | Package(PackageId), 13 | Name { prec: Id, name: Text }, 14 | } 15 | 16 | impl AbsPath { 17 | pub fn segments(db: &dyn Db, mut path: Id) -> (PackageId, Vec) { 18 | let mut names = Vec::new(); 19 | loop { 20 | match *db.geti(path) { 21 | Self::Package(id) => return (id, names), 22 | Self::Name { prec, name } => { 23 | path = prec; 24 | names.push(name); 25 | }, 26 | } 27 | } 28 | } 29 | } 30 | 31 | impl From for AbsPath { 32 | fn from(x: PackageId) -> Self { Self::Package(x) } 33 | } 34 | 35 | pub trait DebugAbsPath { 36 | type Output<'a>: Debug; 37 | 38 | fn debug<'a>(self, db: &'a dyn Db) -> Self::Output<'a>; 39 | } 40 | 41 | impl DebugAbsPath for Id { 42 | type Output<'a> = DebugAbsPathStruct<'a>; 43 | 44 | fn debug<'a>(self, db: &'a dyn Db) -> Self::Output<'a> { DebugAbsPathStruct(self, db) } 45 | } 46 | 47 | pub struct DebugAbsPathStruct<'a>(Id, &'a dyn Db); 48 | 49 | impl<'a> Debug for DebugAbsPathStruct<'a> { 50 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 51 | match *self.1.geti(self.0) { 52 | AbsPath::Package(x) => write!(f, "#{}", x.0), 53 | AbsPath::Name { prec, name } => write!(f, "{:?}.{}", prec.debug(self.1), name.as_str()), 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /compiler/verde/derive/src/pushable.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::TokenStream; 2 | use quote::quote; 3 | use syn::{DeriveInput, GenericParam}; 4 
| 5 | use crate::Result; 6 | 7 | pub(crate) fn pushable(input: DeriveInput) -> Result { 8 | let name = input.ident; 9 | let (impl_, ty, where_) = input.generics.split_for_impl(); 10 | let bounds: Vec<_> = where_ 11 | .into_iter() 12 | .map(|x| { 13 | let pred = x.predicates.iter(); 14 | quote! { #(#pred)* } 15 | }) 16 | .chain(input.generics.params.iter().filter_map(|x| match x { 17 | GenericParam::Type(x) => { 18 | let ident = &x.ident; 19 | Some(quote! { 20 | #ident: ::std::clone::Clone + ::std::marker::Send + 'static 21 | }) 22 | }, 23 | GenericParam::Lifetime(_) => None, 24 | GenericParam::Const(_) => None, 25 | })) 26 | .collect(); 27 | 28 | Ok(quote! { 29 | impl #impl_ ::verde::internal::Storable for #name #ty 30 | where #(#bounds)* { 31 | type Storage = ::verde::internal::storage::PushableStorage; 32 | 33 | fn tracked_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedTrackedStorage> { 34 | None 35 | } 36 | 37 | fn query_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedQueryStorage> { 38 | None 39 | } 40 | 41 | fn pushable_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedPushableStorage> { 42 | Some(store) 43 | } 44 | 45 | fn interned_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedInternedStorage> { 46 | None 47 | } 48 | } 49 | 50 | impl #impl_ ::verde::Pushable for #name #ty where #(#bounds)* {} 51 | }) 52 | } 53 | -------------------------------------------------------------------------------- /compiler/arena/src/dense.rs: -------------------------------------------------------------------------------- 1 | use std::{fmt::Debug, marker::PhantomData, ops::Index}; 2 | 3 | use crate::Ix; 4 | 5 | /// A map from `Ix` to `V` that expects to have values densely populated for all possible `Ix` values. 
6 | #[derive(Clone, Eq, PartialEq)] 7 | pub struct DenseMap { 8 | map: Vec>, 9 | _phantom: PhantomData K>, 10 | } 11 | 12 | impl DenseMap { 13 | pub fn new() -> Self { 14 | Self { 15 | map: Vec::new(), 16 | _phantom: PhantomData, 17 | } 18 | } 19 | 20 | pub fn with_capacity(capacity: usize) -> Self { 21 | Self { 22 | map: Vec::with_capacity(capacity), 23 | _phantom: PhantomData, 24 | } 25 | } 26 | 27 | pub fn insert(&mut self, key: Ix, value: V) -> Option { 28 | let key = key.index(); 29 | if key >= self.map.len() { 30 | self.map.resize_with(key + 1, || None); 31 | } 32 | self.map[key].replace(value) 33 | } 34 | 35 | pub fn get(&self, key: Ix) -> Option<&V> { self.map.get(key.index()).and_then(|x| x.as_ref()) } 36 | 37 | pub fn get_mut(&mut self, key: Ix) -> Option<&mut V> { self.map.get_mut(key.index()).and_then(|x| x.as_mut()) } 38 | 39 | pub fn len(&self) -> usize { self.map.iter().filter(|x| x.is_some()).count() } 40 | 41 | pub fn iter(&self) -> impl Iterator, &V)> + '_ { 42 | self.map 43 | .iter() 44 | .enumerate() 45 | .filter_map(|(i, x)| x.as_ref().map(|x| (Ix::new(i), x))) 46 | } 47 | } 48 | 49 | impl Default for DenseMap { 50 | fn default() -> Self { Self::new() } 51 | } 52 | 53 | impl Index> for DenseMap { 54 | type Output = V; 55 | 56 | fn index(&self, index: Ix) -> &Self::Output { self.get(index).unwrap() } 57 | } 58 | 59 | impl Debug for DenseMap { 60 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_map().entries(self.iter()).finish() } 61 | } 62 | -------------------------------------------------------------------------------- /compiler/diagnostics/src/lib.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | fmt::{Debug, Display}, 3 | ops::Range, 4 | }; 5 | 6 | use ariadne::{Cache, Report, Source}; 7 | use rustc_hash::FxHashMap; 8 | 9 | mod diag; 10 | mod span; 11 | 12 | pub use crate::{diag::*, span::*}; 13 | 14 | /// A cache storing the contents of files. 
15 | #[derive(Default)] 16 | pub struct FileCache { 17 | files: FxHashMap, 18 | } 19 | 20 | impl FileCache { 21 | pub fn new() -> Self { Self::default() } 22 | 23 | pub fn set_file(&mut self, file: FilePath, data: String) { self.files.insert(file, Source::from(data)); } 24 | 25 | pub fn set_files(&mut self, files: impl IntoIterator) { 26 | self.files.extend(files.into_iter().map(|(x, y)| (x, Source::from(y)))); 27 | } 28 | } 29 | 30 | impl Cache for &FileCache { 31 | fn fetch(&mut self, id: &FilePath) -> Result<&Source, Box> { 32 | Ok(self.files.get(id).expect("Invalid file")) 33 | } 34 | 35 | fn display<'a>(&self, id: &'a FilePath) -> Option> { Some(Box::new(id)) } 36 | } 37 | 38 | /// Emit diagnostics with a cache and span resolution context. 39 | pub fn emit(diags: impl IntoIterator>, cache: &FileCache, ctx: &S::Ctx) { 40 | for diag in diags { 41 | diag.emit(cache, ctx); 42 | } 43 | } 44 | 45 | /// Emit a quick diagnostic with no source. 46 | pub fn quick_diagnostic(kind: DiagKind, message: impl ToString) { 47 | Report::>::build(kind.into_report_kind(), (), 0) 48 | .with_message(message) 49 | .finish() 50 | .eprint(Source::from("")) 51 | .unwrap(); 52 | } 53 | 54 | pub mod test { 55 | use crate::{Diagnostic, Span}; 56 | 57 | pub fn emit_test(source: &str, diags: impl IntoIterator>, ctx: &S::Ctx) -> String 58 | where 59 | S: Span, 60 | { 61 | let mut s = String::new(); 62 | for diag in diags { 63 | s += &diag.emit_test(source, ctx); 64 | } 65 | s 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /compiler/verde/src/internal/storage/mod.rs: -------------------------------------------------------------------------------- 1 | #![allow(ambiguous_glob_reexports)] 2 | 3 | use std::{fmt::Debug, hash::BuildHasherDefault, marker::PhantomData, num::NonZeroU64}; 4 | 5 | use rustc_hash::FxHasher; 6 | 7 | pub mod interned; 8 | mod pushable; 9 | mod query; 10 | mod routing; 11 | pub mod tracked; 12 | 13 | pub use interned::*; 
14 | pub use pushable::*; 15 | pub use query::*; 16 | pub use routing::*; 17 | pub use tracked::*; 18 | 19 | pub(crate) type DashMap = dashmap::DashMap>; 20 | 21 | /// An instance of `T` stored in the database. 22 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 23 | pub struct Id(NonZeroU64, PhantomData T>); 24 | 25 | impl Debug for Id { 26 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 27 | write!(f, "{}({})", std::any::type_name::(), self.get().index) 28 | } 29 | } 30 | 31 | impl Clone for Id { 32 | fn clone(&self) -> Self { *self } 33 | } 34 | impl Copy for Id {} 35 | 36 | impl PartialEq for Id { 37 | fn eq(&self, other: &Self) -> bool { self.0 == other.0 } 38 | } 39 | impl Eq for Id {} 40 | 41 | impl std::hash::Hash for Id { 42 | fn hash(&self, state: &mut H) { self.0.hash(state); } 43 | } 44 | 45 | impl Id { 46 | pub(crate) fn new(index: u32, route: Route) -> Self { 47 | let route_storage = route.storage; 48 | let route_index = route.index; 49 | Self( 50 | NonZeroU64::new((index as u64) | ((route_index as u64) << 32) | ((route_storage as u64) << 48)).unwrap(), 51 | PhantomData, 52 | ) 53 | } 54 | 55 | pub(crate) fn get(self) -> ErasedId { 56 | let index = self.0.get() as u32; 57 | let route_storage = (self.0.get() >> 48) as u16; 58 | let route_index = (self.0.get() >> 32) as u16; 59 | ErasedId { 60 | index, 61 | route: Route { 62 | storage: route_storage, 63 | index: route_index, 64 | }, 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /compiler/syntax/src/builder.rs: -------------------------------------------------------------------------------- 1 | use cstree::{ 2 | build::{GreenNodeBuilder, NodeCache}, 3 | green::GreenNode, 4 | }; 5 | 6 | use crate::SyntaxKind; 7 | 8 | #[repr(transparent)] 9 | pub struct TreeBuilderContext { 10 | cache: NodeCache<'static, &'static text::Interner>, 11 | } 12 | 13 | impl TreeBuilderContext { 14 | pub fn new() -> 
Self { 15 | Self { 16 | cache: NodeCache::from_interner(text::get_interner()), 17 | } 18 | } 19 | } 20 | 21 | impl Default for TreeBuilderContext { 22 | fn default() -> Self { Self::new() } 23 | } 24 | 25 | pub struct TreeBuilder<'c> { 26 | builder: GreenNodeBuilder<'c, 'static, SyntaxKind, &'static text::Interner>, 27 | node_depth: usize, 28 | } 29 | 30 | impl<'c> TreeBuilder<'c> { 31 | pub fn new(context: &'c mut TreeBuilderContext) -> Self { 32 | Self { 33 | builder: GreenNodeBuilder::with_cache(&mut context.cache), 34 | node_depth: 0, 35 | } 36 | } 37 | } 38 | 39 | impl TreeBuilder<'_> { 40 | pub fn token(&mut self, kind: SyntaxKind, text: &str) { self.builder.token(kind.into(), text) } 41 | 42 | pub fn node_depth(&self) -> usize { self.node_depth } 43 | 44 | pub fn start_node(&mut self, kind: SyntaxKind) { 45 | self.builder.start_node(kind.into()); 46 | self.node_depth += 1; 47 | } 48 | 49 | pub fn finish_node(&mut self) { 50 | self.node_depth -= 1; 51 | self.builder.finish_node(); 52 | } 53 | 54 | pub fn finish_node_at(&mut self, node_depth: usize) { 55 | while self.node_depth > node_depth { 56 | self.finish_node(); 57 | } 58 | } 59 | 60 | pub fn checkpoint(&self) -> Checkpoint { Checkpoint(self.builder.checkpoint()) } 61 | 62 | pub fn start_node_at(&mut self, checkpoint: Checkpoint, kind: SyntaxKind) { 63 | self.builder.start_node_at(checkpoint.0, kind.into()); 64 | self.node_depth += 1; 65 | } 66 | 67 | pub fn finish(self) -> GreenNode { self.builder.finish().0 } 68 | } 69 | 70 | #[repr(transparent)] 71 | #[derive(Clone, Copy, Debug)] 72 | pub struct Checkpoint(cstree::build::Checkpoint); 73 | -------------------------------------------------------------------------------- /compiler/verde/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![doc = include_str!("../README.md")] 2 | 3 | #[cfg(feature = "serde")] 4 | pub use serde; 5 | pub use verde_derive::{db, query, storage, Interned, Pushable, Tracked}; 6 | 7 | 
pub use crate::internal::{storage::Id, Ctx, Db, Interned, Pushable, Tracked}; 8 | 9 | #[doc(hidden)] 10 | pub mod internal; 11 | #[cfg(any(feature = "test", test))] 12 | pub mod test; 13 | 14 | #[cfg(feature = "tracing")] 15 | macro_rules! span { 16 | (enter $($x:tt)*) => { 17 | let _e = crate::span!($($x)*).entered(); 18 | }; 19 | 20 | (trace, $($x:tt)*) => { 21 | tracing::span!(tracing::Level::TRACE, $($x)*) 22 | }; 23 | (debug, $($x:tt)*) => { 24 | tracing::span!(tracing::Level::DEBUG, $($x)*) 25 | }; 26 | (info, $($x:tt)*) => { 27 | tracing::span!(tracing::Level::INFO, $($x)*) 28 | }; 29 | (warn, $($x:tt)*) => { 30 | tracing::span!(tracing::Level::WARN, $($x)*) 31 | }; 32 | (error, $($x:tt)*) => { 33 | tracing::span!(tracing::Level::ERROR, $($x)*) 34 | }; 35 | } 36 | 37 | #[cfg(not(feature = "tracing"))] 38 | macro_rules! span { 39 | ($($x:tt)*) => {{ 40 | let x = crate::Span; 41 | x 42 | }}; 43 | } 44 | 45 | use span; 46 | 47 | #[cfg(feature = "tracing")] 48 | macro_rules! event { 49 | (trace, $($x:tt)*) => { 50 | tracing::event!(tracing::Level::TRACE, $($x)*) 51 | }; 52 | (debug, $($x:tt)*) => { 53 | tracing::event!(tracing::Level::DEBUG, $($x)*) 54 | }; 55 | (info, $($x:tt)*) => { 56 | tracing::event!(tracing::Level::INFO, $($x)*) 57 | }; 58 | (warn, $($x:tt)*) => { 59 | tracing::event!(tracing::Level::WARN, $($x)*) 60 | }; 61 | (error, $($x:tt)*) => { 62 | tracing::event!(tracing::Level::ERROR, $($x)*) 63 | }; 64 | } 65 | 66 | #[cfg(not(feature = "tracing"))] 67 | macro_rules! 
event { 68 | ($($x:tt)*) => { 69 | () 70 | }; 71 | } 72 | 73 | use event; 74 | 75 | #[cfg(not(feature = "tracing"))] 76 | struct Span; 77 | 78 | #[cfg(not(feature = "tracing"))] 79 | impl Span { 80 | fn record(&self, _: &Q, _: V) -> &Self { self } 81 | 82 | fn enter(&self) {} 83 | } 84 | -------------------------------------------------------------------------------- /compiler/thir/src/lib.rs: -------------------------------------------------------------------------------- 1 | use arena::dense::DenseMap; 2 | use hir::ident::AbsPath; 3 | use rustc_hash::FxHashMap; 4 | use verde::{storage, Id, Interned, Tracked}; 5 | 6 | pub struct Thir { 7 | pub hir: FxHashMap, Id>, 8 | pub decls: FxHashMap, Id>, 9 | pub items: FxHashMap, Id>, 10 | } 11 | 12 | #[storage] 13 | pub struct Storage(ItemDecl, Item, Type); 14 | 15 | #[derive(Tracked, Clone, PartialEq, Eq, Debug)] 16 | pub struct ItemDecl { 17 | #[id] 18 | pub path: Id, 19 | pub kind: ItemDeclKind, 20 | } 21 | 22 | #[derive(Tracked, Clone, PartialEq, Eq, Debug)] 23 | pub struct Item { 24 | #[id] 25 | pub path: Id, 26 | pub decl: Id, 27 | pub locals: DenseMap>, 28 | pub exprs: DenseMap>, 29 | } 30 | 31 | #[derive(Clone, PartialEq, Eq, Debug)] 32 | pub enum ItemDeclKind { 33 | Struct(StructDecl), 34 | Enum(EnumDecl), 35 | Fn(FnDecl), 36 | TypeAlias(TypeAliasDecl), 37 | Static(StaticDecl), 38 | } 39 | 40 | #[derive(Clone, PartialEq, Eq, Debug)] 41 | pub struct StructDecl { 42 | pub fields: DenseMap>, 43 | pub ty: Id, 44 | } 45 | 46 | #[derive(Clone, PartialEq, Eq, Debug)] 47 | pub struct EnumDecl { 48 | pub repr: hir::LangItem, 49 | pub ty: Id, 50 | } 51 | 52 | #[derive(Clone, PartialEq, Eq, Debug)] 53 | pub struct FnDecl { 54 | pub params: DenseMap>, 55 | pub ret: Id, 56 | pub ty: Id, 57 | } 58 | 59 | #[derive(Clone, PartialEq, Eq, Debug)] 60 | pub struct TypeAliasDecl { 61 | pub ty: Id, 62 | } 63 | 64 | #[derive(Clone, PartialEq, Eq, Debug)] 65 | pub struct StaticDecl { 66 | pub ty: Id, 67 | } 68 | 69 | #[derive(Clone, 
PartialEq, Eq, Hash, Interned, Debug)] 70 | pub enum Type { 71 | Array(ArrayType), 72 | Fn(FnType), 73 | Struct(Id), 74 | Enum(Id), 75 | Ptr(PtrType), 76 | LangItem(hir::LangItem), 77 | Void, 78 | Error, 79 | } 80 | 81 | #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] 82 | pub struct ArrayType { 83 | pub ty: Id, 84 | pub len: u64, 85 | } 86 | 87 | #[derive(Clone, PartialEq, Eq, Hash, Debug)] 88 | pub struct FnType { 89 | pub abi: Option<&'static str>, 90 | pub params: Vec>, 91 | pub ret: Id, 92 | } 93 | 94 | #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] 95 | pub struct PtrType { 96 | pub mutable: bool, 97 | pub ty: Id, 98 | } 99 | -------------------------------------------------------------------------------- /compiler/yamc/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | 3 | use clap::Parser; 4 | use diagnostics::{quick_diagnostic, DiagKind, FilePath}; 5 | use driver::{target::Triple, CodegenOptions, CompileInput, Database, SourceFile}; 6 | use tracing_forest::ForestLayer; 7 | use tracing_subscriber::{prelude::*, EnvFilter, Registry}; 8 | use walkdir::WalkDir; 9 | 10 | #[derive(Parser)] 11 | #[command(author, version, about, long_about = None)] 12 | struct Options { 13 | /// The root file of the package 14 | path: PathBuf, 15 | /// The output object file. 
16 | output: Option, 17 | #[arg(long)] 18 | verify_ir: bool, 19 | #[arg(long)] 20 | emit_hir: bool, 21 | #[arg(long)] 22 | emit_ir: bool, 23 | #[arg(long)] 24 | check: bool, 25 | } 26 | 27 | fn main() { 28 | Registry::default() 29 | .with(ForestLayer::default().with_filter(EnvFilter::from_env("YAMLOG"))) 30 | .init(); 31 | 32 | let options = Options::parse(); 33 | 34 | let Some(root_dir) = options.path.parent() else { 35 | quick_diagnostic(DiagKind::Error, format!("expected file: {}", options.path.display())); 36 | return; 37 | }; 38 | let files: Vec<_> = load_file(&options.path) 39 | .into_iter() 40 | .chain( 41 | WalkDir::new(root_dir) 42 | .into_iter() 43 | .filter_map(|x| x.ok()) 44 | .filter_map(|x| { 45 | let path = x.path(); 46 | let is_yam = path.extension().map(|x| x == "yam").unwrap_or(false); 47 | let is_root = path == options.path; 48 | (is_yam && !is_root).then(|| path.to_owned()) 49 | }) 50 | .filter_map(|path| load_file(&path)), 51 | ) 52 | .collect(); 53 | 54 | if files.is_empty() { 55 | return; 56 | } 57 | 58 | driver::compile(CompileInput { 59 | db: Database::default(), 60 | files, 61 | codegen_options: CodegenOptions { 62 | name: options.path.file_stem().unwrap().to_string_lossy().into(), 63 | target: Triple::host(), 64 | verify_ir: options.verify_ir, 65 | emit_ir: options.emit_ir, 66 | }, 67 | emit_hir: options.emit_hir, 68 | output: (!options.check).then(|| { 69 | FilePath::new( 70 | &options 71 | .output 72 | .unwrap_or_else(|| { 73 | let mut path = options.path.clone(); 74 | path.set_extension("o"); 75 | path 76 | }) 77 | .to_str() 78 | .unwrap(), 79 | ) 80 | }), 81 | }); 82 | } 83 | 84 | fn load_file(path: &Path) -> Option { 85 | let source = std::fs::read_to_string(path) 86 | .map_err(|err| { 87 | quick_diagnostic( 88 | DiagKind::Error, 89 | format!("failed to read file `{}`: {}", path.display(), err), 90 | ); 91 | }) 92 | .ok()?; 93 | let path = FilePath::new(&path.to_string_lossy()); 94 | Some(SourceFile { path, source }) 95 | } 96 | 
-------------------------------------------------------------------------------- /compiler/arena/src/lib.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | fmt::Debug, 3 | hash::Hash, 4 | marker::PhantomData, 5 | num::NonZeroU32, 6 | ops::{Index, IndexMut}, 7 | }; 8 | 9 | pub mod dense; 10 | pub mod sparse; 11 | 12 | /// An index into an arena. 13 | pub struct Ix(NonZeroU32, PhantomData T>); 14 | 15 | impl Ix { 16 | pub fn new(index: usize) -> Self { Self(NonZeroU32::new(index as u32 + 1).expect("Arena overflow"), PhantomData) } 17 | 18 | pub fn index(self) -> usize { self.0.get() as usize - 1 } 19 | } 20 | 21 | /// An arena of elements of type `T`. 22 | #[derive(Clone, Eq, PartialEq)] 23 | pub struct Arena { 24 | elems: Vec, 25 | } 26 | 27 | impl Arena { 28 | pub fn new() -> Self { Self { elems: Vec::new() } } 29 | 30 | pub fn push(&mut self, elem: T) -> Ix { 31 | let ix = self.elems.len(); 32 | self.elems.push(elem); 33 | Ix::new(ix) 34 | } 35 | 36 | pub fn ids(&self) -> impl Iterator> + '_ { self.elems.iter().enumerate().map(|(i, _)| Ix::new(i)) } 37 | 38 | pub fn iter(&self) -> impl Iterator + '_ { self.elems.iter() } 39 | 40 | pub fn iter_mut(&mut self) -> impl Iterator + '_ { self.elems.iter_mut() } 41 | 42 | pub fn ids_iter(&self) -> impl Iterator, &T)> + '_ { 43 | self.elems.iter().enumerate().map(|(i, e)| (Ix::new(i), e)) 44 | } 45 | 46 | pub fn len(&self) -> usize { self.elems.len() } 47 | } 48 | 49 | impl FromIterator for Arena { 50 | fn from_iter>(iter: I) -> Self { 51 | Self { 52 | elems: FromIterator::from_iter(iter), 53 | } 54 | } 55 | } 56 | 57 | impl Index> for Arena { 58 | type Output = T; 59 | 60 | fn index(&self, ix: Ix) -> &Self::Output { self.elems.index(ix.index()) } 61 | } 62 | 63 | impl IndexMut> for Arena { 64 | fn index_mut(&mut self, ix: Ix) -> &mut Self::Output { self.elems.index_mut(ix.index()) } 65 | } 66 | 67 | impl Default for Arena { 68 | fn default() -> Self { Self::new() } 69 | 
} 70 | 71 | impl Debug for Arena { 72 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.elems.fmt(f) } 73 | } 74 | 75 | impl Clone for Ix { 76 | fn clone(&self) -> Self { *self } 77 | } 78 | impl Copy for Ix {} 79 | 80 | impl PartialEq for Ix { 81 | fn eq(&self, other: &Self) -> bool { self.0 == other.0 } 82 | } 83 | impl Eq for Ix {} 84 | 85 | impl Hash for Ix { 86 | fn hash(&self, state: &mut H) { self.0.hash(state) } 87 | } 88 | 89 | impl Debug for Ix { 90 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 91 | write!(f, "Ix<{}>({})", std::any::type_name::(), self.0) 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /compiler/tycheck/src/reader.rs: -------------------------------------------------------------------------------- 1 | use arena::{Arena, Ix}; 2 | use diagnostics::Span; 3 | use hir::{ident::AbsPath, lang_item::LangItemMap}; 4 | use rustc_hash::FxHashMap; 5 | use verde::{Ctx, Id}; 6 | 7 | pub struct HirReader<'a> { 8 | pub path: Id, 9 | pub types: &'a Arena, 10 | pub exprs: &'a Arena, 11 | pub locals: &'a Arena, 12 | pub lang_items: &'a LangItemMap, 13 | pub items: &'a FxHashMap, Id>, 14 | } 15 | 16 | impl<'a> HirReader<'a> { 17 | pub fn new( 18 | item: &'a hir::Item, lang_items: &'a LangItemMap, items: &'a FxHashMap, Id>, 19 | ) -> Self { 20 | Self { 21 | path: item.path, 22 | types: &item.types, 23 | exprs: &item.exprs, 24 | locals: &item.locals, 25 | lang_items, 26 | items, 27 | } 28 | } 29 | 30 | pub fn req_type(&self, ctx: &Ctx, ty: Ix) -> Id { 31 | let ty = &self.types[ty]; 32 | let ty = match ty.kind { 33 | hir::TypeKind::Array(ref a) => thir::Type::Array(thir::ArrayType { 34 | ty: self.req_type(ctx, a.ty), 35 | len: self.array_len(ctx, a.len), 36 | }), 37 | hir::TypeKind::Fn(ref f) => thir::Type::Fn(thir::FnType { 38 | abi: f.abi.as_ref().map(|x| x.abi.as_ref().map(|x| x.abi).unwrap_or("C")), 39 | params: f.params.iter().map(|&ty| self.req_type(ctx, 
ty)).collect(), 40 | ret: f 41 | .ret 42 | .map(|ty| self.req_type(ctx, ty)) 43 | .unwrap_or_else(|| ctx.add(thir::Type::Void)), 44 | }), 45 | hir::TypeKind::Infer => { 46 | let span = ty.id.erased(); 47 | ctx.push( 48 | span.error("expected type, found `_`") 49 | .label(span.label("cannot infer type here")), 50 | ); 51 | thir::Type::Error 52 | }, 53 | hir::TypeKind::Struct(s) => thir::Type::Struct(s), 54 | hir::TypeKind::Enum(e) => thir::Type::Enum(e), 55 | hir::TypeKind::Alias(p) => self.type_alias(p), 56 | hir::TypeKind::Ptr(p) => thir::Type::Ptr(thir::PtrType { 57 | mutable: p.mutable, 58 | ty: self.req_type(ctx, p.ty), 59 | }), 60 | hir::TypeKind::Error => thir::Type::Error, 61 | }; 62 | ctx.add(ty) 63 | } 64 | 65 | pub fn type_alias(&self, target: Id) -> thir::Type { 66 | match self.lang_items.get_lang_item_of(target) { 67 | Some(x) => thir::Type::LangItem(x), 68 | None => todo!(), 69 | } 70 | } 71 | 72 | pub fn array_len(&self, ctx: &Ctx, len: Ix) -> u64 { 73 | let expr = &self.exprs[len]; 74 | let span = expr.id.erased(); 75 | match expr.kind { 76 | hir::ExprKind::Literal(l) => match l { 77 | hir::Literal::Int(i) => return i as _, 78 | _ => ctx.push( 79 | span.error("expected `{int}`") 80 | .label(span.label("array lengths must be integers")), 81 | ), 82 | }, 83 | _ => ctx.push( 84 | span.error("expected `{int}`") 85 | .label(span.label("array lengths must be literals")), 86 | ), 87 | }; 88 | 89 | 0 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /compiler/hir-lower/src/prelude.rs: -------------------------------------------------------------------------------- 1 | use hir::ident::{AbsPath, PackageId}; 2 | use rustc_hash::FxHashMap; 3 | use text::Text; 4 | use tracing::{span, Level}; 5 | use verde::{query, Ctx, Id, Tracked}; 6 | 7 | use crate::{ 8 | index::{Declaration, ModuleTree, NameTy, PackageTree}, 9 | resolve::{GlobalResolution, GlobalResolver}, 10 | VisiblePackages, 11 | }; 12 | 13 | #[derive(Copy, 
Clone, Eq, PartialEq)] 14 | pub enum PreludeItem { 15 | Module(Id), 16 | Item { path: Id, ty: NameTy }, 17 | } 18 | 19 | #[derive(Tracked, Eq, PartialEq)] 20 | pub struct Prelude { 21 | #[id] 22 | id: (), 23 | pub prelude: FxHashMap, 24 | } 25 | 26 | #[derive(Tracked, Eq, PartialEq)] 27 | pub struct PackagePrelude { 28 | #[id] 29 | pkg: PackageId, 30 | prelude: FxHashMap, 31 | } 32 | 33 | #[query] 34 | pub fn get_prelude(ctx: &Ctx, tree: Id) -> Prelude { 35 | let s = span!(Level::DEBUG, "generate prelude"); 36 | let _e = s.enter(); 37 | 38 | // TODO: sort based on visible packages 39 | let t = ctx.get(tree); 40 | 41 | let mut prelude = FxHashMap::default(); 42 | for (&pkg, &t) in t.packages.iter() { 43 | let p = get_prelude_from_package(ctx, pkg, t); 44 | let p = ctx.get(p); 45 | prelude.extend(p.prelude.iter().map(|(&k, &v)| (k, v))); 46 | } 47 | 48 | Prelude { id: (), prelude } 49 | } 50 | 51 | #[query] 52 | pub fn get_prelude_from_package(ctx: &Ctx, pkg: PackageId, t: Id) -> PackagePrelude { 53 | let s = span!(Level::DEBUG, "generate package prelude", pkg = ?pkg); 54 | let _e = s.enter(); 55 | 56 | let tree = ctx.get(t); 57 | let Some(index) = tree.index.and_then(|x| { 58 | let i = ctx.get(x); 59 | i.has_prelude.then_some(i) 60 | }) else { 61 | return PackagePrelude { 62 | pkg, 63 | prelude: FxHashMap::default(), 64 | }; 65 | }; 66 | 67 | // The prelude is resolved using only the current package. 
68 | let packages = VisiblePackages { 69 | package: PackageId(0), 70 | packages: { 71 | let mut m = FxHashMap::default(); 72 | m.insert(Text::new("root"), PackageId(0)); 73 | m 74 | }, 75 | }; 76 | let ptree = PackageTree { 77 | id: (), 78 | packages: { 79 | let mut m = FxHashMap::default(); 80 | m.insert(PackageId(0), t); 81 | m 82 | }, 83 | }; 84 | let resolver = GlobalResolver::new(ctx, tree.path, None, None, &packages, &ptree); 85 | let mut prelude = FxHashMap::default(); 86 | for (&name, decl) in index.decls.iter() { 87 | match decl { 88 | Declaration::Import { path, is_prelude, .. } if *is_prelude => { 89 | let item = match resolver.resolve_prelude(t, path.clone()) { 90 | GlobalResolution::Module(m) => PreludeItem::Module(m), 91 | GlobalResolution::Item { path, ty, .. } => PreludeItem::Item { path, ty }, 92 | GlobalResolution::Error => continue, 93 | }; 94 | prelude.insert(name, item); 95 | }, 96 | _ => {}, 97 | } 98 | } 99 | 100 | PackagePrelude { pkg, prelude } 101 | } 102 | -------------------------------------------------------------------------------- /compiler/syntax/src/generated/mod.rs: -------------------------------------------------------------------------------- 1 | use diagnostics::FileSpan; 2 | use text::Text; 3 | 4 | use self::token::{LParen, RParen}; 5 | use crate::{ 6 | ast::{Name, PathSegment}, 7 | SyntaxElement, 8 | SyntaxElementRef, 9 | SyntaxKind, 10 | SyntaxNode, 11 | SyntaxToken, 12 | }; 13 | 14 | pub mod ast; 15 | pub mod kind; 16 | pub mod token; 17 | 18 | pub trait AstNode: Sized {} 19 | 20 | pub trait AstToken: Sized { 21 | fn text(&self) -> Text; 22 | } 23 | 24 | pub trait AstElement: Sized { 25 | fn can_cast(kind: SyntaxKind) -> bool; 26 | 27 | fn cast(elem: SyntaxElement) -> Option; 28 | 29 | fn span(&self) -> FileSpan; 30 | 31 | fn inner(self) -> SyntaxElement; 32 | } 33 | 34 | fn children<'a, T: 'a + AstElement>(node: &'a SyntaxNode) -> impl Iterator + 'a { 35 | node.children_with_tokens() 36 | .map(|x| match x { 37 | 
SyntaxElementRef::Node(node) => SyntaxElement::Node(node.clone()), 38 | SyntaxElementRef::Token(token) => SyntaxElement::Token(token.clone()), 39 | }) 40 | .filter_map(T::cast) 41 | } 42 | 43 | pub struct TokenTree(SyntaxNode); 44 | impl AstNode for TokenTree {} 45 | impl AstElement for TokenTree { 46 | fn can_cast(kind: SyntaxKind) -> bool { kind == SyntaxKind::TokenTree } 47 | 48 | fn cast(elem: SyntaxElement) -> Option { 49 | let node = elem.into_node()?; 50 | Self::can_cast(node.kind()).then(|| Self(node)) 51 | } 52 | 53 | fn span(&self) -> FileSpan { 54 | let range = self.0.text_range(); 55 | FileSpan { 56 | start: range.start().into(), 57 | end: range.end().into(), 58 | relative: (), 59 | } 60 | } 61 | 62 | fn inner(self) -> SyntaxElement { self.0.into() } 63 | } 64 | 65 | impl TokenTree { 66 | pub fn l_paren(&self) -> Option { children(&self.0).nth(0) } 67 | 68 | pub fn tokens(&self) -> impl Iterator + '_ { 69 | self.0 70 | .children_with_tokens() 71 | .filter_map(|x| match x { 72 | SyntaxElementRef::Token(token) if token.kind() != SyntaxKind::Whitespace => Some(token.clone()), 73 | _ => None, 74 | }) 75 | .skip(1) 76 | .take_while(|x| x.kind() != SyntaxKind::RParen) 77 | } 78 | 79 | pub fn r_paren(&self) -> Option { children(&self.0).nth(0) } 80 | } 81 | 82 | pub trait OptionNameExt { 83 | fn text(&self) -> Option; 84 | } 85 | 86 | impl OptionNameExt for Option { 87 | fn text(&self) -> Option { self.as_ref().and_then(|x| x.text()) } 88 | } 89 | 90 | impl OptionNameExt for Option { 91 | fn text(&self) -> Option { 92 | self.as_ref() 93 | .and_then(|x| match x { 94 | PathSegment::Name(name) => Some(name), 95 | PathSegment::Dot(_) => None, 96 | }) 97 | .and_then(|x| x.ident()) 98 | .map(|x| x.text()) 99 | } 100 | } 101 | 102 | impl Name { 103 | pub fn text(&self) -> Option { self.ident().map(|x| x.text()) } 104 | } 105 | 106 | impl PathSegment { 107 | pub fn name(&self) -> Option { 108 | match self { 109 | PathSegment::Name(name) => Some(name.clone()), 110 | 
PathSegment::Dot(_) => None, 111 | } 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /compiler/tycheck/src/decl.rs: -------------------------------------------------------------------------------- 1 | use arena::{dense::DenseMap, Arena}; 2 | use hir::{ 3 | ident::{AbsPath, DebugAbsPath}, 4 | lang_item::LangItemMap, 5 | LangItem, 6 | }; 7 | use rustc_hash::FxHashMap; 8 | use tracing::{span, Level}; 9 | use verde::{query, Ctx, Id}; 10 | 11 | use crate::reader::HirReader; 12 | 13 | #[query] 14 | pub fn type_decl( 15 | ctx: &Ctx, item: Id, lang_items: Id, 16 | #[ignore] items: &FxHashMap, Id>, 17 | ) -> thir::ItemDecl { 18 | let item = ctx.get(item); 19 | 20 | let s = span!(Level::DEBUG, "type decl", path=?item.path.debug(ctx)); 21 | let _e = s.enter(); 22 | 23 | let lang_items = ctx.get(lang_items); 24 | let reader = HirReader::new(&item, &lang_items, items); 25 | 26 | let kind = match item.kind { 27 | hir::ItemKind::Struct(ref s) => thir::ItemDeclKind::Struct(struct_(ctx, &reader, s)), 28 | hir::ItemKind::Enum(ref e) => thir::ItemDeclKind::Enum(enum_(ctx, &reader, e)), 29 | hir::ItemKind::Fn(ref f) => thir::ItemDeclKind::Fn(fn_(ctx, &reader, f)), 30 | hir::ItemKind::TypeAlias(ref t) => thir::ItemDeclKind::TypeAlias(type_alias(ctx, &reader, t)), 31 | hir::ItemKind::Static(ref s) => thir::ItemDeclKind::Static(static_(ctx, &reader, s)), 32 | }; 33 | 34 | thir::ItemDecl { path: item.path, kind } 35 | } 36 | 37 | fn struct_(ctx: &Ctx, reader: &HirReader, s: &hir::Struct) -> thir::StructDecl { 38 | thir::StructDecl { 39 | fields: params(ctx, reader, &s.fields), 40 | ty: ctx.add(thir::Type::Struct(reader.path)), 41 | } 42 | } 43 | 44 | fn enum_(ctx: &Ctx, reader: &HirReader, e: &hir::Enum) -> thir::EnumDecl { 45 | let var = e.variants.len(); 46 | let variants = if var == 0 { 1 } else { var.ilog2() + 1 }; 47 | let bits = ((variants + 7) / 8) * 8; 48 | thir::EnumDecl { 49 | repr: match bits { 50 | 8 => LangItem::U8, 51 | 
16 => LangItem::U16, 52 | 32 => LangItem::U32, 53 | 64 => LangItem::U64, 54 | _ => unreachable!(), 55 | }, 56 | ty: ctx.add(thir::Type::Enum(reader.path)), 57 | } 58 | } 59 | 60 | fn fn_(ctx: &Ctx, reader: &HirReader, f: &hir::Fn) -> thir::FnDecl { 61 | let ret = f 62 | .ret 63 | .map(|ty| reader.req_type(ctx, ty)) 64 | .unwrap_or_else(|| ctx.add(thir::Type::Void)); 65 | let params = params(ctx, reader, &f.params); 66 | let ty = ctx.add(thir::Type::Fn(thir::FnType { 67 | abi: f.abi.as_ref().map(|x| x.abi.as_ref().map(|x| x.abi).unwrap_or("C")), 68 | params: params.iter().map(|(_, &x)| x).collect(), 69 | ret, 70 | })); 71 | thir::FnDecl { params, ret, ty } 72 | } 73 | 74 | fn type_alias(ctx: &Ctx, reader: &HirReader, t: &hir::TypeAlias) -> thir::TypeAliasDecl { 75 | thir::TypeAliasDecl { 76 | ty: reader.req_type(ctx, t.ty), 77 | } 78 | } 79 | 80 | fn static_(ctx: &Ctx, reader: &HirReader, s: &hir::Static) -> thir::StaticDecl { 81 | thir::StaticDecl { 82 | ty: reader.req_type(ctx, s.ty), 83 | } 84 | } 85 | 86 | fn params(ctx: &Ctx, reader: &HirReader, params: &Arena) -> DenseMap> { 87 | let mut map = DenseMap::with_capacity(params.len()); 88 | for (id, param) in params.ids_iter() { 89 | map.insert(id, reader.req_type(ctx, param.ty)); 90 | } 91 | map 92 | } 93 | -------------------------------------------------------------------------------- /compiler/verde/src/internal/storage/interned.rs: -------------------------------------------------------------------------------- 1 | use std::{borrow::Borrow, hash::Hash}; 2 | 3 | use parking_lot::{MappedRwLockReadGuard, Mutex, RwLock, RwLockReadGuard}; 4 | use rustc_hash::FxHashMap; 5 | 6 | use crate::{event, internal::Interned}; 7 | 8 | pub trait ErasedInternedStorage {} 9 | 10 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 11 | pub struct InternedStorage { 12 | /// Mutex is required for TOCTOU safety. 
13 | map: Mutex>, 14 | values: RwLock>, 15 | } 16 | 17 | pub type Get<'a, T> = MappedRwLockReadGuard<'a, T>; 18 | 19 | impl ErasedInternedStorage for InternedStorage {} 20 | 21 | impl InternedStorage { 22 | /// Insert a new value into the storage. 23 | pub fn insert(&self, value: T) -> u32 { 24 | let mut map = self.map.lock(); 25 | match map.get(&value) { 26 | Some(index) => *index, 27 | None => { 28 | event!(trace, "inserting new value"); 29 | let mut values = self.values.write(); 30 | let index = values.len() as u32; 31 | values.push(value.clone()); 32 | map.insert(value, index); 33 | index 34 | }, 35 | } 36 | } 37 | 38 | pub fn insert_ref(&self, value: &U) -> u32 39 | where 40 | U: ToOwned + Hash + Eq + ?Sized, 41 | T: Borrow, 42 | { 43 | let mut map = self.map.lock(); 44 | match map.get(value) { 45 | Some(index) => *index, 46 | None => { 47 | event!(trace, "inserting new value"); 48 | let mut values = self.values.write(); 49 | let index = values.len() as u32; 50 | values.push(value.to_owned()); 51 | map.insert(value.to_owned(), index); 52 | index 53 | }, 54 | } 55 | } 56 | 57 | pub fn get(&self, index: u32) -> Get<'_, T> { 58 | RwLockReadGuard::map(self.values.read_recursive(), |x| &x[index as usize]) 59 | } 60 | } 61 | 62 | impl Default for InternedStorage { 63 | fn default() -> Self { 64 | Self { 65 | map: Mutex::default(), 66 | values: RwLock::new(Vec::new()), 67 | } 68 | } 69 | } 70 | 71 | impl<'a> dyn ErasedInternedStorage + 'a { 72 | /// **Safety**: The type of `self` must be `InternedStorage`. 73 | pub unsafe fn insert(&self, value: T) -> u32 { 74 | unsafe { 75 | let storage = self as *const dyn ErasedInternedStorage as *const InternedStorage; 76 | (*storage).insert(value) 77 | } 78 | } 79 | 80 | /// **Safety**: The type of `self` must be `InternedStorage`. 
81 | pub unsafe fn get(&self, index: u32) -> Get<'_, T> { 82 | unsafe { 83 | let storage = self as *const dyn ErasedInternedStorage as *const InternedStorage; 84 | (*storage).get(index) 85 | } 86 | } 87 | 88 | /// **Safety**: The type of `self` must be `InternedStorage`. 89 | pub unsafe fn insert_ref(&self, value: &U) -> u32 90 | where 91 | U: ToOwned + Hash + Eq + ?Sized, 92 | T: Borrow + Interned, 93 | { 94 | unsafe { 95 | let storage = self as *const dyn ErasedInternedStorage as *const InternedStorage; 96 | (*storage).insert_ref(value) 97 | } 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /compiler/verde/derive/src/tracked.rs: -------------------------------------------------------------------------------- 1 | use std::hash::{Hash, Hasher}; 2 | 3 | use proc_macro2::TokenStream; 4 | use quote::{quote, ToTokens}; 5 | use syn::{ 6 | spanned::Spanned, 7 | token::{Enum, Union}, 8 | Data, 9 | DataEnum, 10 | DataStruct, 11 | DataUnion, 12 | DeriveInput, 13 | Ident, 14 | Meta, 15 | Type, 16 | }; 17 | 18 | use crate::{Error, Result}; 19 | 20 | pub(crate) fn tracked(input: DeriveInput) -> Result { 21 | match input.data { 22 | Data::Struct(s) => { 23 | let (id_field, id_ty) = generate(&s)?; 24 | let (generics, ty_generics, where_clause) = input.generics.split_for_impl(); 25 | let ty = input.ident; 26 | 27 | Ok(quote! 
{ 28 | impl #generics ::verde::Tracked for #ty #ty_generics #where_clause { 29 | type Id = #id_ty; 30 | 31 | fn id(&self) -> &Self::Id { 32 | &self.#id_field 33 | } 34 | } 35 | 36 | impl #generics ::verde::internal::Storable for #ty #ty_generics #where_clause { 37 | type Storage = ::verde::internal::storage::TrackedStorage; 38 | 39 | fn tracked_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedTrackedStorage> { 40 | Some(store) 41 | } 42 | 43 | fn query_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedQueryStorage> { 44 | None 45 | } 46 | 47 | fn pushable_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedPushableStorage> { 48 | None 49 | } 50 | 51 | fn interned_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedInternedStorage> { 52 | None 53 | } 54 | } 55 | }) 56 | }, 57 | Data::Enum(DataEnum { 58 | enum_token: Enum { span }, 59 | .. 60 | }) 61 | | Data::Union(DataUnion { 62 | union_token: Union { span }, 63 | .. 
64 | }) => Err(Error::new(span, "Tracked cannot be derived for enums or unions")), 65 | } 66 | } 67 | 68 | enum Field { 69 | Ident(Ident), 70 | Tuple(usize), 71 | } 72 | 73 | impl Hash for Field { 74 | fn hash(&self, state: &mut H) { 75 | match self { 76 | Field::Ident(i) => i.hash(state), 77 | Field::Tuple(i) => i.hash(state), 78 | } 79 | } 80 | } 81 | 82 | impl ToTokens for Field { 83 | fn to_tokens(&self, tokens: &mut TokenStream) { 84 | match self { 85 | Field::Ident(i) => i.to_tokens(tokens), 86 | Field::Tuple(i) => i.to_tokens(tokens), 87 | } 88 | } 89 | } 90 | 91 | fn generate(input: &DataStruct) -> Result<(Field, Type)> { 92 | let mut id = None; 93 | 94 | for (i, f) in input.fields.iter().enumerate() { 95 | if f.attrs 96 | .iter() 97 | .any(|x| matches!(&x.meta, Meta::Path(p) if p.is_ident("id"))) 98 | { 99 | if id.is_some() { 100 | return Err(Error::new(f.span(), "Only a single field can be marked with `#[id]`")); 101 | } 102 | id = Some(( 103 | f.ident.clone().map(Field::Ident).unwrap_or(Field::Tuple(i)), 104 | f.ty.clone(), 105 | )); 106 | } 107 | } 108 | 109 | id.ok_or_else(|| Error::new(input.struct_token.span(), "There must be a field marked with `#[id]`")) 110 | } 111 | -------------------------------------------------------------------------------- /compiler/hir/src/ast.rs: -------------------------------------------------------------------------------- 1 | use std::{fmt::Debug, hash::Hash, marker::PhantomData}; 2 | 3 | use diagnostics::{FilePath, FullSpan, Span}; 4 | use rustc_hash::FxHashMap; 5 | use syntax::{ast::Item, AstElement, SyntaxElement, TextRange}; 6 | use verde::Id; 7 | 8 | use crate::ident::AbsPath; 9 | 10 | #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] 11 | pub struct ErasedAstId { 12 | pub item: Id, 13 | pub index: u32, 14 | } 15 | impl Span for ErasedAstId { 16 | type Ctx = AstMap; 17 | 18 | fn to_raw(self, ctx: &Self::Ctx) -> FullSpan { 19 | let item = ctx.items.get(&self.item).unwrap(); 20 | let span = match 
item.sub[self.index as usize] { 21 | ItemElement::Concrete(ref n) => n.text_range(), 22 | ItemElement::Error => { 23 | let start = ErasedAstId { 24 | item: self.item, 25 | index: self.index - 1, 26 | } 27 | .to_raw(ctx); 28 | let end = ErasedAstId { 29 | item: self.item, 30 | index: self.index + 1, 31 | } 32 | .to_raw(ctx); 33 | let start = if start.end + 1 == end.start { 34 | end.start 35 | } else { 36 | start.end + 1 37 | }; 38 | TextRange::new(start.into(), end.start.into()) 39 | }, 40 | }; 41 | FullSpan { 42 | start: span.start().into(), 43 | end: span.end().into(), 44 | relative: item.file, 45 | } 46 | } 47 | } 48 | 49 | pub struct AstId(pub ErasedAstId, pub PhantomData T>); 50 | impl Clone for AstId { 51 | fn clone(&self) -> Self { *self } 52 | } 53 | impl Copy for AstId {} 54 | impl PartialEq for AstId { 55 | fn eq(&self, other: &Self) -> bool { self.0 == other.0 } 56 | } 57 | impl Eq for AstId {} 58 | impl Hash for AstId { 59 | fn hash(&self, state: &mut H) { self.0.hash(state) } 60 | } 61 | impl From> for ErasedAstId { 62 | fn from(id: AstId) -> Self { id.0 } 63 | } 64 | impl Debug for AstId { 65 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 66 | write!( 67 | f, 68 | "AstId<{}>({:?}, {})", 69 | std::any::type_name::(), 70 | self.0.item, 71 | self.0.index 72 | ) 73 | } 74 | } 75 | impl AstId { 76 | pub fn erased(self) -> ErasedAstId { self.0 } 77 | } 78 | 79 | #[derive(Debug, Clone)] 80 | pub enum ItemElement { 81 | Concrete(SyntaxElement), 82 | Error, 83 | } 84 | 85 | #[derive(Debug, Clone)] 86 | pub struct ItemData { 87 | pub item: Item, 88 | pub file: FilePath, 89 | pub path: Id, 90 | pub sub: Vec, 91 | } 92 | 93 | #[derive(Debug, Default)] 94 | pub struct AstMap { 95 | items: FxHashMap, ItemData>, 96 | } 97 | 98 | impl AstMap { 99 | pub fn new(items: impl IntoIterator) -> Self { 100 | Self { 101 | items: items.into_iter().map(|x| (x.path, x)).collect(), 102 | } 103 | } 104 | 105 | pub fn get(&self, id: AstId) -> Option { 106 | 
let item = self.items.get(&id.0.item).unwrap(); 107 | let node = item.sub[id.0.index as usize].clone(); 108 | match node { 109 | ItemElement::Concrete(n) => Some(T::cast(n).expect("invalid AstId")), 110 | ItemElement::Error => None, 111 | } 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /compiler/hir/src/lang_item.rs: -------------------------------------------------------------------------------- 1 | use diagnostics::Span; 2 | use rustc_hash::FxHashMap; 3 | use syntax::ast; 4 | use tracing::{span, Level}; 5 | use verde::{query, Ctx, Id, Tracked}; 6 | 7 | use crate::{ast::AstId, ident::AbsPath, AttrKind, Item, ItemKind, TypeKind}; 8 | 9 | #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] 10 | pub enum LangItem { 11 | // Primitives. 12 | U8, 13 | U16, 14 | U32, 15 | U64, 16 | U128, 17 | I8, 18 | I16, 19 | I32, 20 | I64, 21 | I128, 22 | Bool, 23 | Char, 24 | F32, 25 | F64, 26 | } 27 | 28 | const COUNT: usize = LangItem::F64 as usize + 1; 29 | 30 | #[derive(Tracked, Default, Eq, PartialEq)] 31 | pub struct LangItemMap { 32 | #[id] 33 | id: (), 34 | items: [Option>; COUNT], 35 | reverse: FxHashMap, LangItem>, 36 | } 37 | 38 | impl LangItemMap { 39 | pub fn get(&self, item: LangItem) -> Option> { self.items[item as usize] } 40 | 41 | pub fn insert(&mut self, item: LangItem, path: Id) -> Option> { 42 | let prev = self.items[item as usize]; 43 | self.items[item as usize] = Some(path); 44 | self.reverse.insert(path, item); 45 | prev 46 | } 47 | 48 | pub fn get_lang_item_of(&self, path: Id) -> Option { self.reverse.get(&path).copied() } 49 | } 50 | 51 | #[query] 52 | pub fn build_lang_item_map(ctx: &Ctx, #[ignore] items: &FxHashMap, Id>) -> LangItemMap { 53 | let s = span!(Level::DEBUG, "build lang item map"); 54 | let _e = s.enter(); 55 | 56 | let mut map = LangItemMap::default(); 57 | for (&id, &item) in items.iter() { 58 | let mut prev: Option> = None; 59 | let item = ctx.get(item); 60 | for attr in 
item.attrs.iter() { 61 | if let Some(prev) = prev { 62 | let prev = prev.erased(); 63 | ctx.push( 64 | prev.error("cannot have multiple lang items attributes on one item") 65 | .label(prev.label("previous attribute")) 66 | .label(attr.id.erased().label("other lang item attribute")), 67 | ); 68 | } 69 | 70 | let AttrKind::LangItem(l) = attr.kind; 71 | map.insert(l, id); 72 | prev = Some(attr.id); 73 | 74 | verify_lang_item_shape(ctx, l, &item); 75 | } 76 | } 77 | map 78 | } 79 | 80 | fn verify_lang_item_shape(ctx: &Ctx, l: LangItem, item: &Item) { 81 | use LangItem::*; 82 | match l { 83 | U8 | U16 | U32 | U64 | U128 | I8 | I16 | I32 | I64 | I128 | Bool | Char | F32 | F64 => { 84 | let ItemKind::TypeAlias(alias) = &item.kind else { 85 | let span = item.name.id.erased(); 86 | ctx.push( 87 | span.error("primitive lang item must be a type alias to itself") 88 | .label(span.label(format!("found {:?}", item.kind))), 89 | ); 90 | return; 91 | }; 92 | let ty = &item.types[alias.ty]; 93 | let TypeKind::Alias(path) = &ty.kind else { 94 | let span = ty.id.erased(); 95 | ctx.push( 96 | span.error("primitive lang item must be a type alias to itself") 97 | .label(span.label(format!("found {:?}", ty.kind))), 98 | ); 99 | return; 100 | }; 101 | if *path != item.path { 102 | let span = ty.id.erased(); 103 | let alias = match *ctx.geti(*path) { 104 | AbsPath::Name { name, .. 
} => name, 105 | _ => unreachable!("found type alias to package?"), 106 | }; 107 | ctx.push( 108 | span.error("primitive lang item must be a type alias to itself") 109 | .label(span.label(format!("aliases `{}`", alias.as_str()))), 110 | ); 111 | } 112 | }, 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /compiler/diagnostics/src/span.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | fmt::{Debug, Display}, 3 | hash::{Hash, Hasher}, 4 | ops::{Add, Index}, 5 | path::Path, 6 | }; 7 | 8 | use text::Text; 9 | 10 | use crate::{DiagKind, Diagnostic, Label}; 11 | 12 | /// A span of source code. 13 | pub trait Span: Sized { 14 | /// A context for resolving the span to a full span. 15 | type Ctx; 16 | 17 | /// Resolve the span. 18 | fn to_raw(self, ctx: &Self::Ctx) -> FullSpan; 19 | 20 | /// Create an error diagnostic pointing at this span. 21 | fn error(self, message: impl ToString) -> Diagnostic { Diagnostic::new(DiagKind::Error, message, self) } 22 | 23 | /// Create a warning diagnostic pointing at this span. 24 | fn warning(self, message: impl ToString) -> Diagnostic { Diagnostic::new(DiagKind::Warning, message, self) } 25 | 26 | /// Create an advice diagnostic pointing at this span. 27 | fn advice(self, message: impl ToString) -> Diagnostic { Diagnostic::new(DiagKind::Advice, message, self) } 28 | 29 | /// Create a label pointing at this span. 30 | fn label(self, message: impl ToString) -> Label { Label::new(self, message) } 31 | 32 | /// Create a label pointing at this span with no message. 33 | fn mark(self) -> Label { Label::no_message(self) } 34 | } 35 | 36 | /// The path of a file. 
37 | #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] 38 | pub struct FilePath(Text); 39 | 40 | impl Display for FilePath { 41 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.0.fmt(f) } 42 | } 43 | 44 | impl FilePath { 45 | pub fn new(name: &str) -> Self { Self(Text::new(name)) } 46 | 47 | pub fn path(&self) -> &'static Path { Path::new(self.0.as_str()) } 48 | } 49 | 50 | /// A span of source code. 51 | #[derive(Debug, Default, Copy, Clone)] 52 | pub struct RawSpan { 53 | pub start: u32, 54 | pub end: u32, 55 | pub relative: F, 56 | } 57 | 58 | impl PartialEq for RawSpan { 59 | fn eq(&self, _: &Self) -> bool { true } 60 | } 61 | 62 | impl Eq for RawSpan {} 63 | 64 | impl Hash for RawSpan { 65 | fn hash(&self, _: &mut H) {} 66 | } 67 | 68 | impl Add for RawSpan { 69 | type Output = Self; 70 | 71 | fn add(self, other: Self) -> Self { 72 | debug_assert_eq!(self.relative, other.relative, "Cannot merge unrelated spans"); 73 | 74 | RawSpan { 75 | start: self.start.min(other.start), 76 | end: self.end.max(other.end), 77 | relative: self.relative, 78 | } 79 | } 80 | } 81 | 82 | impl Index> for str { 83 | type Output = str; 84 | 85 | fn index(&self, span: RawSpan) -> &Self::Output { &self[span.start as usize..span.end as usize] } 86 | } 87 | 88 | impl ariadne::Span for RawSpan { 89 | type SourceId = F; 90 | 91 | fn source(&self) -> &Self::SourceId { &self.relative } 92 | 93 | fn start(&self) -> usize { self.start as _ } 94 | 95 | fn end(&self) -> usize { self.end as _ } 96 | } 97 | 98 | /// A span of source code. 99 | pub type FullSpan = RawSpan; 100 | /// A span of source that is local to a file, but we don't know which one. 
101 | pub type FileSpan = RawSpan<()>; 102 | 103 | impl Span for FullSpan { 104 | type Ctx = (); 105 | 106 | fn to_raw(self, _: &Self::Ctx) -> FullSpan { self } 107 | } 108 | 109 | impl Span for FileSpan { 110 | type Ctx = FilePath; 111 | 112 | fn to_raw(self, ctx: &Self::Ctx) -> FullSpan { 113 | FullSpan { 114 | start: self.start, 115 | end: self.end, 116 | relative: *ctx, 117 | } 118 | } 119 | } 120 | 121 | impl FileSpan { 122 | pub fn with(self, span: T) -> RawSpan { 123 | RawSpan { 124 | start: self.start, 125 | end: self.end, 126 | relative: span, 127 | } 128 | } 129 | } 130 | -------------------------------------------------------------------------------- /compiler/hir-lower/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![feature(drain_keep_rest)] 2 | #![feature(let_chains)] 3 | #![feature(try_blocks)] 4 | 5 | use std::path::Path; 6 | 7 | use diagnostics::{Diagnostic, FilePath, FullDiagnostic}; 8 | use hir::ident::{AbsPath, PackageId}; 9 | use rustc_hash::FxHashMap; 10 | use syntax::ast; 11 | use text::Text; 12 | use verde::{storage, Ctx, Db, Id, Tracked}; 13 | 14 | use crate::index::ErasedTempId; 15 | 16 | pub mod index; 17 | pub mod lower; 18 | pub mod prelude; 19 | mod resolve; 20 | 21 | pub type TempDiagnostic = Diagnostic; 22 | 23 | #[storage] 24 | pub struct Storage( 25 | FullDiagnostic, 26 | TempDiagnostic, 27 | Module, 28 | Packages, 29 | VisiblePackages, 30 | prelude::PackagePrelude, 31 | prelude::Prelude, 32 | prelude::get_prelude, 33 | prelude::get_prelude_from_package, 34 | index::Index, 35 | index::PublicIndex, 36 | index::ModuleTree, 37 | index::PackageTree, 38 | index::generate_index, 39 | index::build_package_tree, 40 | lower::LoweredModule, 41 | lower::lower_to_hir, 42 | ); 43 | 44 | // Note on deriving `Eq` and `PartialEq`: 45 | // This does a pointer comparison, which is surprisingly what we want. 
46 | // On every reparse, this pointer will change and we want to invalidate the index as well the lowered HIR for 47 | // this module. However, if there wasn't a reparse, we want to keep the old index and HIR if possible - and the 48 | // pointer wouldn't have changed. 49 | #[derive(Tracked, Eq, PartialEq)] 50 | pub struct Module { 51 | #[id] 52 | pub path: Id, 53 | pub file: FilePath, 54 | pub ast: ast::File, 55 | } 56 | 57 | impl Module { 58 | pub fn new(ast: ast::File, file: FilePath, path: Id) -> Self { Self { path, file, ast } } 59 | 60 | /// Figure out the module's path from its relative file path from a root file. 61 | pub fn from_file(db: &dyn Db, root: FilePath, ast: ast::File, file: FilePath, package: PackageId) -> Self { 62 | if root == file { 63 | return Self::new(ast, file, db.add(package.into())); 64 | } 65 | 66 | let root = root.path().parent().expect("root must be a file"); 67 | // Ensure path is something like `src/x/x.yam`. 68 | let p = file.path(); 69 | assert_eq!(p.extension(), Some("yam".as_ref()), "path is not a .yam file"); 70 | let relative = p.strip_prefix(root).expect("path is not a child of the root"); 71 | // `relative` is now `x/x.yam`. 72 | 73 | let mut last_name = String::new(); 74 | let mut prec = db.add(package.into()); 75 | for component in relative.components() { 76 | let path: &Path = component.as_ref(); // `x` or `x.yam`. 77 | let path = path.to_str().expect("path is not valid UTF-8"); 78 | 79 | let file_name = path.strip_suffix(".yam"); 80 | if file_name != Some(last_name.as_str()) { 81 | // Create a child module if we aren't `x/x.yam`. 82 | let path = file_name.unwrap_or(path); 83 | last_name = path.to_string(); 84 | let name = Text::new(path); 85 | 86 | prec = db.add(AbsPath::Name { prec, name }); 87 | } 88 | } 89 | 90 | Self::new(ast, file, prec) 91 | } 92 | } 93 | 94 | /// The packages visible to a package. 
95 | #[derive(Tracked, Eq, PartialEq)] 96 | pub struct VisiblePackages { 97 | #[id] 98 | pub package: PackageId, 99 | pub packages: FxHashMap, 100 | } 101 | 102 | #[derive(Tracked, Eq, PartialEq)] 103 | pub struct Packages { 104 | #[id] 105 | pub id: (), 106 | pub packages: FxHashMap>, 107 | } 108 | 109 | fn is_child_of(ctx: &Ctx, parent: Id, mut child: Id) -> bool { 110 | loop { 111 | if parent == child { 112 | return true; 113 | } 114 | 115 | child = match *ctx.geti(child) { 116 | AbsPath::Package(_) => return false, 117 | AbsPath::Name { prec, .. } => prec, 118 | }; 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /compiler/lex/src/tests.rs: -------------------------------------------------------------------------------- 1 | use crate::{Lexer, T}; 2 | 3 | #[test] 4 | fn random_input_is_lossless() { 5 | let random_unicode = r#" 6 | 㴍ꜟ⍽䋎攣搞ν溒ꁁ쾄眀뫢�ꈒ땥楚팡硝䫂喹怟Ѭ韗戵ꥩ㍚八춝㼊䊅桦滗ʧ휕䗒⃦呢❗᱁衽筕㾀홶䇁뛤꺽㡺閖⟃歆鴐숧䜠홁흷ꇪ㓿�⊚쥒䌬ⓠ⧦ĉ疞㋢⠇�剨�籎龟쀎☭샭霐䨛�罥㟨㬳ꀑ쏇揭歙愿�眫玏㯰햱选Ṋ⮡⯛�谐䁦슃Ꞿ㨖겛ᶑᇘ嫬罜횇䭣坚⎖䄆毋쿶ꣃ�趓ሦ㠥짛耯赀�氃ꗡៀ嵄೻ȃ핧꺏㍰憶쮴뽁�갼餟蟥⥌Ǽ䫃㕁㛥쟀窃ꥹ祐燧ﯞ㑸쉶Ზ膾狹笪궸閈轤҄뜜櫀ᓏ⒲㘘砊狥䝾㐱㿍긿᪰鰼žݵ縞졲�꒿琗꟭ᒹ殉骷ꔖ鷐䟘㾗ꗼ깣컜㉤ꗩ鼡稁蚘랤ᇒ띵ꁔఠ럤볻圚箧탬侑탌敮嫋쇩⺩祉满洜핟묕頳᱈䶐&蚃姑鏥�풦菁觇�쪜缲鞣熐㮵⵬ɋ䗍鋿ꐖ껫깡醣괌嚮퀪㵠褶▉龠召ⵊ蟒巾⒓慨爔揼㒪裵㝳閅㲌ƪꂓ⏥⿳㶱狑づ쵑㿶牅ꩈद쉆奴筛鼘徒�敱䱰ဵ꠬䆶⮱嬭돝郖퉕倂謞썟粯᝴䂳蹀㋫Ⲅ㘑茩悧᳥ḙ⏪Ṝ斐嘮铎⑉駃ᒨ悚衦ૂϤ뭖ᄉ쎺࿙$谺쓜௥붭ᒆ溯쉗ꬒᙧ勇危㥸鸠郕謹ᤷ섵ⵁ卿�툋鼾푼熹༦�㚚틨៖푟息疄ᥳ녴꟦鐈הּℂ屺䁚࠾엸臁⣎澺ᶖ�놎녮콱覍義�ꇿ䕸忐཭ю鐝퓪䉩㸾譃섖൞砉₇뀀鬒먚赥疹疓塭ꖯ↓ḃ昸쟆鐊༡᢯烢葿◟⬭ֈ쯚簮옞间䳰츊໷莉໾ᇧ杛屁簠億ᾣ㠖椏ߡٞ遰䷴ா긍᪉賽햾옻ﮍ㳏椓ᬘ쳇⤜믧麶ꞃ聾璓巕쯺膸以솔〡筯ѧ敡渴㛼ྜ�䃍吀竑愬嶞旻嫓䎱髺襔䯊땾ꇿ�㚒ퟣ涝胺⇄窆爾灀興寛合촐腩グ⑇鲦◚�ਜ਼婤婫粺ᶁ叄嗆哌쳺䘟瑡湰欬闋䂮噝媺跓黽ꩴ㤢䟡䯷늝⵸Ց酦妆쮹赓䤓௒횿꺐忩긦硶⤫峐輁禣✓᭝阒湵쁉㵪䡣⬳펍嬉筓嶴㮦⤪⧽擀㞀䒢⳿鼋⾫몥ૹ䭒硞帬㸪㫵孜딉Აࡓ鼒辧啺꫊绻砼芚탻뼳젥ޜນ㚗纪竵⨹꼀鴊⁠葖⮣尔બ밅駲൮ѷ⵱ˆ郗䛍憞띁鏉譻徱ꆧ䙈轭攓梗ᬇ淺쨵私穢嗯퀤꘭ᇠ慱ხ䠒镫咩ฑ풫㔈Əӷ㋅�馃뫦ड़頋஧῀获⚘捉銫两興컍׾ꔰ짖쐀赱⨻�㺍ꊍ襀曂䷣Ⲉሎ̀摩ኲ౗䎟ϵ诅耥鋚��䋝쪭窰랪稛蕄䦼둮ᷣۥ㨉噽䭂徆呎쩀꤅걓젯�ᗕ址ꂪ갨胆牉ᗫ�ꤌ鼾臖㉐੾⏏᜷穷걇鿰ᐾ�霌볁脈ꀈ뎡癁䵵ㄼ蝭⪁争伲鱾烁籓쭍旇꿖泀⶝諴陌镫楸瓇׬核눣髎侰ၒ铵霵铂浛춤䉶厱㛏骊뒐ᘾ┒嵔辀↗略䈋庑쮴喦⹹铟ູᬏ郺傾ꈋ᠏ⅸ屺涷骘桒孨砞✠뎘�벞鬫⣟紈�祈캅⥤槐㞹⺩穽⎱Ӂ絘貑꫒敮ꑅ럕븊눣竃ꏥ㱳э넀俘弐칄쀚䥼설ퟁ靹哠䵈�ࢡ衺髑۷ꇩ璅羙婾㣖䥬⫮㙚鵒嬳평冨倝㼀霭⬼閈扱趿鑆ㇼ哃ᔜ㡥懄犙꜂ね髋㔤戵Ƿ倨ꩫ嗲뫎祕㲱죩躿ᬜ✩쭾‌瑶ਔ럘ꖠ⛙�颕僈䊱㭮ݺ�嚳䰯붽岀끰퀥ᓅ屌࣢ⵖ䣬⊛射脫উꯘ뻮驢蝇굩얾ᑯꃡ偂⌫ᆝ㛦鑏ぃ蛓긨ꪡḔ떹ᨁθ푐 7 | "#; 8 | let lexer = Lexer::new(random_unicode); 9 | verify_span_is_continuous(lexer); 10 | 
11 | let random_ascii = r#"7BFESwrsvpe0jgZVZTBtJIYDQzp5nmaVcnoQlpJcrbL6xRLji8RJNLtOh9UV1Qd6emYSvZ48N9cluPJWmNaGeXD8qjgN6wo5DTR9Tsi0gPK18q8fF0FRTqdEuzbGEOWt90TSisIB55KMC2y76kDR4zvGyS9du9u0lLvFkYhbXdqqq58zHzrQllbBFis08PtvvXc7vYkqswezI32VYavYiJOQmEOeNlQkV5p4pa2FQCBo33DermaQ7PR0vvuNJfzyCUxqkNwSh4TsBA6axq16IirsNaOwVIPF7xmlXdjnb2JKMVUmZwz6J6xijMPsMRC7RbztCBDGyCwFWOG4v2C2Y4Uj6UqcJ11jhnTdzolhbZpN7GxX7eXam0cHTtKp7n5YAwTGzSZ7boAkEmGyxX9rp42uZEGJf2923uSnquB39QSYgjenUdv3AtMVUka3F7VhOsFqU4EFlCQZKDjBLDxE4R3jRu76185OG52OBDMPuAwRSdDG5YXFcGCkW8j8wr5puE43h4tQiFl6N1JZPj9eLvQJH5LHVO2p3kI1h7Ve7lMP1SMlg9v2eWvEsUSmWSo0wfHDMEZd4gHZERczazRyax37S6quDvj4dUzniJVSxR5UXmeqMEoHRelIIjcE87AeIZNGpzMQlQ1UvNemIP0JlC7p1RwSbPuXKqxKVUx7f8BDczFpMG86j2psKiDVCwo4CoyoLf1K1gwtOFc4ESngfy7O0uPoLAGwjlIiGSV3WPMxhNWHSntfPWckCBwCEdSCdn7uwo5Pp4vqLQXI8I73PNQTM654Rg8uUjsFquc0ghc3IacORj4MvcdnXSMdfWkPH1XTMHKH09tq7RzaJTgPuI3uWIOFiwRvjVeC78CPHQuLV2XJWpGEQSOzXIReby0e7MjhL6ZKfmQbo1wKT2HIGq9O27F4AZ2G32VSVVpiqjgHSFzad2XrSnWhFUhVf7z5Y6wPGnjLodzGlMkEtPylU2sI7I7cq1QTYX2Jan40slhzFWPv8geNyyJdE6Fun0a8gRpHykXQ5vr8Wt9OUSaFRerYGYXvH4jBnoPLApZIEkdrw1OhEax2Z1yWhM6LrgpLtinSynA81jOU0Vu4uVhorFlwaNd39L71ZPTgV4wydsMK1kuW4XVOV90VwvAGX2M5baK7F4buLukkNuWZATStPVrb9COwxQrjlBocEznlNHxIIQ1g2LaGLpL3x0mN2NfuJb7D5pillik8LqrWv6MsSy1fgLdK4d5RNGEJAlYgQ5hLjGDUjXv5pmaEd4nqlRV7HBSeGTC5z2kDHrZWiZIQb2Ig2gmc43zHMPIX9PpMMPVL6ri8EXYrR4yjx43gNat7XWiZE9KZDs1vikFACgBEox986EXl5DNd4Vaqa90b58MmThj8C0Bbx9BVq0kw9RXdhpJBej5zCYaI3bZCLDjr2u0RvCdXQEAkbLTshuBgw5btT1KI54OgfneYzl0hFWHogtQeHtmmIB3jt55vymUeRlc7MBG9R5VjidEhyDQkJ3t51CZcbaGhBPGquDgpexUVSOgRwaKE4NfHFC6ccA6uZ96jVpucJ2VX6XCUEoIa9sGRHtSxMEjiECPtPeiGAmkV1M6knzT9LWyW5WOWtsy6pyyJNCHbh4rJRJ1E9J1FChGGaSSqqflsS5KsvCOJlspYWzDAHUXRWf5JIwpMjoY95sCghQxkQQgSt3eUR2OMsCUNf1E1GbmcbDYygw2UnlIL0SLfIiGDOL9QJ0BSYDZVbosNqUzU3MFuEHAWTbm1SnO55iVqiqP7RrVecQv8sRmN75gfnQvnrsetaXBKDHTyfpO4eyX4EGk3pvQ8dn8N7NZz2SZLFDMoUeI6Xh1q9FSouzbnzDweJVWvfOR2RpVa8UIv5628rjI0u0olUEHq6CQNGZnD4wnYWqiBjUqXsfkmyWYAYzjqHrbuJ5MHceDA4wiVhsldzKaQoEZDbXMzcGUI3iQK78Z25
FA8HAEkvXPd51MXSe8GJrO8G1oZ"#; 12 | let lexer = Lexer::new(random_ascii); 13 | verify_span_is_continuous(lexer); 14 | 15 | let random_code = r#" 16 | struct S {} 17 | 18 | fn main() { 19 | while true { break continue; } 20 | 21 | let x = 1; 22 | let y = 2; 23 | let z = x +-&<< y; 24 | 25 | // comment 26 | 27 | a(); 28 | } 29 | "#; 30 | let lexer = Lexer::new(random_code); 31 | verify_span_is_continuous(lexer); 32 | } 33 | 34 | fn verify_span_is_continuous(mut lexer: Lexer) { 35 | let mut next = lexer.next(); 36 | let mut last_end = 0; 37 | 38 | while !matches!(next.kind, T![eof]) { 39 | let span = next.span; 40 | assert_eq!(last_end, span.start); 41 | last_end = span.end; 42 | next = lexer.next(); 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /compiler/verde/src/internal/storage/query.rs: -------------------------------------------------------------------------------- 1 | use parking_lot::{Mutex, RwLock}; 2 | use rustc_hash::FxHashSet; 3 | 4 | use crate::{ 5 | event, 6 | internal::{ 7 | storage::{routing::Route, DashMap, ErasedId}, 8 | Ctx, 9 | Query, 10 | }, 11 | span, 12 | Id, 13 | }; 14 | 15 | #[derive(Copy, Clone, Eq, PartialEq, Hash)] 16 | pub struct ErasedQueryId { 17 | pub(crate) index: u32, 18 | pub(crate) route: Route, 19 | } 20 | 21 | pub trait ErasedQueryStorage {} 22 | 23 | impl<'a> dyn ErasedQueryStorage + 'a { 24 | pub unsafe fn start_query(&self, input: T::Input) -> u32 { 25 | unsafe { 26 | let storage = self as *const dyn ErasedQueryStorage as *const QueryStorage; 27 | (*storage).start_query(input) 28 | } 29 | } 30 | 31 | /// **Safety**: The type of `self` must be `QueryStorage`. 
32 | pub unsafe fn execute(&self, ctx: &Ctx, f: impl FnOnce() -> T::Output) -> Id { 33 | unsafe { 34 | let storage = self as *const dyn ErasedQueryStorage as *const QueryStorage; 35 | (*storage).execute(ctx, f) 36 | } 37 | } 38 | } 39 | 40 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 41 | pub struct QueryStorage { 42 | pub(crate) map: DashMap, 43 | pub(crate) values: RwLock>>>, 44 | } 45 | 46 | impl ErasedQueryStorage for QueryStorage {} 47 | 48 | impl QueryStorage { 49 | pub fn start_query(&self, input: T::Input) -> u32 { 50 | match self.map.get(&input) { 51 | Some(index) => *index, 52 | None => { 53 | let mut values = self.values.write(); 54 | let index = values.len() as u32; 55 | values.push(Mutex::new(QueryData { 56 | dependencies: Default::default(), 57 | output: None, 58 | })); 59 | self.map.insert(input, index); 60 | index 61 | }, 62 | } 63 | } 64 | 65 | pub fn execute(&self, ctx: &Ctx, f: impl FnOnce() -> T::Output) -> Id { 66 | let query = std::any::type_name::(); 67 | span!(enter trace, "fetch query", query); 68 | 69 | let index = ctx.curr_query.index; 70 | let values = self.values.read(); 71 | let data = values[index as usize].lock(); 72 | 73 | let f = || { 74 | let ret = { 75 | span!(enter trace, "execute query", query); 76 | f() 77 | }; 78 | let output = ctx.insert(ret); 79 | let dependencies = unsafe { ctx.dependencies.borrow_mut().assume_init_read() }; 80 | 81 | let values = self.values.read(); 82 | let mut data = values[index as usize].lock(); 83 | *data = QueryData { 84 | dependencies, 85 | output: Some(output), 86 | }; 87 | output 88 | }; 89 | 90 | match data.output { 91 | Some(id) => { 92 | event!(trace, "query already exists"); 93 | for &(dep, gen) in data.dependencies.iter() { 94 | let dep_generation = ctx.get_generation(dep); 95 | if dep_generation > gen { 96 | event!(debug, "dependencies have changed, re-executing"); 97 | 98 | // Drop so we don't deadlock in recursive queries. 
99 | drop(data); 100 | drop(values); 101 | return f(); 102 | } 103 | } 104 | event!(trace, "query is up to date"); 105 | let _ = unsafe { ctx.dependencies.borrow_mut().assume_init_read() }; 106 | id 107 | }, 108 | None => { 109 | event!(trace, "first query execution"); 110 | 111 | // Drop so we don't deadlock in recursive queries. 112 | drop(data); 113 | drop(values); 114 | f() 115 | }, 116 | } 117 | } 118 | } 119 | 120 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 121 | pub(crate) struct QueryData { 122 | pub(crate) dependencies: FxHashSet<(ErasedId, u64)>, 123 | pub(crate) output: Option>, 124 | } 125 | 126 | impl Default for QueryStorage { 127 | fn default() -> Self { 128 | Self { 129 | map: DashMap::default(), 130 | values: Default::default(), 131 | } 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /compiler/verde/README.md: -------------------------------------------------------------------------------- 1 | # verde 2 | 3 | A refreshingly simple incremental computation engine. 4 | 5 | ### What is incremental computation? 6 | 7 | Incremental computation deals with a problem that seems rather simple: 8 | 9 | Given a pure function `f` and some input `x`, compute `f(x)`, and *memoize* the output. 10 | The next time, if `f` is executed with the same inputs, return the memoized output instead of recomputing it. 11 | However, if the input changes, recompute the value, and repeat. 12 | 13 | ### The terminology 14 | 15 | The central component of verde is a *database*. 16 | The database stores all the memoized data (*tracked* types), 17 | as well as track the state of each memoized function (a *query*). 18 | Furthermore, the database also allows for interning values such that they are deduplicated, 19 | and comparison can be done by a simple ID check (*interned* types). 
20 | Finally, the database also allows for a query-safe 'side-channel' so that diagnostics can be collected without having to be a part of the output of a query (*pushable* types). 21 | 22 | ### Getting started 23 | 24 | First, we must define the structs that verde will track. This can be done as so: 25 | ```rust ignore 26 | #[derive(verde::Tracked, Eq, PartialEq)] 27 | struct MyTrackedStruct { 28 | #[id] 29 | id: UniqueIdentifier, 30 | value: u32, 31 | } 32 | ``` 33 | The field marked with `#[id]` is used as a unique identifier for each tracked type. 34 | It must be unique in every query (a query function must not produce a struct with the same ID when given different input, but different queries can). 35 | Note that the `Eq` trait is required for `Tracked` to be implemented, and the ID must implement `Clone`, `Eq`, and `Hash`. 36 | 37 | Next, we must define any pushable types. 38 | ```rust ignore 39 | #[derive(verde::Pushable)] 40 | struct MyPushableStruct { 41 | value: u32, 42 | } 43 | ``` 44 | No other traits are required for `Pushable` to be implemented. 45 | 46 | Interning is provided as a convenience feature: 47 | ```rust ignore 48 | #[derive(verde::Interned, Eq, PartialEq, Hash)] 49 | struct MyInternedStruct { 50 | value: u32, 51 | } 52 | ``` 53 | `Clone`, `Eq`, and `Hash` are required to be implemented. 54 | 55 | Finally, we must define the query functions. 56 | ```rust ignore 57 | #[verde::query] 58 | fn double(ctx: &verde::Ctx, input: verde::Id) -> MyTrackedStruct { 59 | let interned = ctx.add(MyInternedStruct { value: 5 }); 60 | assert_eq!(ctx.geti(interned).value, 5); 61 | ctx.push(MyPushableStruct { value: 5 }); 62 | let input = ctx.get(input); 63 | MyTrackedStruct { 64 | id: input.id, 65 | value: input.value * 2, 66 | } 67 | } 68 | ``` 69 | Queries are normal functions that must take a `&Ctx` as the first parameter. 70 | They can simply be called as normal functions, with verde handling all the incremental logic transparently. 
71 | 72 | However, before we can call our queries, we must first create our database: 73 | ```rust ignore 74 | #[verde::storage] 75 | struct Storage(MyTrackedStruct, MyInternedStruct, MyPushableStruct, double); 76 | 77 | #[verde::db] 78 | struct Database(Storage); 79 | ``` 80 | The `storage` attribute is used to create a *storage struct* that holds types that the database will know about. 81 | Several storage structs composed together form a database. This multi-tiered approach allows for a modular design, 82 | where each crate can define a storage struct, with the driver crate only having to include them. 83 | 84 | Finally, we can run our queries: 85 | ```rust ignore 86 | fn main() { 87 | let mut db = Database::default(); 88 | let db = &mut db as &mut dyn verde::Db; 89 | let input = db.set(MyTrackedStruct { id: UniqueIdentifier::new(), value: 5 }); 90 | let output = db.execute(|ctx| double(ctx, input)); 91 | assert_eq!(db.get(output).value, 10); 92 | let mut pushables = db.get_all::(); 93 | assert_eq!(pushables.next().map(|x| x.value), Some(5)); 94 | assert_eq!(pushables.next(), None); 95 | } 96 | ``` 97 | -------------------------------------------------------------------------------- /compiler/verde/src/test.rs: -------------------------------------------------------------------------------- 1 | //! Utilities for testing with verde. 2 | //! 3 | //! This module provides a [`TestDatabase`] which can be used to test queries, without the boilerplate of creating the 4 | //! 
appropriate storage and database types 5 | 6 | use std::cell::RefCell; 7 | 8 | use rustc_hash::FxHashMap; 9 | 10 | use crate::{ 11 | internal::{ 12 | storage::{ 13 | ErasedInternedStorage, 14 | ErasedPushableStorage, 15 | ErasedQueryStorage, 16 | ErasedTrackedStorage, 17 | InternedStorage, 18 | PushableStorage, 19 | QueryStorage, 20 | RouteBuilder, 21 | RoutingTable, 22 | RoutingTableBuilder, 23 | TrackedStorage, 24 | }, 25 | Query, 26 | Storage, 27 | }, 28 | Db, 29 | Interned, 30 | Pushable, 31 | Tracked, 32 | }; 33 | 34 | #[doc(hidden)] 35 | pub enum StorageType { 36 | Tracked(Box), 37 | Query(Box), 38 | Pushable(Box), 39 | Interned(Box), 40 | } 41 | 42 | impl From> for StorageType { 43 | fn from(storage: TrackedStorage) -> Self { Self::Tracked(Box::new(storage)) } 44 | } 45 | 46 | impl From> for StorageType { 47 | fn from(storage: QueryStorage) -> Self { Self::Query(Box::new(storage)) } 48 | } 49 | 50 | impl From> for StorageType { 51 | fn from(storage: PushableStorage) -> Self { Self::Pushable(Box::new(storage)) } 52 | } 53 | 54 | impl From> for StorageType { 55 | fn from(storage: InternedStorage) -> Self { Self::Interned(Box::new(storage)) } 56 | } 57 | 58 | /// A database for easy testing. 
59 | pub struct TestDatabase { 60 | table: RoutingTable, 61 | storage: RefCell>, 62 | } 63 | 64 | impl TestDatabase { 65 | pub fn new() -> Self { 66 | Self { 67 | table: RoutingTableBuilder::default().finish(), 68 | storage: RefCell::new(FxHashMap::default()), 69 | } 70 | } 71 | 72 | fn make(&self) { 73 | for make in self.table.make() { 74 | let (storage, index) = make(); 75 | self.storage.borrow_mut().insert(index, storage); 76 | } 77 | } 78 | } 79 | 80 | impl Db for TestDatabase { 81 | fn init_routing(_: &mut RoutingTableBuilder) 82 | where 83 | Self: Sized, 84 | { 85 | } 86 | 87 | fn routing_table(&self) -> &RoutingTable { &self.table } 88 | 89 | fn storage_struct(&self, storage: u16) -> &dyn Storage { 90 | assert_eq!(storage, 1); 91 | self 92 | } 93 | } 94 | 95 | impl Default for TestDatabase { 96 | fn default() -> Self { Self::new() } 97 | } 98 | 99 | impl Storage for TestDatabase { 100 | fn init_routing(_: &mut RouteBuilder) 101 | where 102 | Self: Sized, 103 | { 104 | } 105 | 106 | fn tracked_storage(&self, index: u16) -> Option<&dyn ErasedTrackedStorage> { 107 | // SAFETY: Uh I think it should be fine. 
108 | self.make(); 109 | match self.storage.borrow().get(&index) { 110 | Some(StorageType::Tracked(storage)) => Some(unsafe { std::mem::transmute(storage.as_ref()) }), 111 | _ => None, 112 | } 113 | } 114 | 115 | fn query_storage(&self, index: u16) -> Option<&dyn ErasedQueryStorage> { 116 | self.make(); 117 | match self.storage.borrow().get(&index) { 118 | Some(StorageType::Query(storage)) => Some(unsafe { std::mem::transmute(storage.as_ref()) }), 119 | _ => None, 120 | } 121 | } 122 | 123 | fn pushable_storage(&self, index: u16) -> Option<&dyn ErasedPushableStorage> { 124 | self.make(); 125 | match self.storage.borrow().get(&index) { 126 | Some(StorageType::Pushable(storage)) => Some(unsafe { std::mem::transmute(storage.as_ref()) }), 127 | _ => None, 128 | } 129 | } 130 | 131 | fn interned_storage(&self, index: u16) -> Option<&dyn ErasedInternedStorage> { 132 | self.make(); 133 | match self.storage.borrow().get(&index) { 134 | Some(StorageType::Interned(storage)) => Some(unsafe { std::mem::transmute(storage.as_ref()) }), 135 | _ => None, 136 | } 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /docs/overview.md: -------------------------------------------------------------------------------- 1 | # Overview of the yam compiler infrastructure 2 | 3 | The yam compiler is built around the needs of both a batch compiler and a language server, allowing for fully-featured and up-to-date IDE support at all time, and also allowing the compiler to use the LSP's refactoring support for diagnostics and fix suggestions. 4 | 5 | ## The pipeline 6 | 7 | The compiler is heavily focused around incrementality, reducing the recomputation required on edit, keeping IDE responsiveness. 8 | 9 | ### Parsing 10 | 11 | Source code is parsed into a concrete syntax tree (CST), with the entire file being reparsed on every edit. 
This may seem wasteful, but parsing even huge files is fast enough that the added headache of maintaining an incremental parser is not worth it. 12 | 13 | ### Indexing 14 | 15 | To constrain change propagation through the incremental system, on every reparse, a public and private *index* is generated for each file. An index contains the symbols declared in the file for name resolution. 16 | 17 | The public index is used for name resolution in modules outside the current one, while the private index is used for name resolution in the current module and its submodules. This allows us to save on re-running name resolution for *every* file whenever a new symbol is added. 18 | 19 | Imports and reimports are left as-is in the indices because we only have local knowledge at the moment. 20 | 21 | ### Canonicalization 22 | 23 | After indexing, a module tree is generated, storing each index pair (public and private). 24 | 25 | This module tree is then used to canonicalize all indices and generate a new tree, where each index now knows the absolute paths of everything visible. 26 | 27 | Module trees are recursively tracked trees, which gives us an interesting benefit for free - since IDs are stable, and only IDs used are tracked, we only build a dependency on parts of the tree we search through. 28 | 29 | ### HIR 30 | 31 | Next, the canonicalized tree is used to generate HIR nodes for each item in the changed module. If visible symbols in the indices have changed, HIR is also generated for other modules affected by this change. 32 | 33 | HIR is a name-resolved and desugared tree-like IR. It is here that our IR transitions from being file-based to item-based. Each item becomes its own HIR node, and the workspace becomes a flat list of HIR nodes. This allows us to be incremental over each HIR node, instead of over each file. At the moment, we *do not* have more fine-grained incrementality than an HIR node. 
34 | 35 | ### THIR 36 | Once HIR for each item has been generated, we type check in two stages: 37 | 1. Declaration - go through each item's 'signature' and generate relevant (interned) types. 38 | 2. Definition - go through each item's 'body' and infer types. 39 | 40 | #### Declaration 41 | Structs generate a type describing themselves, as well as the type of each field. 42 | Enums generate the integer lang item representation of themselves. 43 | Functions generate a type describing themselves, as well as the type of each parameter and the return type. 44 | Statics generate their own type, and so do type aliases. 45 | 46 | #### Definition 47 | Using the type declarations of each item, function bodies and static initializers are type checked with constraint-based type inference. 48 | While going through the whole expression tree, constraints are generated between partially-resolved types, and then solved at the end to generate a fully-resolved type. 49 | 50 | Constraints are solved in a loop until a whole pass goes through without any constraints being solved. 51 | Then, constraints are 'finalized', where they can default to a type (for example, integer literals default to `i32`). 52 | After that, the constraint solve loop is repeated until no more constraints are solved. 53 | After this loop, any unsolved constraints are reported as errors. 54 | 55 | ## Avoiding span invalidation 56 | 57 | If we were to store spans directly in HIR nodes, we would force a recomputation every time a span changes. 58 | 59 | Instead, we build a stable side-channel that stores CST nodes, and store these IDs in HIR. We also base diagnostics on these IDs so even the diagnostics generated are always valid. 
60 | -------------------------------------------------------------------------------- /compiler/parse/src/api.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::VecDeque, ops::Range}; 2 | 3 | use lex::{token::Token, Lexer, T}; 4 | use syntax::{ 5 | builder::{Checkpoint, TreeBuilder, TreeBuilderContext}, 6 | SyntaxKind, 7 | }; 8 | 9 | pub struct Api<'c, 's> { 10 | lexer: Lexer<'s>, 11 | builder: TreeBuilder<'c>, 12 | lookahead: [Token; Api::MAX_LOOKAHEAD], 13 | trivia_ranges: [Range; Api::MAX_LOOKAHEAD], 14 | trivia_buf: VecDeque, 15 | } 16 | 17 | impl<'c, 's> Api<'c, 's> { 18 | pub fn new(source: &'s str, ctx: &'c mut TreeBuilderContext) -> Self { 19 | const EMPTY_RANGE: Range = 0..0; 20 | 21 | let mut builder = TreeBuilder::new(ctx); 22 | builder.start_node(SyntaxKind::File); 23 | let mut this = Api { 24 | builder, 25 | lexer: Lexer::new(source), 26 | lookahead: [Token::default(); Self::MAX_LOOKAHEAD], 27 | trivia_ranges: [EMPTY_RANGE; Self::MAX_LOOKAHEAD], 28 | trivia_buf: VecDeque::new(), 29 | }; 30 | 31 | this.fill_lookahead(); 32 | this 33 | } 34 | 35 | pub fn finish(mut self) -> TreeBuilder<'c> { 36 | for _ in 0..Self::MAX_LOOKAHEAD { 37 | self.output_trivia(); 38 | 39 | let (next, range) = self.next(); 40 | self.push_lookahead(next, range); 41 | } 42 | 43 | self.builder.finish_node(); 44 | self.builder 45 | } 46 | } 47 | 48 | impl Api<'_, '_> { 49 | const MAX_LOOKAHEAD: usize = 2; 50 | 51 | /// Skips trivia. 
52 | pub fn bump(&mut self) { 53 | self.output_trivia(); 54 | 55 | let tok = self.peek(); 56 | if !matches!(tok.kind, T![eof]) { 57 | self::tok(tok, &self.lexer, &mut self.builder); 58 | 59 | let (next, range) = self.next(); 60 | self.push_lookahead(next, range); 61 | } 62 | } 63 | 64 | pub fn peek_n(&self, i: u8) -> Token { self.lookahead[i as usize] } 65 | 66 | pub fn peek(&self) -> Token { self.peek_n(0) } 67 | 68 | pub fn node_depth(&self) -> usize { self.builder.node_depth() } 69 | 70 | pub fn start_node(&mut self, kind: SyntaxKind) { 71 | self.output_trivia(); 72 | self.builder.start_node(kind); 73 | } 74 | 75 | pub fn finish_node(&mut self) { 76 | self.builder.finish_node(); 77 | self.output_trivia(); 78 | } 79 | 80 | pub fn finish_node_at(&mut self, node_depth: usize) { 81 | self.builder.finish_node_at(node_depth); 82 | self.output_trivia(); 83 | } 84 | 85 | pub fn checkpoint(&self) -> Checkpoint { self.builder.checkpoint() } 86 | 87 | pub fn start_node_at(&mut self, checkpoint: Checkpoint, kind: SyntaxKind) { 88 | self.builder.start_node_at(checkpoint, kind); 89 | } 90 | 91 | fn push_lookahead(&mut self, tok: Token, trivia_range: Range) { 92 | for i in 0..Self::MAX_LOOKAHEAD - 1 { 93 | self.lookahead[i] = self.lookahead[i + 1]; 94 | self.trivia_ranges[i] = self.trivia_ranges[i + 1].clone(); 95 | } 96 | self.lookahead[Self::MAX_LOOKAHEAD - 1] = tok; 97 | self.trivia_ranges[Self::MAX_LOOKAHEAD - 1] = trivia_range; 98 | } 99 | 100 | fn next(&mut self) -> (Token, Range) { 101 | let len = self.trivia_buf.len(); 102 | let mut added = 0; 103 | loop { 104 | let next = self.lexer.next(); 105 | if !matches!(next.kind, T![comment] | T![ws]) { 106 | return (next, len..len + added); 107 | } 108 | self.trivia_buf.push_back(next); 109 | added += 1; 110 | } 111 | } 112 | 113 | fn output_trivia(&mut self) { 114 | let range_removed = self.trivia_ranges[0].clone(); 115 | let count = range_removed.len(); 116 | for tok in self.trivia_buf.drain(range_removed) { 117 | 
self::tok(tok, &self.lexer, &mut self.builder); 118 | } 119 | 120 | let range = &mut self.trivia_ranges[0]; 121 | range.start = 0; 122 | range.end = 0; 123 | for trivia in self.trivia_ranges[1..].iter_mut() { 124 | *trivia = trivia.start - count..trivia.end - count; 125 | } 126 | } 127 | 128 | fn fill_lookahead(&mut self) { 129 | for _ in 0..Self::MAX_LOOKAHEAD { 130 | let (tok, range) = self.next(); 131 | self.push_lookahead(tok, range); 132 | } 133 | } 134 | } 135 | 136 | fn tok(tok: Token, lexer: &Lexer, builder: &mut TreeBuilder) { 137 | let kind = SyntaxKind::from(tok.kind); 138 | let text = &lexer.source()[tok.span.start as usize..tok.span.end as usize]; 139 | builder.token(kind, text); 140 | } 141 | -------------------------------------------------------------------------------- /compiler/syntax/yam.ungram: -------------------------------------------------------------------------------- 1 | //**************** 2 | // Names and Paths 3 | //**************** 4 | 5 | Path = 6 | PathSegment* 7 | 8 | PathSegment = 9 | '.' 10 | | Name 11 | 12 | Name = 13 | 'ident' 14 | 15 | //******* 16 | // Items 17 | //******* 18 | 19 | File = 20 | Item* 21 | 22 | TokenTree = 23 | ';' // Ignore, manually implemented 24 | 25 | Attribute = 26 | '@' Name TokenTree? 27 | 28 | Item = 29 | (Attribute)* Visibility? ItemKind 30 | 31 | ItemKind = 32 | Fn 33 | | Struct 34 | | Enum 35 | | TypeAlias 36 | | Static 37 | | Import 38 | 39 | Fn = 40 | Abi? 'fn' Name ParamList RetTy? FnBody 41 | 42 | FnBody = 43 | Block 44 | | ';' 45 | 46 | ParamList = 47 | '(' (Param (',' Param)* ','?)? ')' 48 | 49 | Param = 50 | Name ':' Type 51 | 52 | RetTy = 53 | '->' Type 54 | 55 | Visibility = 56 | 'pub' 57 | 58 | Abi = 59 | 'extern' 'string'? 60 | 61 | Struct = 62 | 'struct' Name '{' fields:(Param*) '}' 63 | 64 | Enum = 65 | 'enum' Name VariantList 66 | 67 | VariantList = 68 | '{' variants:((Name (',' Name)* ','?)?) 
'}' 69 | 70 | TypeAlias = 71 | 'type' Name '=' Type ';' 72 | 73 | Static = 74 | 'static' Name ':' Type '=' init:Expr ';' 75 | 76 | Rename = 77 | 'as' Name 78 | 79 | Import = 80 | 'import' ImportTree ';' 81 | 82 | ImportTree = 83 | ListImport 84 | | RenameImport 85 | 86 | ListImport = 87 | Path? ImportTreeList 88 | 89 | RenameImport = 90 | Path Rename? 91 | 92 | ImportTreeList = 93 | '{' (ImportTree (',' ImportTree)* ','?)? '}' 94 | 95 | //******* 96 | // Types 97 | //******* 98 | 99 | Type = 100 | ArrayType 101 | | FnType 102 | | InferType 103 | | PathType 104 | | PtrType 105 | 106 | ArrayType = 107 | '[' Type ';' len:Expr ']' 108 | 109 | FnType = 110 | Abi? 'fn' params:TyParamList RetTy? 111 | 112 | TyParamList = 113 | '(' (Type (',' Type)* ','?)? ')' 114 | 115 | InferType = 116 | '_' 117 | 118 | PathType = Path 119 | 120 | PtrType = 121 | '*' 'mut'? Type 122 | 123 | //**************************** 124 | // Statements and Expressions 125 | //**************************** 126 | 127 | Stmt = 128 | ';' 129 | | SemiExpr 130 | | Expr 131 | | Item 132 | 133 | SemiExpr = 134 | Expr ';' 135 | 136 | Expr = 137 | ArrayExpr 138 | | InfixExpr 139 | | Block 140 | | BreakExpr 141 | | CallExpr 142 | | CastExpr 143 | | 'continue' 144 | | FieldExpr 145 | | ForExpr 146 | | IfExpr 147 | | IndexExpr 148 | | Literal 149 | | LoopExpr 150 | | MatchExpr 151 | | ParenExpr 152 | | NameExpr 153 | | PrefixExpr 154 | | RefExpr 155 | | ReturnExpr 156 | | WhileExpr 157 | | LetExpr 158 | 159 | ArrayExpr = 160 | '[' ArrayInit ']' 161 | 162 | ArrayInit = 163 | ArrayList 164 | | ArrayRepeat 165 | 166 | ArrayList = 167 | (Expr (',' Expr)* ','?)? 168 | 169 | ArrayRepeat = 170 | Expr ';' len:Expr 171 | 172 | LetExpr = 173 | 'let' Name (':' Type)? ('=' init:Expr)? 
174 | 175 | Block = 176 | '{' statements:(Stmt*) '}' 177 | 178 | InfixExpr = 179 | lhs:Expr op:InfixOp rhs:Expr 180 | 181 | InfixOp = 182 | '||' | '&&' 183 | | '==' | '!=' | '<=' | '>=' | '<' | '>' 184 | | '+' | '*' | '-' | '/' | '%' | '<<' | '>>' | '^' | '|' | '&' 185 | | '=' | '+=' | '/=' | '*=' | '%=' | '>>=' | '<<=' | '-=' | '|=' | '&=' | '^=' 186 | 187 | BreakExpr = 188 | 'break' Expr? 189 | 190 | CallExpr = 191 | Expr ArgList 192 | 193 | CastExpr = 194 | Expr 'as' Type 195 | 196 | FieldExpr = 197 | Expr '.' Name 198 | 199 | ForExpr = 200 | 'for' Name 'in' iter:Expr body:Block 201 | 202 | IfExpr = 203 | 'if' cond:Expr then:Block ('else' else_:Expr)? 204 | 205 | IndexExpr = 206 | base:Expr '[' index:Expr ']' 207 | 208 | Literal = 209 | 'bool' | 'char' | 'float' | 'int' | 'string' 210 | 211 | LoopExpr = 212 | 'loop' body:Block ('while' Expr)? 213 | 214 | MatchExpr = 215 | 'match' Expr '{' arms:(MatchArm*) '}' 216 | 217 | MatchArm = 218 | value:Expr '=>' then:Expr ','? 219 | 220 | ParenExpr = 221 | '(' Expr ')' 222 | 223 | NameExpr = 224 | '.'? Name 225 | 226 | RefExpr = 227 | '&' 'mut'? Expr 228 | 229 | PrefixExpr = 230 | op:PrefixOp Expr 231 | 232 | PrefixOp = 233 | '-' | '!' | '*' 234 | 235 | ReturnExpr = 236 | 'return' Expr? 237 | 238 | WhileExpr = 239 | 'while' Expr body:Block 240 | 241 | ArgList = 242 | '(' (Expr (',' Expr)* ','?)? 
')' 243 | -------------------------------------------------------------------------------- /compiler/diagnostics/src/diag.rs: -------------------------------------------------------------------------------- 1 | use ariadne::{Report, ReportKind}; 2 | use verde::Pushable; 3 | 4 | use crate::{FileCache, FileSpan, FullSpan, Span}; 5 | 6 | #[derive(Copy, Clone, Eq, PartialEq)] 7 | pub enum DiagKind { 8 | Error, 9 | Warning, 10 | Advice, 11 | } 12 | 13 | impl DiagKind { 14 | pub(crate) fn into_report_kind(self) -> ReportKind<'static> { 15 | match self { 16 | DiagKind::Error => ReportKind::Error, 17 | DiagKind::Warning => ReportKind::Warning, 18 | DiagKind::Advice => ReportKind::Advice, 19 | } 20 | } 21 | } 22 | 23 | #[derive(Clone)] 24 | pub struct Label { 25 | pub span: S, 26 | pub message: Option, 27 | } 28 | 29 | impl Label { 30 | pub fn new(span: S, message: impl ToString) -> Self { 31 | Self { 32 | span, 33 | message: Some(message.to_string()), 34 | } 35 | } 36 | 37 | pub fn no_message(span: S) -> Self { Self { span, message: None } } 38 | 39 | pub fn map_span(self, f: impl FnOnce(S) -> T) -> Label { 40 | Label { 41 | span: f(self.span), 42 | message: self.message, 43 | } 44 | } 45 | } 46 | 47 | #[derive(Pushable, Clone)] 48 | pub struct Diagnostic { 49 | pub kind: DiagKind, 50 | pub message: String, 51 | pub span: S, 52 | pub labels: Vec>, 53 | } 54 | 55 | impl Diagnostic { 56 | pub fn new(kind: DiagKind, message: impl ToString, span: S) -> Self { 57 | Self { 58 | kind, 59 | message: message.to_string(), 60 | span, 61 | labels: Vec::new(), 62 | } 63 | } 64 | 65 | pub fn label(mut self, label: Label) -> Self { 66 | self.labels.push(label); 67 | self 68 | } 69 | 70 | pub fn map_span(self, mut f: impl FnMut(S) -> T) -> Diagnostic { 71 | Diagnostic { 72 | kind: self.kind, 73 | message: self.message, 74 | span: f(self.span), 75 | labels: self.labels.into_iter().map(|label| label.map_span(&mut f)).collect(), 76 | } 77 | } 78 | } 79 | 80 | impl Diagnostic 81 | where 82 | 
S: Span, 83 | { 84 | /// Emit the diagnostic with a cache and span resolution context. 85 | pub fn emit(self, cache: &FileCache, ctx: &S::Ctx) { 86 | let span = self.span.to_raw(ctx); 87 | let mut builder = Report::build(self.kind.into_report_kind(), span.relative, span.start as _); 88 | builder.set_message(&self.message); 89 | for label in self.labels { 90 | builder.add_label(if let Some(message) = &label.message { 91 | ariadne::Label::new(label.span.to_raw(ctx)).with_message(message) 92 | } else { 93 | ariadne::Label::new(label.span.to_raw(ctx)) 94 | }); 95 | } 96 | 97 | builder.finish().eprint(cache).expect("Failed to emit diagnostic"); 98 | } 99 | } 100 | 101 | /// A diagnostic that is fully resolved. 102 | pub type FullDiagnostic = Diagnostic; 103 | /// A diagnostic that local to a specific file, but we don't know which one. 104 | pub type FileDiagnostic = Diagnostic; 105 | 106 | pub mod test { 107 | use std::fmt::{Debug, Display}; 108 | 109 | use ariadne::{CharSet, Config, Source}; 110 | 111 | use super::*; 112 | 113 | impl Diagnostic 114 | where 115 | S: Span, 116 | { 117 | pub fn emit_test(self, source: &str, ctx: &S::Ctx) -> String { 118 | let cache = Cache { 119 | source: Source::from(source), 120 | }; 121 | let mut s = Vec::new(); 122 | 123 | let span = self.span.to_raw(ctx); 124 | let mut builder = Report::build(self.kind.into_report_kind(), span.relative, span.start as _) 125 | .with_config(Config::default().with_color(false).with_char_set(CharSet::Ascii)); 126 | builder.set_message(self.message); 127 | for label in self.labels { 128 | builder.add_label(if let Some(message) = label.message { 129 | ariadne::Label::new(label.span.to_raw(ctx)).with_message(message) 130 | } else { 131 | ariadne::Label::new(label.span.to_raw(ctx)) 132 | }); 133 | } 134 | 135 | builder 136 | .finish() 137 | .write(&cache, &mut s) 138 | .expect("Failed to emit diagnostic"); 139 | 140 | String::from_utf8(s).expect("Failed to convert to string") 141 | } 142 | } 143 | 144 | 
struct Cache { 145 | source: Source, 146 | } 147 | 148 | impl ariadne::Cache for &Cache { 149 | fn fetch(&mut self, _: &T) -> Result<&Source, Box> { Ok(&self.source) } 150 | 151 | fn display<'a>(&self, _: &'a T) -> Option> { None } 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /compiler/yamw/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | ffi::OsStr, 3 | path::{Path, PathBuf}, 4 | time::Duration, 5 | }; 6 | 7 | use clap::Parser; 8 | use diagnostics::{emit, quick_diagnostic, DiagKind, FileCache, FileDiagnostic, FilePath, FullSpan}; 9 | use hir_lower::{ 10 | index::{generate_index, InnerIndex}, 11 | tree::generate_tree, 12 | Module, 13 | }; 14 | use notify_debouncer_mini::{new_debouncer, notify::RecursiveMode}; 15 | use parse::{syntax::ast::File, ParseContext}; 16 | use rustc_hash::FxHashMap; 17 | use tracing::trace; 18 | use tracing_subscriber::{fmt, fmt::format::FmtSpan, EnvFilter}; 19 | use verde::{db, Db, Id}; 20 | use walkdir::WalkDir; 21 | 22 | #[db] 23 | struct Database(hir::Storage, hir_lower::Storage); 24 | 25 | #[derive(Parser)] 26 | #[command(author, version, about, long_about = None)] 27 | struct Options { 28 | path: String, 29 | } 30 | 31 | struct FileData { 32 | file: FilePath, 33 | ast: File, 34 | diags: Vec, 35 | } 36 | 37 | struct WatchContext<'a> { 38 | db: &'a mut dyn Db, 39 | cache: FileCache, 40 | file_map: FxHashMap, 41 | files: Vec, 42 | indices: Vec>, 43 | parser: ParseContext, 44 | root: FilePath, 45 | } 46 | 47 | impl<'a> WatchContext<'a> { 48 | fn new(db: &'a mut dyn Db, root: FilePath) -> Self { 49 | Self { 50 | db, 51 | cache: FileCache::new(), 52 | file_map: FxHashMap::default(), 53 | files: Vec::new(), 54 | indices: Vec::new(), 55 | parser: ParseContext::new(), 56 | root, 57 | } 58 | } 59 | 60 | fn on_file_change(&mut self, path: &Path) { 61 | if !matches!(path.extension().and_then(OsStr::to_str), Some("yam")) { 62 | 
return; 63 | } 64 | let path = path.canonicalize().unwrap(); 65 | 66 | trace!("file changed: {}", path.display()); 67 | 68 | let Ok(source) = std::fs::read_to_string(&path) else { 69 | return; 70 | }; 71 | let Some(path) = path.as_os_str().to_str() else { 72 | quick_diagnostic(DiagKind::Error, "paths are required to be UTF-8"); 73 | return; 74 | }; 75 | let file = FilePath::new(path); 76 | let (ast, mut diags) = self.parser.parse_file(&source); 77 | self.cache.set_file(file, source); 78 | 79 | let (index, index_diags) = generate_index(Module { file, ast: ast.clone() }); 80 | diags.extend(index_diags); 81 | 82 | let index = self.db.set_input(index); 83 | match self.file_map.get(&file) { 84 | Some(id) => { 85 | self.files[*id] = FileData { file, ast, diags }; 86 | self.indices[*id] = index; 87 | }, 88 | None => { 89 | self.file_map.insert(file, self.files.len()); 90 | self.files.push(FileData { file, ast, diags }); 91 | self.indices.push(index); 92 | }, 93 | } 94 | } 95 | 96 | fn recompile(&mut self) { 97 | for data in self.files.iter() { 98 | emit(data.diags.iter().cloned(), &self.cache, &data.file); 99 | } 100 | self.order_files(); 101 | 102 | let tree = self.db.execute(|db| generate_tree(db, &self.indices)); 103 | } 104 | 105 | fn order_files(&mut self) { 106 | let root_id = *self.file_map.get(&self.root).unwrap(); 107 | if root_id != 0 { 108 | self.file_map.insert(self.root, 0); 109 | self.file_map.insert(self.files[0].file, root_id); 110 | self.files.swap(0, root_id); 111 | } 112 | } 113 | } 114 | 115 | fn main() { 116 | let _ = tracing::subscriber::set_global_default( 117 | fmt() 118 | .pretty() 119 | .with_env_filter(EnvFilter::from_env("YAMLOG")) 120 | .with_span_events(FmtSpan::CLOSE) 121 | .finish(), 122 | ); 123 | let mut db = Database::default(); 124 | let db = &mut db as &mut dyn Db; 125 | 126 | let options = Options::parse(); 127 | let Ok(root) = Path::new(&options.path).canonicalize() else { 128 | quick_diagnostic(DiagKind::Error, format!("`{}` does 
not exist", options.path)); 129 | return; 130 | }; 131 | if root.file_name().is_none() { 132 | quick_diagnostic(DiagKind::Error, format!("`{}` is not a file", root.display())); 133 | } 134 | let watch = root.parent().expect("file doesn't have a parent directory"); 135 | 136 | let (s, r) = crossbeam_channel::unbounded(); 137 | 138 | let Ok(mut watcher) = new_debouncer(Duration::from_millis(500), None, s) else { 139 | quick_diagnostic(DiagKind::Error, "failed to create file watcher"); 140 | return; 141 | }; 142 | 143 | let mut ctx = WatchContext::new(db, FilePath::new(root.to_str().unwrap())); 144 | 145 | for entry in WalkDir::new(watch) { 146 | let Ok(entry) = entry else { 147 | continue; 148 | }; 149 | 150 | ctx.on_file_change(entry.path()); 151 | } 152 | ctx.recompile(); 153 | 154 | let Ok(_) = watcher.watcher().watch(watch, RecursiveMode::Recursive) else { 155 | quick_diagnostic(DiagKind::Error, "failed to watch directory"); 156 | return; 157 | }; 158 | while let Ok(event) = r.recv() { 159 | let Ok(events) = event else { 160 | continue; 161 | }; 162 | 163 | for event in events { 164 | ctx.on_file_change(&event.path); 165 | } 166 | ctx.recompile(); 167 | } 168 | } 169 | -------------------------------------------------------------------------------- /compiler/parse/src/parse/recovery.rs: -------------------------------------------------------------------------------- 1 | use std::{convert::Infallible, fmt::Display, num::NonZeroUsize, ops::FromResidual}; 2 | 3 | use lex::{token::TokenKind, T}; 4 | 5 | use crate::parse::Parser; 6 | 7 | #[derive(Copy, Clone, Eq, PartialEq, Debug)] 8 | pub enum ParseRule { 9 | Item, 10 | Param, 11 | Field, 12 | Struct, 13 | Fn, 14 | Enum, 15 | EnumVariant, 16 | TypeAlias, 17 | Import, 18 | Static, 19 | ImportTree, 20 | Attribute, 21 | Type, 22 | Block, 23 | Expr, 24 | MatchArm, 25 | Repeat, 26 | List, 27 | } 28 | 29 | pub trait Rule: Copy { 30 | fn rule(&self) -> ParseRule; 31 | 32 | fn parse(self, p: &mut Parser) -> Recovery; 33 | } 
34 | 35 | #[must_use] 36 | #[derive(Copy, Clone)] 37 | pub struct Recovery(Option); 38 | 39 | impl Recovery { 40 | pub fn ok() -> Self { Self(None) } 41 | 42 | pub fn to(to: usize) -> Self { Self(Some(NonZeroUsize::new(to + 1).unwrap())) } 43 | 44 | pub fn get(self) -> usize { 45 | self.0 46 | .map(|n| n.get() - 1) 47 | .expect("Recovery::get called on Recovery::ok") 48 | } 49 | 50 | pub fn check(self, p: &mut Parser) -> Result<(), Recovery> { 51 | if matches!(self.0, Some(n) if p.rule_stack.len() != n.get()) { 52 | let last = p.rule_stack.last().unwrap(); 53 | p.api.finish_node_at(last.node_depth); 54 | Err(self) 55 | } else { 56 | Ok(()) 57 | } 58 | } 59 | } 60 | 61 | impl FromResidual> for Recovery { 62 | fn from_residual(residual: Result) -> Self { residual.unwrap_err() } 63 | } 64 | 65 | impl ParseRule { 66 | pub fn start(&self) -> &'static [TokenKind] { 67 | match self { 68 | Self::Item => &[T![pub], T![struct], T![enum], T![fn], T![static], T![type], T![import]], 69 | Self::Param | Self::Field => &[T![ident]], 70 | Self::Struct => &[T![struct]], 71 | Self::Fn => &[T![fn]], 72 | Self::Enum => &[T![enum]], 73 | Self::EnumVariant => &[T![ident]], 74 | Self::TypeAlias => &[T![type]], 75 | Self::Import => &[T![import]], 76 | Self::Static => &[T![static]], 77 | Self::ImportTree => &[T![ident], T!['{']], 78 | Self::Attribute => &[T![@]], 79 | Self::Type => &[T!['['], T![extern], T![fn], T![_], T![ident], T![.], T![*]], 80 | Self::Block => &[T!['{']], 81 | Self::Expr | Self::MatchArm => &[ 82 | T!['('], 83 | T!['{'], 84 | T![bool], 85 | T![char], 86 | T![float], 87 | T![int], 88 | T![string], 89 | T![ident], 90 | T![.], 91 | T![break], 92 | T![return], 93 | T![continue], 94 | T![loop], 95 | T![while], 96 | T![for], 97 | T![if], 98 | T![let], 99 | T![match], 100 | T![-], 101 | T![!], 102 | T![&], 103 | T![*], 104 | ], 105 | Self::Repeat => unreachable!("Repeat is not a parse rule"), 106 | Self::List => unreachable!("List is not a parse rule"), 107 | } 108 | } 109 | 
110 | pub fn end(&self) -> &'static [(TokenKind, bool)] { 111 | match self { 112 | Self::Item => &[(T![;], true), (T!['}'], true)], 113 | Self::Param => &[(T![,], false), (T![')'], false)], 114 | Self::Field => &[(T![,], false), (T!['}'], false)], 115 | Self::Struct | Self::Fn | Self::Enum => &[(T!['}'], true)], 116 | Self::EnumVariant => &[(T![,], false), (T!['}'], false)], 117 | Self::TypeAlias | Self::Import | Self::Static => &[(T![;], true)], 118 | Self::ImportTree => &[(T!['}'], true), (T![;], false)], 119 | Self::Attribute => &[ 120 | (T![pub], false), 121 | (T![struct], false), 122 | (T![enum], false), 123 | (T![static], false), 124 | (T![type], false), 125 | (T![import], false), 126 | ], 127 | Self::Type => &[ 128 | (T![,], false), 129 | (T![;], false), 130 | (T![')'], false), 131 | (T![=], false), 132 | (T!['}'], false), 133 | ], 134 | Self::Block => &[(T!['}'], true)], 135 | Self::Expr => &[(T![,], false), (T![;], false), (T![')'], false), (T!['}'], false)], 136 | Self::MatchArm => &[(T![=>], false), (T!['}'], false), (T![,], false)], 137 | Self::List => &[], 138 | Self::Repeat => unreachable!("Repeat is not a parse rule"), 139 | } 140 | } 141 | } 142 | 143 | impl Display for ParseRule { 144 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 145 | write!( 146 | f, 147 | "{}", 148 | match self { 149 | Self::Item => "item", 150 | Self::Param => "function parameter", 151 | Self::Field => "struct field", 152 | Self::Struct => "struct", 153 | Self::Fn => "function", 154 | Self::Enum => "enum", 155 | Self::EnumVariant => "enum variant", 156 | Self::TypeAlias => "type alias", 157 | Self::Import => "import", 158 | Self::Static => "static", 159 | Self::ImportTree => "import tree", 160 | Self::Attribute => "attribute", 161 | Self::Type => "type", 162 | Self::Block => "block", 163 | Self::Expr => "expression", 164 | Self::MatchArm => "match arm", 165 | Self::Repeat => unreachable!("Repeat is not a parse rule"), 166 | Self::List => 
unreachable!("List is not a parse rule"), 167 | } 168 | ) 169 | } 170 | } 171 | -------------------------------------------------------------------------------- /compiler/verde/derive/src/lib.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::TokenStream; 2 | use quote::{quote, quote_spanned, ToTokens}; 3 | use syn::{parse_macro_input, spanned::Spanned, DeriveInput, ItemFn, ItemStruct}; 4 | 5 | mod database; 6 | mod pushable; 7 | mod query; 8 | mod tracked; 9 | 10 | type Result = std::result::Result; 11 | struct Error { 12 | span: proc_macro2::Span, 13 | message: String, 14 | } 15 | 16 | impl Error { 17 | fn new(span: proc_macro2::Span, message: impl ToString) -> Self { 18 | Self { 19 | span, 20 | message: message.to_string(), 21 | } 22 | } 23 | } 24 | 25 | impl ToTokens for Error { 26 | fn to_tokens(&self, tokens: &mut TokenStream) { 27 | let message = &self.message; 28 | tokens.extend(quote_spanned! { self.span => compile_error!(#message); }); 29 | } 30 | } 31 | 32 | /// Allow a type to be tracked by the database. 33 | /// 34 | /// This type must also implement `Eq`, and by extension, `PartialEq`. 35 | /// 36 | /// A single field must be marked with `#[id]`, which will uniquely identify an instance of this type output 37 | /// by a query. Different query functions may output equal IDs, but they will not interfere with each other. The ID must 38 | /// implement `Eq`, `Hash`, and `Clone`. 39 | #[proc_macro_derive(Tracked, attributes(id))] 40 | pub fn tracked(item: proc_macro::TokenStream) -> proc_macro::TokenStream { 41 | let input = parse_macro_input!(item as DeriveInput); 42 | match tracked::tracked(input) { 43 | Ok(x) => x, 44 | Err(e) => quote!(#e), 45 | } 46 | .into() 47 | } 48 | 49 | /// Allow a type to be interned in the database. 50 | /// 51 | /// This type must implement `Clone`, `Eq`, and `Hash`. 
52 | #[proc_macro_derive(Interned)] 53 | pub fn interned(item: proc_macro::TokenStream) -> proc_macro::TokenStream { 54 | let input = parse_macro_input!(item as DeriveInput); 55 | let ty = input.ident; 56 | let (i, t, w) = input.generics.split_for_impl(); 57 | (quote! { 58 | impl #i ::verde::Interned for #ty #t #w {} 59 | 60 | impl #i ::verde::internal::Storable for #ty #t #w { 61 | type Storage = ::verde::internal::storage::InternedStorage; 62 | 63 | fn tracked_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedTrackedStorage> { 64 | None 65 | } 66 | 67 | fn query_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedQueryStorage> { 68 | None 69 | } 70 | 71 | fn pushable_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedPushableStorage> { 72 | None 73 | } 74 | 75 | fn interned_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedInternedStorage> { 76 | Some(store) 77 | } 78 | } 79 | }) 80 | .into() 81 | } 82 | 83 | /// Allow a type to be pushed into the database from queries. 84 | #[proc_macro_derive(Pushable)] 85 | pub fn pushable(item: proc_macro::TokenStream) -> proc_macro::TokenStream { 86 | let input = parse_macro_input!(item as DeriveInput); 87 | match pushable::pushable(input) { 88 | Ok(x) => x, 89 | Err(e) => quote!(#e), 90 | } 91 | .into() 92 | } 93 | 94 | /// Generate a query. 95 | /// 96 | /// The first argument of the function must be of type `&verde::Ctx`, and they must return a `Tracked` struct. 97 | /// 98 | /// Arguments can be marked with `#[ignore]` to not be used to identify an execution of the query. 99 | #[proc_macro_attribute] 100 | pub fn query(attr: proc_macro::TokenStream, item: proc_macro::TokenStream) -> proc_macro::TokenStream { 101 | let attr = TokenStream::from(attr); 102 | if !attr.is_empty() { 103 | return quote_spanned! 
{ attr.span() => compile_error!("`query` does not take any arguments"); }.into(); 104 | } 105 | 106 | let input = parse_macro_input!(item as ItemFn); 107 | match query::query(input) { 108 | Ok(x) => x, 109 | Err(e) => quote!(#e), 110 | } 111 | .into() 112 | } 113 | 114 | /// Generate a storage struct. 115 | /// 116 | /// Storage structs must be a tuple struct that contain the `Tracked`, `Pushable`, and `Interned` types, as well the 117 | /// queries that to be stored into the database. 118 | #[proc_macro_attribute] 119 | pub fn storage(attr: proc_macro::TokenStream, item: proc_macro::TokenStream) -> proc_macro::TokenStream { 120 | let attr = TokenStream::from(attr); 121 | if !attr.is_empty() { 122 | return quote_spanned! { attr.span() => compile_error!("`storage` does not take any arguments"); }.into(); 123 | } 124 | 125 | let input = parse_macro_input!(item as ItemStruct); 126 | match database::storage(input) { 127 | Ok(x) => x, 128 | Err(e) => quote!(#e), 129 | } 130 | .into() 131 | } 132 | 133 | /// Generate a database. 134 | /// 135 | /// Databases must be a tuple struct that contain the `Storage` types. 136 | #[proc_macro_attribute] 137 | pub fn db(attr: proc_macro::TokenStream, item: proc_macro::TokenStream) -> proc_macro::TokenStream { 138 | let attr = TokenStream::from(attr); 139 | if !attr.is_empty() { 140 | return quote_spanned! 
{ attr.span() => compile_error!("`database` does not take any arguments"); }.into(); 141 | } 142 | 143 | let input = parse_macro_input!(item as ItemStruct); 144 | match database::database(input) { 145 | Ok(x) => x, 146 | Err(e) => quote!(#e), 147 | } 148 | .into() 149 | } 150 | -------------------------------------------------------------------------------- /compiler/verde/src/internal/storage/tracked.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | ops::Deref, 3 | sync::atomic::{AtomicU64, Ordering}, 4 | }; 5 | 6 | use parking_lot::{ 7 | lock_api::{RawRwLock, RawRwLockFair}, 8 | RwLock, 9 | }; 10 | 11 | use crate::{ 12 | event, 13 | internal::storage::{routing::Route, DashMap}, 14 | Tracked, 15 | }; 16 | 17 | #[derive(Copy, Clone, Eq, PartialEq, Hash)] 18 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 19 | pub struct ErasedId { 20 | pub(crate) route: Route, 21 | pub(crate) index: u32, 22 | } 23 | 24 | pub trait ErasedTrackedStorage { 25 | fn get_generation(&self, index: u32) -> u64; 26 | } 27 | 28 | pub struct Get<'a, T> { 29 | slot: &'a RwLock, 30 | values: &'a RwLock>>, 31 | } 32 | 33 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 34 | pub struct TrackedStorage { 35 | pub(crate) map: DashMap, u32>, 36 | pub(crate) values: RwLock>>, 37 | } 38 | 39 | impl ErasedTrackedStorage for TrackedStorage { 40 | fn get_generation(&self, index: u32) -> u64 { 41 | let values = self.values.read(); 42 | let slot = &values[index as usize]; 43 | slot.generation.load(Ordering::Acquire) 44 | } 45 | } 46 | 47 | impl TrackedStorage { 48 | /// Insert a new value into the storage. 
49 | pub fn insert(&self, value: T, query: Route) -> u32 { 50 | let ident = TrackedIdent { 51 | id: value.id().clone(), 52 | query, 53 | }; 54 | match self.map.get(&ident) { 55 | Some(index) => { 56 | let index = *index; 57 | let values = self.values.read(); 58 | let slot = &values[index as usize]; 59 | let mut out = slot.value.write(); 60 | 61 | if *out != value { 62 | event!( 63 | trace, 64 | "value changed, generation: {}", 65 | slot.generation.fetch_add(1, Ordering::Release) + 1 66 | ); 67 | } 68 | *out = value; 69 | 70 | index 71 | }, 72 | None => { 73 | let mut values = self.values.write(); 74 | let index = values.len() as u32; 75 | values.push(Slot { 76 | value: RwLock::new(value), 77 | generation: AtomicU64::new(0), 78 | }); 79 | event!(trace, "inserting new value"); 80 | self.map.insert(ident, index); 81 | index 82 | }, 83 | } 84 | } 85 | 86 | pub fn get(&self, index: u32) -> Get<'_, T> { 87 | unsafe { 88 | self.values.raw().lock_shared(); 89 | let slot = &(*self.values.data_ptr())[index as usize].value; 90 | slot.raw().lock_shared(); 91 | Get { 92 | slot, 93 | values: &self.values, 94 | } 95 | } 96 | } 97 | } 98 | 99 | impl<'a> dyn ErasedTrackedStorage + 'a { 100 | /// **Safety**: The type of `self` must be `TrackedStorage`. 101 | pub unsafe fn insert(&self, value: T, query: Route) -> u32 { 102 | unsafe { 103 | let storage = self as *const dyn ErasedTrackedStorage as *const TrackedStorage; 104 | (*storage).insert(value, query) 105 | } 106 | } 107 | 108 | /// **Safety**: The type of `self` must be `TrackedStorage`. 
109 | pub unsafe fn get(&self, index: u32) -> Get<'_, T> { 110 | unsafe { 111 | let storage = self as *const dyn ErasedTrackedStorage as *const TrackedStorage; 112 | (*storage).get(index) 113 | } 114 | } 115 | } 116 | 117 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 118 | pub(crate) struct Slot { 119 | pub(crate) value: RwLock, 120 | pub(crate) generation: AtomicU64, 121 | } 122 | 123 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 124 | pub(crate) struct TrackedIdent { 125 | pub(crate) id: T::Id, 126 | pub(crate) query: Route, 127 | } 128 | 129 | impl Clone for TrackedIdent { 130 | fn clone(&self) -> Self { 131 | Self { 132 | id: self.id.clone(), 133 | query: self.query, 134 | } 135 | } 136 | } 137 | impl PartialEq for TrackedIdent { 138 | fn eq(&self, other: &Self) -> bool { self.id == other.id && self.query == other.query } 139 | } 140 | impl Eq for TrackedIdent {} 141 | impl std::hash::Hash for TrackedIdent { 142 | fn hash(&self, state: &mut H) { 143 | self.id.hash(state); 144 | self.query.hash(state); 145 | } 146 | } 147 | 148 | impl Default for TrackedStorage { 149 | fn default() -> Self { 150 | Self { 151 | map: DashMap::default(), 152 | values: RwLock::new(Vec::new()), 153 | } 154 | } 155 | } 156 | 157 | impl Deref for Get<'_, T> { 158 | type Target = T; 159 | 160 | fn deref(&self) -> &Self::Target { unsafe { &*self.slot.data_ptr() } } 161 | } 162 | 163 | impl Drop for Get<'_, T> { 164 | fn drop(&mut self) { 165 | unsafe { 166 | self.slot.raw().unlock_shared_fair(); 167 | self.values.raw().unlock_shared_fair(); 168 | } 169 | } 170 | } 171 | 172 | #[cfg(test)] 173 | mod tests { 174 | use crate::internal::storage::Route; 175 | 176 | #[test] 177 | fn id() { 178 | use crate::Id; 179 | 180 | let id = Id::::new(1, Route { storage: 2, index: 3 }); 181 | let ret = id.get(); 182 | assert_eq!(ret.index, 1); 183 | assert_eq!(ret.route.storage, 2); 184 | assert_eq!(ret.route.index, 3); 185 | } 186 | } 187 
| -------------------------------------------------------------------------------- /compiler/verde/src/internal/traits.rs: -------------------------------------------------------------------------------- 1 | use std::hash::Hash; 2 | 3 | use crate::internal::storage::{ 4 | ErasedInternedStorage, 5 | ErasedPushableStorage, 6 | ErasedQueryStorage, 7 | ErasedTrackedStorage, 8 | InternedStorage, 9 | PushableStorage, 10 | QueryStorage, 11 | RouteBuilder, 12 | TrackedStorage, 13 | }; 14 | 15 | /// A struct that contains [`TrackedStorage`] or [`QueryStorage`] 16 | /// depending on whether `T` is a tracked struct or a query. 17 | pub trait StorageOf { 18 | /// Get the route index of `T::ToStore` in this storage struct. 19 | fn storage_index(&self) -> u16; 20 | } 21 | 22 | /// A struct that contains type storages. Must be implemented in conjunction with [`StorageOf`] 23 | pub trait Storage { 24 | fn init_routing(table: &mut RouteBuilder) 25 | where 26 | Self: Sized; 27 | 28 | /// Get a `&dyn TrackedStorage` if the route with `index` is a tracked struct. 29 | fn tracked_storage(&self, index: u16) -> Option<&dyn ErasedTrackedStorage>; 30 | 31 | /// Get a `&dyn QueryStorage` if the route with `index` is a query. 32 | fn query_storage(&self, index: u16) -> Option<&dyn ErasedQueryStorage>; 33 | 34 | /// Get a `&dyn PushableStorage` if the route with `index` is a pushable. 35 | fn pushable_storage(&self, index: u16) -> Option<&dyn ErasedPushableStorage>; 36 | 37 | /// Get a `&dyn InternedStorage` if the route with `index` is interned. 38 | fn interned_storage(&self, index: u16) -> Option<&dyn ErasedInternedStorage>; 39 | } 40 | 41 | /// A database that contains a storage struct `S`, 42 | /// implementing [`StorageOf`] for each type `T` stored in `S`. 43 | pub trait DbWith { 44 | /// The route index of `S` in this database. 45 | fn storage_struct_index(&self) -> u16; 46 | } 47 | 48 | /// A type that can be tracked by the database. 
49 | /// 50 | /// Can be automatically derived using the `#[derive(Tracked)]` attribute. Use `#[id]` on a field to 51 | /// specify the field that uniquely identifies each tracked instance. 52 | pub trait Tracked: Eq + Storable> { 53 | #[cfg(feature = "serde")] 54 | type Id: Eq + Hash + Clone + Send + Sync + serde::Serialize + for<'de> serde::Deserialize<'de>; 55 | 56 | #[cfg(not(feature = "serde"))] 57 | type Id: Eq + Hash + Clone + Send + Sync; 58 | 59 | fn id(&self) -> &Self::Id; 60 | } 61 | 62 | /// A type that can be 'pushed' into the database. Pushed values are stored globally and can be used for diagnostics or 63 | /// similar tracking. 64 | /// 65 | /// Can be automatically derived using the `#[derive(Pushable)]` attribute. 66 | pub trait Pushable: Storable> {} 67 | 68 | /// A query that can execute on the database. 69 | /// 70 | /// Can be automatically derived using the `#[query]` attribute on a `fn`. 71 | pub trait Query: Storable> { 72 | #[cfg(feature = "serde")] 73 | type Input: Eq + Hash + Send + Sync + serde::Serialize + for<'de> serde::Deserialize<'de>; 74 | 75 | #[cfg(not(feature = "serde"))] 76 | type Input: Eq + Hash + Send + Sync; 77 | 78 | type Output: Tracked + Send + Sync; 79 | } 80 | 81 | /// A type that is interned inside the database. 82 | pub trait Interned: Clone + Eq + Hash + Storable> {} 83 | 84 | /// A type that is either a [`Tracked`] struct or a query. 85 | /// Types that implement this trait can be stored inside `Storage`. 86 | pub trait Storable: Sized + Send + 'static { 87 | /// The type that should actually be stored inside the `Storage`. 88 | /// If the type is a `Tracked` struct, this should be [`TrackedStorage`]. 89 | /// If the type is a query, this should be [`QueryStorage`]. 90 | type Storage: Default + extra::ExtraBound; 91 | 92 | /// Cast to a `&dyn TrackedStorage` if `Self` is a tracked struct. 
93 | fn tracked_storage(store: &Self::Storage) -> Option<&dyn ErasedTrackedStorage>; 94 | 95 | /// Cast to a `&dyn QueryStorage` if `Self` is a query. 96 | fn query_storage(store: &Self::Storage) -> Option<&dyn ErasedQueryStorage>; 97 | 98 | /// Cast to a `&dyn PushableStorage` if `Self` is a pushable. 99 | fn pushable_storage(store: &Self::Storage) -> Option<&dyn ErasedPushableStorage>; 100 | 101 | /// Cast to a `&dyn InternedStorage` if `Self` is interned. 102 | fn interned_storage(store: &Self::Storage) -> Option<&dyn ErasedInternedStorage>; 103 | } 104 | 105 | #[cfg(feature = "test")] 106 | mod extra { 107 | use crate::test::StorageType; 108 | 109 | pub trait ExtraBound: Into {} 110 | 111 | impl> ExtraBound for T {} 112 | } 113 | 114 | #[cfg(not(feature = "test"))] 115 | mod extra { 116 | pub trait ExtraBound {} 117 | 118 | impl ExtraBound for T {} 119 | } 120 | 121 | macro_rules! intern { 122 | ($t:ty) => { 123 | impl Interned for $t {} 124 | 125 | impl Storable for $t { 126 | type Storage = InternedStorage; 127 | 128 | fn tracked_storage(_store: &Self::Storage) -> Option<&dyn ErasedTrackedStorage> { None } 129 | 130 | fn query_storage(_store: &Self::Storage) -> Option<&dyn ErasedQueryStorage> { None } 131 | 132 | fn pushable_storage(_store: &Self::Storage) -> Option<&dyn ErasedPushableStorage> { None } 133 | 134 | fn interned_storage(store: &Self::Storage) -> Option<&dyn ErasedInternedStorage> { Some(store) } 135 | } 136 | }; 137 | } 138 | 139 | intern!(String); 140 | -------------------------------------------------------------------------------- /compiler/verde/tests/test.rs: -------------------------------------------------------------------------------- 1 | use std::sync::atomic::{AtomicBool, Ordering}; 2 | 3 | use verde::{db, query, storage, test::TestDatabase, Ctx, Db, Id, Pushable, Tracked}; 4 | 5 | #[derive(Copy, Clone, Eq, PartialEq, Hash, Tracked)] 6 | #[cfg_attr(feature = "serde", derive(::verde::serde::Serialize, ::verde::serde::Deserialize))] 7 | 
struct TrackedStruct { 8 | #[id] 9 | id: u32, 10 | value: u32, 11 | } 12 | 13 | #[derive(Clone, Pushable)] 14 | #[cfg_attr(feature = "serde", derive(::verde::serde::Serialize, ::verde::serde::Deserialize))] 15 | struct Accum; 16 | 17 | #[query] 18 | fn double(db: &Ctx, id: Id) -> TrackedStruct { 19 | db.push(Accum); 20 | let s = db.get(id); 21 | TrackedStruct { 22 | id: s.id, 23 | value: s.value * 2, 24 | } 25 | } 26 | 27 | #[query] 28 | fn sum(db: &Ctx, id: u32, ids: Vec>) -> TrackedStruct { 29 | db.push(Accum); 30 | let mut value = 0; 31 | for x in ids.into_iter().map(|x| db.get(x)) { 32 | value += x.value; 33 | } 34 | TrackedStruct { id, value } 35 | } 36 | 37 | #[test] 38 | fn correct_result() { 39 | #[storage] 40 | struct Storage(TrackedStruct, Accum, double, sum); 41 | 42 | #[db] 43 | struct Database(Storage); 44 | 45 | let mut db = Database::default(); 46 | let db = &mut db as &mut dyn Db; 47 | let init: Vec<_> = (1..=100) 48 | .map(|x| db.set_input(TrackedStruct { id: x, value: x })) 49 | .collect(); 50 | 51 | db.execute(|ctx| { 52 | let doubled: Vec<_> = init.iter().map(|x| double(ctx, *x)).collect(); 53 | let first_double_then_sum = sum(ctx, 0, doubled); 54 | 55 | let first_sum = sum(ctx, 1, init); 56 | let first_sum_then_double = double(ctx, first_sum); 57 | 58 | let val1 = db.get(first_double_then_sum).value; 59 | let val2 = db.get(first_sum_then_double).value; 60 | assert_eq!(val1, val2); 61 | assert_eq!(val1, 5050 * 2); 62 | 63 | let accums = db.get_all::(); 64 | assert_eq!(accums.count(), 103); 65 | let double_accums = db.get_query::(); 66 | assert_eq!(double_accums.count(), 101); 67 | let sum_accums = db.get_query::(); 68 | assert_eq!(sum_accums.count(), 2); 69 | }); 70 | } 71 | 72 | #[test] 73 | fn simple_memoize() { 74 | static EXECUTED: AtomicBool = AtomicBool::new(false); 75 | 76 | #[query] 77 | fn double(ctx: &Ctx, id: Id) -> TrackedStruct { 78 | if EXECUTED.load(Ordering::Relaxed) { 79 | panic!("double() was executed twice"); 80 | } 81 | 
EXECUTED.store(true, Ordering::Relaxed); 82 | 83 | let s = ctx.get(id); 84 | TrackedStruct { 85 | id: s.id, 86 | value: s.value * 2, 87 | } 88 | } 89 | 90 | let mut db = TestDatabase::new(); 91 | let db = &mut db as &mut dyn Db; 92 | let id = db.set_input(TrackedStruct { id: 0, value: 1 }); 93 | 94 | db.execute(|ctx| { 95 | let check = || { 96 | let doubled = double(ctx, id); 97 | assert_eq!(db.get(doubled).value, 2); 98 | }; 99 | 100 | check(); 101 | check(); 102 | }); 103 | } 104 | 105 | #[test] 106 | fn memoizes_after_old_dependency() { 107 | static EXECUTED: AtomicBool = AtomicBool::new(false); 108 | #[query] 109 | fn triple(ctx: &Ctx, id: Id) -> TrackedStruct { 110 | if EXECUTED.load(Ordering::Relaxed) { 111 | panic!("triple() was executed twice"); 112 | } 113 | EXECUTED.store(true, Ordering::Relaxed); 114 | 115 | let s = ctx.get(id); 116 | TrackedStruct { 117 | id: s.id, 118 | value: s.value * 3, 119 | } 120 | } 121 | 122 | let mut db = TestDatabase::new(); 123 | let db = &mut db as &mut dyn Db; 124 | 125 | let input = db.set_input(TrackedStruct { id: 0, value: 1 }); 126 | 127 | let first = db.execute(|ctx| double(ctx, input)); 128 | let trip = db.execute(|ctx| triple(ctx, first)); 129 | assert_eq!(db.get(trip).value, 6); 130 | assert_eq!(input, db.set_input(TrackedStruct { id: 0, value: 2 })); 131 | 132 | let dub = || db.execute(|ctx| double(ctx, input)); 133 | let second = dub(); 134 | let third = dub(); 135 | assert_eq!(db.get(second).value, db.get(third).value); 136 | 137 | EXECUTED.store(false, Ordering::Relaxed); 138 | let check = || { 139 | let trip = db.execute(|ctx| triple(ctx, third)); 140 | assert_eq!(db.get(trip).value, 12); 141 | }; 142 | check(); 143 | check(); 144 | } 145 | 146 | #[test] 147 | fn executes_after_old_dependency() { 148 | let mut db = TestDatabase::new(); 149 | let db = &mut db as &mut dyn Db; 150 | 151 | let mut input1 = db.set_input(TrackedStruct { id: 1, value: 1 }); 152 | let mut doubled = db.execute(|ctx| double(ctx, 
input1)); 153 | 154 | let mut input2 = db.set_input(TrackedStruct { id: 2, value: 2 }); 155 | let mut s = db.execute(|ctx| sum(ctx, 0, vec![doubled, input2])); 156 | assert_eq!(db.get(s).value, 4); 157 | 158 | for value in 2..=10 { 159 | input1 = db.set_input(TrackedStruct { id: 1, value }); 160 | doubled = db.execute(|ctx| double(ctx, input1)); 161 | s = db.execute(|ctx| sum(ctx, 0, vec![doubled, input2])); 162 | assert_eq!(db.get(s).value, value * 2 + 2); 163 | } 164 | 165 | input2 = db.set_input(TrackedStruct { id: 2, value: 1 }); 166 | let trip = db.execute(|ctx| sum(ctx, 0, vec![doubled, input2])); 167 | assert_eq!(db.get(trip).value, 21); 168 | } 169 | 170 | #[test] 171 | fn interning() { 172 | let mut db = TestDatabase::new(); 173 | let db = &mut db as &mut dyn Db; 174 | 175 | let id = db.add("string".to_string()); 176 | let id2 = db.add("string".to_string()); 177 | let id3 = db.add("string2".to_string()); 178 | let id4 = db.add_ref("string"); 179 | 180 | assert_eq!(id, id2); 181 | assert_eq!(id, id4); 182 | assert_ne!(id, id3); 183 | } 184 | -------------------------------------------------------------------------------- /compiler/verde/src/internal/storage/pushable.rs: -------------------------------------------------------------------------------- 1 | use std::ops::Deref; 2 | 3 | use dashmap::mapref::{multiple::RefMulti, one::Ref}; 4 | use parking_lot::{lock_api::RawMutex, Mutex}; 5 | 6 | use crate::{ 7 | internal::{ 8 | storage::{query::ErasedQueryId, DashMap, Route}, 9 | ErasedVec, 10 | }, 11 | Pushable, 12 | }; 13 | 14 | pub trait ErasedPushableStorage { 15 | /// **Safety**: The type of `self` must be `PushableStorage`, where `T` matches the type stored in the 16 | /// `ErasedVec`.. 17 | unsafe fn push(&self, query: ErasedQueryId, values: ErasedVec); 18 | } 19 | 20 | impl<'a> dyn ErasedPushableStorage + 'a { 21 | /// **Safety**: The type of `self` must be `PushableStorage`. 
22 | pub unsafe fn get_all(&self) -> impl Iterator { 23 | unsafe { 24 | let storage = self as *const dyn ErasedPushableStorage as *const PushableStorage; 25 | (*storage).get_all() 26 | } 27 | } 28 | 29 | /// **Safety**: The type of `self` must be `PushableStorage`. 30 | pub unsafe fn get_query(&self, query: Route) -> impl Iterator { 31 | unsafe { 32 | let storage = self as *const dyn ErasedPushableStorage as *const PushableStorage; 33 | (*storage).get_query(query) 34 | } 35 | } 36 | 37 | /// **Safety**: The type of `self` must be `PushableStorage`. 38 | pub unsafe fn get_query_invocation(&self, query: ErasedQueryId) -> impl Iterator { 39 | unsafe { 40 | let storage = self as *const dyn ErasedPushableStorage as *const PushableStorage; 41 | (*storage).get_query_invocation(query) 42 | } 43 | } 44 | } 45 | 46 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 47 | pub struct PushableStorage { 48 | pub(crate) map: DashMap>>>, 49 | } 50 | 51 | impl ErasedPushableStorage for PushableStorage { 52 | unsafe fn push(&self, query: ErasedQueryId, values: ErasedVec) { 53 | let mut data = self.map.entry(query.route).or_insert_with(Vec::new); 54 | Self::expand_to(&mut data, query.index); 55 | *data[query.index as usize].lock() = values.into_inner(); 56 | } 57 | } 58 | 59 | impl PushableStorage { 60 | pub fn new() -> Self { 61 | Self { 62 | map: DashMap::default(), 63 | } 64 | } 65 | 66 | pub fn get_all(&self) -> impl Iterator { 67 | self.map 68 | .iter() 69 | .flat_map(|x| { 70 | let value = unsafe { std::mem::transmute::<_, std::slice::Iter<'_, _>>(x.iter()) }; 71 | RefMultiMap::new(x, value) 72 | }) 73 | .flat_map(|vec| VecIter::new(vec)) 74 | } 75 | 76 | pub fn get_query(&self, query: Route) -> impl Iterator { 77 | self.map 78 | .get(&query) 79 | .into_iter() 80 | .flat_map(|x| { 81 | let value = unsafe { std::mem::transmute::<_, std::slice::Iter<'_, _>>(x.iter()) }; 82 | RefMap::new(x, value) 83 | }) 84 | .flat_map(|vec| VecIter::new(vec)) 85 | } 86 
| 87 | pub fn get_query_invocation(&self, query: ErasedQueryId) -> impl Iterator { 88 | self.map 89 | .get(&query.route) 90 | .into_iter() 91 | .flat_map(|x| { 92 | let value = unsafe { std::mem::transmute::<_, std::slice::Iter<'_, _>>(x.iter()) }; 93 | RefMap::new(x, value) 94 | }) 95 | .flat_map(|vec| VecIter::new(vec)) 96 | } 97 | 98 | fn expand_to(data: &mut Vec>>, index: u32) { 99 | let len = data.len(); 100 | if len <= index as usize { 101 | data.extend(std::iter::repeat_with(|| Mutex::default()).take(index as usize - len + 1)) 102 | } 103 | } 104 | } 105 | 106 | struct RefMultiMap<'a, K, V, S, T> { 107 | value: T, 108 | _inner: RefMulti<'a, K, V, S>, 109 | } 110 | 111 | impl<'a, K, V, S, T> RefMultiMap<'a, K, V, S, T> { 112 | fn new(_inner: RefMulti<'a, K, V, S>, value: T) -> Self { Self { _inner, value } } 113 | } 114 | 115 | impl<'a, K, V, S, T> Deref for RefMultiMap<'a, K, V, S, T> { 116 | type Target = T; 117 | 118 | fn deref(&self) -> &Self::Target { &self.value } 119 | } 120 | 121 | impl<'a, K, V, S, T: Iterator> Iterator for RefMultiMap<'a, K, V, S, T> { 122 | type Item = T::Item; 123 | 124 | fn next(&mut self) -> Option { self.value.next() } 125 | } 126 | 127 | struct RefMap<'a, K, V, S, T> { 128 | value: T, 129 | _inner: Ref<'a, K, V, S>, 130 | } 131 | 132 | impl<'a, K, V, S, T> RefMap<'a, K, V, S, T> { 133 | fn new(_inner: Ref<'a, K, V, S>, value: T) -> Self { Self { _inner, value } } 134 | } 135 | 136 | impl<'a, K, V, S, T> Deref for RefMap<'a, K, V, S, T> { 137 | type Target = T; 138 | 139 | fn deref(&self) -> &Self::Target { &self.value } 140 | } 141 | 142 | impl<'a, K, V, S, T: Iterator> Iterator for RefMap<'a, K, V, S, T> { 143 | type Item = T::Item; 144 | 145 | fn next(&mut self) -> Option { self.value.next() } 146 | } 147 | 148 | struct VecIter<'a, T> { 149 | vec: &'a Mutex>, 150 | iter: std::slice::Iter<'a, T>, 151 | } 152 | 153 | impl<'a, T> VecIter<'a, T> { 154 | fn new(vec: &'a Mutex>) -> Self { 155 | let iter = unsafe { 156 | 
vec.raw().lock(); 157 | (*vec.data_ptr()).iter() 158 | }; 159 | Self { vec, iter } 160 | } 161 | } 162 | 163 | impl<'a, T> Iterator for VecIter<'a, T> { 164 | type Item = &'a T; 165 | 166 | fn next(&mut self) -> Option { self.iter.next() } 167 | } 168 | 169 | impl Drop for VecIter<'_, T> { 170 | fn drop(&mut self) { 171 | unsafe { 172 | self.vec.raw().unlock(); 173 | } 174 | } 175 | } 176 | 177 | impl Default for PushableStorage { 178 | fn default() -> Self { 179 | Self { 180 | map: DashMap::default(), 181 | } 182 | } 183 | } 184 | -------------------------------------------------------------------------------- /compiler/parse/src/tests/recovery.rs: -------------------------------------------------------------------------------- 1 | use expect_test::expect; 2 | 3 | use super::harness; 4 | 5 | #[test] 6 | fn expr() { 7 | let source = r#" 8 | fn main() { 9 | 3 + ; 10 | 4 + 5; 11 | -; 12 | f(); 13 | b[; 14 | 10; 15 | } 16 | "#; 17 | 18 | let ast = expect![[r#" 19 | File@0..67 20 | Whitespace@0..3 "\n\t\t" 21 | Item@3..67 22 | Fn@3..67 23 | FnKw@3..5 "fn" 24 | Whitespace@5..6 " " 25 | Name@6..10 26 | Ident@6..10 "main" 27 | ParamList@10..12 28 | LParen@10..11 "(" 29 | RParen@11..12 ")" 30 | Whitespace@12..13 " " 31 | Block@13..65 32 | LBrace@13..14 "{" 33 | SemiExpr@14..23 34 | InfixExpr@14..22 35 | Whitespace@14..18 "\n\t\t\t" 36 | IntLit@18..19 "3" 37 | Whitespace@19..20 " " 38 | Plus@20..21 "+" 39 | Whitespace@21..22 " " 40 | Error@22..22 41 | Semi@22..23 ";" 42 | Whitespace@23..27 "\n\t\t\t" 43 | SemiExpr@27..33 44 | InfixExpr@27..32 45 | IntLit@27..28 "4" 46 | Whitespace@28..29 " " 47 | Plus@29..30 "+" 48 | Whitespace@30..31 " " 49 | IntLit@31..32 "5" 50 | Semi@32..33 ";" 51 | Whitespace@33..37 "\n\t\t\t" 52 | SemiExpr@37..39 53 | PrefixExpr@37..38 54 | Minus@37..38 "-" 55 | Error@38..38 56 | Semi@38..39 ";" 57 | Whitespace@39..43 "\n\t\t\t" 58 | SemiExpr@43..47 59 | CallExpr@43..46 60 | NameExpr@43..44 61 | Name@43..44 62 | Ident@43..44 "f" 63 | ArgList@44..46 64 
| LParen@44..45 "(" 65 | RParen@45..46 ")" 66 | Semi@46..47 ";" 67 | Whitespace@47..51 "\n\t\t\t" 68 | SemiExpr@51..54 69 | IndexExpr@51..53 70 | NameExpr@51..52 71 | Name@51..52 72 | Ident@51..52 "b" 73 | LBracket@52..53 "[" 74 | Error@53..53 75 | Semi@53..54 ";" 76 | Whitespace@54..58 "\n\t\t\t" 77 | SemiExpr@58..61 78 | IntLit@58..60 "10" 79 | Semi@60..61 ";" 80 | Whitespace@61..64 "\n\t\t" 81 | RBrace@64..65 "}" 82 | Whitespace@65..67 "\n\t""#]]; 83 | 84 | let diags = expect![[r#" 85 | Error: expected expression 86 | ,-[:3:8] 87 | | 88 | 3 | 3 + ; 89 | | | 90 | | `-- found `;` 91 | ---' 92 | Error: expected expression 93 | ,-[:5:5] 94 | | 95 | 5 | -; 96 | | | 97 | | `-- found `;` 98 | ---' 99 | Error: expected expression 100 | ,-[:7:6] 101 | | 102 | 7 | b[; 103 | | | 104 | | `-- found `;` 105 | ---' 106 | "#]]; 107 | 108 | harness(source, ast, diags); 109 | } 110 | 111 | #[test] 112 | fn item() { 113 | let source = r#" 114 | struct S { 115 | s: u8,, 116 | v: , 117 | 118 | 119 | fn main() { 120 | "#; 121 | 122 | let ast = expect![[r#" 123 | File@0..50 124 | Whitespace@0..3 "\n\t\t" 125 | Item@3..37 126 | Struct@3..37 127 | StructKw@3..9 "struct" 128 | Whitespace@9..10 " " 129 | Name@10..11 130 | Ident@10..11 "S" 131 | Whitespace@11..12 " " 132 | LBrace@12..13 "{" 133 | Whitespace@13..17 "\n\t\t\t" 134 | Param@17..22 135 | Name@17..18 136 | Ident@17..18 "s" 137 | Colon@18..19 ":" 138 | Whitespace@19..20 " " 139 | PathType@20..22 140 | Path@20..22 141 | Name@20..22 142 | Ident@20..22 "u8" 143 | Comma@22..23 "," 144 | Param@23..23 145 | Name@23..23 146 | Error@23..23 147 | Comma@23..24 "," 148 | Whitespace@24..28 "\n\t\t\t" 149 | Param@28..31 150 | Name@28..29 151 | Ident@28..29 "v" 152 | Colon@29..30 ":" 153 | Whitespace@30..31 " " 154 | Error@31..31 155 | Comma@31..32 "," 156 | Whitespace@32..37 "\n\n\n\t\t" 157 | Param@37..37 158 | Name@37..37 159 | Error@37..37 160 | Item@37..50 161 | Fn@37..50 162 | FnKw@37..39 "fn" 163 | Whitespace@39..40 " " 164 | 
Name@40..44 165 | Ident@40..44 "main" 166 | ParamList@44..46 167 | LParen@44..45 "(" 168 | RParen@45..46 ")" 169 | Whitespace@46..47 " " 170 | Block@47..50 171 | LBrace@47..48 "{" 172 | Whitespace@48..50 "\n\t" 173 | Error@50..50"#]]; 174 | 175 | let diags = expect![[r#" 176 | Error: expected struct field 177 | ,-[:3:10] 178 | | 179 | 3 | s: u8,, 180 | | | 181 | | `-- found `,` 182 | ---' 183 | Error: expected type 184 | ,-[:4:7] 185 | | 186 | 4 | v: , 187 | | | 188 | | `-- found `,` 189 | ---' 190 | Error: expected struct field 191 | ,-[:7:3] 192 | | 193 | 7 | fn main() { 194 | | ^| 195 | | `-- found `fn` 196 | ---' 197 | Error: expected block 198 | ,-[:8:1] 199 | | 200 | 8 | 201 | | | 202 | | `-- found 203 | ---' 204 | "#]]; 205 | 206 | harness(source, ast, diags); 207 | } 208 | -------------------------------------------------------------------------------- /compiler/verde/derive/src/query.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::{Ident, TokenStream}; 2 | use quote::{format_ident, quote, ToTokens}; 3 | use syn::{ 4 | spanned::Spanned, 5 | Block, 6 | FnArg, 7 | GenericParam, 8 | ItemFn, 9 | LifetimeParam, 10 | Meta, 11 | Pat, 12 | ReturnType, 13 | Type, 14 | TypeTuple, 15 | Visibility, 16 | }; 17 | 18 | use crate::{Error, Result}; 19 | 20 | pub(crate) fn query(input: ItemFn) -> Result { 21 | if input.sig.asyncness.is_some() { 22 | return Err(Error::new(input.sig.fn_token.span(), "query must not be `async`")); 23 | } 24 | 25 | let ret_ty = match &input.sig.output { 26 | ReturnType::Type(_, ty) if !matches!(ty.as_ref(), Type::Tuple(TypeTuple { elems, .. 
}) if elems.is_empty()) => { 27 | quote!(#ty) 28 | }, 29 | _ => { 30 | return Err(Error::new(input.sig.output.span(), "query must return a value")); 31 | }, 32 | }; 33 | 34 | let Query { 35 | vis, 36 | name, 37 | ctx, 38 | inputs, 39 | lifetimes, 40 | block, 41 | } = generate(&input)?; 42 | 43 | let input_type_name = format_ident!("__verde_internal_input_type_of_{}", name); 44 | 45 | let arg_types: Vec<_> = inputs.iter().map(|x| &x.ty).collect(); 46 | let query_inputs: Vec<_> = inputs.iter().filter(|x| !x.ignore).collect(); 47 | let query_input_names: Vec<_> = query_inputs.iter().map(|x| &x.name).collect(); 48 | let query_input_types: Vec<_> = query_inputs 49 | .iter() 50 | .map(|x| match &x.ty { 51 | Type::Reference(r) => r.elem.as_ref(), 52 | x => x, 53 | }) 54 | .collect(); 55 | 56 | let ctx_name = &ctx.name; 57 | let ctx_ty = match ctx.ty { 58 | Type::Reference(r) => r.elem, 59 | _ => return Err(Error::new(ctx.ty.span(), "context must be a `&Ctx`")), 60 | }; 61 | 62 | let derive = if cfg!(feature = "serde") { 63 | quote! { 64 | #[derive(::verde::serde::Serialize, ::verde::serde::Deserialize)] 65 | #[serde(crate = "::verde::serde")] 66 | } 67 | } else { 68 | quote! {} 69 | }; 70 | let fn_ty = quote! { for<#(#lifetimes,)*> fn(&#ctx_ty, #(#arg_types,)*) -> ::verde::Id<#ret_ty> }; 71 | 72 | Ok(quote! 
{ 73 | #[allow(non_camel_case_types)] 74 | #[derive(Copy, Clone)] 75 | #derive 76 | #vis struct #name; 77 | 78 | #[allow(non_camel_case_types)] 79 | #[derive(Clone, PartialEq, Eq, Hash)] 80 | #derive 81 | #vis struct #input_type_name { 82 | #(#query_input_names: <#query_input_types as ::std::borrow::ToOwned>::Owned,)* 83 | } 84 | 85 | impl ::std::ops::Deref for #name { 86 | type Target = #fn_ty; 87 | 88 | fn deref(&self) -> &Self::Target { 89 | fn inner<#(#lifetimes)*>(#ctx_name: &#ctx_ty, #(#inputs,)*) -> ::verde::Id<#ret_ty> { 90 | let __verde_internal_query_input = #input_type_name { 91 | #(#query_input_names: <#query_input_types as ::std::borrow::ToOwned>::to_owned(&#query_input_names),)* 92 | }; 93 | let __verde_internal_ctx = #ctx_name.start_query::<#name>(__verde_internal_query_input); 94 | let #ctx_name = &__verde_internal_ctx; 95 | __verde_internal_ctx.end_query::<#name>(move || #block) 96 | } 97 | 98 | const F: #fn_ty = inner; 99 | &F 100 | } 101 | } 102 | 103 | impl ::verde::internal::Query for #name { 104 | type Input = #input_type_name; 105 | type Output = #ret_ty; 106 | } 107 | 108 | impl ::verde::internal::Storable for #name { 109 | type Storage = ::verde::internal::storage::QueryStorage; 110 | 111 | fn tracked_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedTrackedStorage> { 112 | None 113 | } 114 | 115 | fn query_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedQueryStorage> { 116 | Some(store) 117 | } 118 | 119 | fn pushable_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedPushableStorage> { 120 | None 121 | } 122 | 123 | fn interned_storage(store: &Self::Storage) -> Option<&dyn ::verde::internal::storage::ErasedInternedStorage> { 124 | None 125 | } 126 | } 127 | }) 128 | } 129 | 130 | #[derive(Hash)] 131 | struct Arg { 132 | name: Ident, 133 | ty: Type, 134 | ignore: bool, 135 | } 136 | 137 | impl ToTokens for Arg { 138 | fn to_tokens(&self, tokens: &mut 
TokenStream) { 139 | let Arg { name, ty, .. } = self; 140 | tokens.extend(quote!(#name: #ty)); 141 | } 142 | } 143 | 144 | struct Query { 145 | vis: Visibility, 146 | name: Ident, 147 | ctx: Arg, 148 | inputs: Vec, 149 | lifetimes: Vec, 150 | block: Box, 151 | } 152 | 153 | fn generate(input: &ItemFn) -> Result { 154 | let vis = input.vis.clone(); 155 | let name = input.sig.ident.clone(); 156 | 157 | let lifetimes = input 158 | .sig 159 | .generics 160 | .params 161 | .iter() 162 | .map(|x| match x { 163 | GenericParam::Lifetime(l) => Ok(l.clone()), 164 | _ => Err(Error::new(x.span(), "query functions cannot be generic")), 165 | }) 166 | .collect::>>()?; 167 | 168 | let mut args = input.sig.inputs.iter().map(|x| match x { 169 | FnArg::Receiver(_) => Err(Error::new(x.span(), "query functions cannot take `self`")), 170 | FnArg::Typed(pat) => match *pat.pat { 171 | Pat::Ident(ref ident) => Ok(Arg { 172 | name: ident.ident.clone(), 173 | ty: *pat.ty.clone(), 174 | ignore: { 175 | let mut iter = pat.attrs.iter(); 176 | let x = iter 177 | .next() 178 | .map(|x| matches!(&x.meta, Meta::Path(p) if p.is_ident("ignore"))) 179 | .unwrap_or(false); 180 | if iter.next().is_some() { 181 | return Err(Error::new(x.span(), "query arguments must have at most one attribute")); 182 | } 183 | x 184 | }, 185 | }), 186 | _ => Err(Error::new(pat.pat.span(), "query arguments must be `ident` patterns")), 187 | }, 188 | }); 189 | 190 | let ctx = args.next().ok_or_else(|| { 191 | Error::new( 192 | input.sig.fn_token.span(), 193 | "query functions must take a `&Ctx` as their first argument", 194 | ) 195 | })??; 196 | let inputs = args.collect::>()?; 197 | let block = input.block.clone(); 198 | 199 | Ok(Query { 200 | vis, 201 | name, 202 | ctx, 203 | inputs, 204 | lifetimes, 205 | block, 206 | }) 207 | } 208 | -------------------------------------------------------------------------------- /compiler/verde/derive/src/database.rs: 
-------------------------------------------------------------------------------- 1 | use std::num::TryFromIntError; 2 | 3 | use proc_macro2::{Ident, TokenStream}; 4 | use quote::{format_ident, quote}; 5 | use syn::{spanned::Spanned, GenericArgument, Index, ItemStruct, PathArguments, Type, Visibility}; 6 | 7 | use crate::{Error, Result}; 8 | 9 | pub(crate) fn storage(input: ItemStruct) -> Result { 10 | let Storage { vis, name, fields } = generate(input)?; 11 | let field_indices = (0..fields.len()) 12 | .map(|x| { 13 | let x: u16 = x.try_into()?; 14 | Ok(Index::from(x as usize)) 15 | }) 16 | .collect::, _>>() 17 | .map_err(|_: TryFromIntError| Error::new(name.span(), "how do you have more than 65536 fields?"))?; 18 | let derive = if cfg!(feature = "serde") { 19 | quote! { 20 | #[derive(::verde::serde::Serialize, ::verde::serde::Deserialize)] 21 | #[serde(crate = "::verde::serde")] 22 | } 23 | } else { 24 | quote! {} 25 | }; 26 | 27 | Ok(quote! { 28 | #[derive(Default)] 29 | #derive 30 | #vis struct #name( 31 | #(<#fields as ::verde::internal::Storable>::Storage,)* 32 | ); 33 | 34 | impl ::verde::internal::Storage for #name { 35 | fn init_routing(table: &mut ::verde::internal::storage::RouteBuilder) { 36 | #(table.add::<#fields>(#field_indices);)* 37 | } 38 | 39 | fn tracked_storage(&self, index: u16) -> Option<&dyn ::verde::internal::storage::ErasedTrackedStorage> { 40 | match index { 41 | #(#field_indices => <#fields as ::verde::internal::Storable>::tracked_storage(&self.#field_indices),)* 42 | _ => panic!("invalid route index"), 43 | } 44 | } 45 | 46 | fn query_storage(&self, index: u16) -> Option<&dyn ::verde::internal::storage::ErasedQueryStorage> { 47 | match index { 48 | #(#field_indices => <#fields as ::verde::internal::Storable>::query_storage(&self.#field_indices),)* 49 | _ => panic!("invalid route index"), 50 | } 51 | } 52 | 53 | fn pushable_storage(&self, index: u16) -> Option<&dyn ::verde::internal::storage::ErasedPushableStorage> { 54 | match index { 55 
| #(#field_indices => <#fields as ::verde::internal::Storable>::pushable_storage(&self.#field_indices),)* 56 | _ => panic!("invalid route index"), 57 | } 58 | } 59 | 60 | fn interned_storage(&self, index: u16) -> Option<&dyn ::verde::internal::storage::ErasedInternedStorage> { 61 | match index { 62 | #(#field_indices => <#fields as ::verde::internal::Storable>::interned_storage(&self.#field_indices),)* 63 | _ => panic!("invalid route index"), 64 | } 65 | } 66 | } 67 | 68 | #( 69 | impl ::verde::internal::StorageOf<#fields> for #name { 70 | fn storage_index(&self) -> u16 { 71 | #field_indices 72 | } 73 | } 74 | )* 75 | }) 76 | } 77 | 78 | pub(crate) fn database(input: ItemStruct) -> Result { 79 | let Storage { vis, name, fields } = generate(input)?; 80 | let ty_idents = fields.iter().map(ty_to_ident).collect::>>()?; 81 | let field_names = ty_idents 82 | .iter() 83 | .map(|field| format_ident!("__verde_internal_storage_{}", field)) 84 | .collect::>(); 85 | let field_indices = (1usize..) 86 | .take(fields.len()) 87 | .map(|x| x.try_into()) 88 | .collect::, _>>() 89 | .map_err(|_| Error::new(name.span(), "how do you have more than 65536 fields?"))?; 90 | 91 | let derive = if cfg!(feature = "serde") { 92 | quote! { 93 | #[derive(::verde::serde::Serialize, ::verde::serde::Deserialize)] 94 | #[serde(crate = "::verde::serde")] 95 | } 96 | } else { 97 | quote! {} 98 | }; 99 | let skip = if cfg!(feature = "serde") { 100 | quote! { #[serde(skip, default = "__verde_internal_generate_routing_table")] } 101 | } else { 102 | quote! {} 103 | }; 104 | 105 | Ok(quote! 
{ 106 | fn __verde_internal_generate_routing_table() -> ::verde::internal::storage::RoutingTable { 107 | ::verde::internal::storage::RoutingTable::generate_for_db::<#name>() 108 | } 109 | 110 | #derive 111 | #vis struct #name { 112 | #skip 113 | __verde_internal_routing_table: ::verde::internal::storage::RoutingTable, 114 | #(#field_names: #fields,)* 115 | } 116 | 117 | impl ::std::default::Default for #name { 118 | fn default() -> Self { 119 | Self { 120 | __verde_internal_routing_table: ::verde::internal::storage::RoutingTable::generate_for_db::<#name>(), 121 | #(#field_names: #fields::default()),* 122 | } 123 | } 124 | } 125 | 126 | impl ::verde::Db for #name { 127 | fn init_routing(table: &mut ::verde::internal::storage::RoutingTableBuilder) { 128 | #(<#fields as ::verde::internal::Storage>::init_routing(&mut table.start_route(#field_indices));)* 129 | } 130 | 131 | fn routing_table(&self) -> &::verde::internal::storage::RoutingTable { 132 | &self.__verde_internal_routing_table 133 | } 134 | 135 | fn storage_struct(&self, storage: u16) -> &dyn ::verde::internal::Storage { 136 | match storage { 137 | #(#field_indices => &self.#field_names),*, 138 | _ => panic!("invalid route storage"), 139 | } 140 | } 141 | } 142 | 143 | #( 144 | impl ::verde::internal::DbWith<#fields> for #name { 145 | fn storage_struct_index(&self) -> u16 { 146 | #field_indices 147 | } 148 | } 149 | )* 150 | }) 151 | } 152 | 153 | struct Storage { 154 | vis: Visibility, 155 | name: Ident, 156 | fields: Vec, 157 | } 158 | 159 | fn generate(input: ItemStruct) -> Result { 160 | if !input.generics.params.is_empty() { 161 | return Err(Error::new( 162 | input.generics.span(), 163 | "`storage` does not support generic types", 164 | )); 165 | } 166 | 167 | let vis = input.vis; 168 | let name = input.ident; 169 | let fields = match input.fields { 170 | syn::Fields::Named(_) => { 171 | return Err(Error::new( 172 | input.fields.span(), 173 | "`storage` does not support named fields", 174 | )); 175 | }, 
176 | syn::Fields::Unnamed(x) => x.unnamed.into_iter().map(|x| x.ty).collect(), 177 | syn::Fields::Unit => { 178 | return Err(Error::new( 179 | input.fields.span(), 180 | "`storage` does not support unit structs", 181 | )); 182 | }, 183 | }; 184 | 185 | Ok(Storage { vis, name, fields }) 186 | } 187 | 188 | fn ty_to_ident(x: &Type) -> Result { 189 | let ret = match x { 190 | Type::Path(x) => x 191 | .path 192 | .segments 193 | .iter() 194 | .map(|x| { 195 | let ident = x.ident.to_string(); 196 | match &x.arguments { 197 | PathArguments::None => Ok(ident), 198 | PathArguments::AngleBracketed(x) => Ok(x 199 | .args 200 | .iter() 201 | .map(|x| match x { 202 | GenericArgument::Type(x) => ty_to_ident(x), 203 | _ => Err(Error::new(x.span(), "`database` does not non-type generics")), 204 | }) 205 | .collect::>>()? 206 | .join("_")), 207 | PathArguments::Parenthesized(_) => { 208 | Err(Error::new(x.span(), "`database` does not support function traits")) 209 | }, 210 | } 211 | }) 212 | .collect::>>()? 213 | .join("_"), 214 | _ => return Err(Error::new(x.span(), "`database` does not support non-path types")), 215 | }; 216 | Ok(ret) 217 | } 218 | -------------------------------------------------------------------------------- /compiler/verde/src/internal/storage/routing.rs: -------------------------------------------------------------------------------- 1 | //! ### Storage routing 2 | //! 3 | //! The database has two tiers of storing data: 4 | //! - The database itself, storing storage structs. 5 | //! - Storage structs, which store the actual type storages. 6 | //! 7 | //! This allows for multi-crate compilation, where each crate exposes a storage struct, with only the main driver crate 8 | //! using the database. 9 | 10 | /// A type-erased route through the database storage. 11 | /// Uniquely identifies the storage for a particular [`Tracked`](crate::Tracked) type. 
12 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 13 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 14 | pub struct Route { 15 | /// The index of the storage struct in the database. 16 | /// A storage of `0` is reserved for fake queries generated by `Db::set_input`. 17 | pub storage: u16, 18 | /// The index of the type storage in the storage struct. 19 | pub index: u16, 20 | } 21 | 22 | impl Route { 23 | pub(crate) fn input() -> Self { Self { storage: 0, index: 1 } } 24 | } 25 | 26 | #[cfg(not(any(feature = "test", test)))] 27 | mod normal { 28 | use std::any::TypeId; 29 | 30 | use rustc_hash::FxHashMap; 31 | 32 | use crate::{ 33 | internal::{storage::Route, Storable}, 34 | Db, 35 | }; 36 | 37 | /// A static table that maps [`TypeId`]s to [`Route`]s, generated at database initialization. 38 | /// This is required because `TypeId`s are not guaranteed to be stable across compilations, while `Route`s are. 39 | #[derive(Default)] 40 | pub struct RoutingTable { 41 | routes: FxHashMap, 42 | type_names: FxHashMap, 43 | } 44 | 45 | impl RoutingTable { 46 | pub fn route(&self) -> Route { self.route_for(TypeId::of::(), std::any::type_name::()) } 47 | 48 | pub fn route_for(&self, id: TypeId, name: &'static str) -> Route { 49 | match self.routes.get(&id) { 50 | Some(route) => *route, 51 | None => panic!("Database does not contain `{}`", name), 52 | } 53 | } 54 | 55 | pub fn name(&self, route: Route) -> &str { self.type_names.get(&route).unwrap() } 56 | 57 | pub fn generate_for_db() -> Self { 58 | #[cfg(feature = "debug")] 59 | { 60 | use std::{thread, time::Duration}; 61 | 62 | use parking_lot::deadlock; 63 | 64 | thread::spawn(move || loop { 65 | thread::sleep(Duration::from_secs(2)); 66 | let deadlocks = deadlock::check_deadlock(); 67 | if deadlocks.is_empty() { 68 | continue; 69 | } 70 | 71 | eprintln!("{} deadlocks detected", deadlocks.len()); 72 | for (i, threads) in deadlocks.iter().enumerate() { 73 | eprintln!("Deadlock #{}", i); 74 
| for t in threads { 75 | eprintln!("Thread {:#?}", t.thread_id()); 76 | eprintln!("{:#?}", t.backtrace()); 77 | } 78 | eprintln!() 79 | } 80 | std::process::abort(); 81 | }); 82 | } 83 | 84 | let mut builder = RoutingTableBuilder::default(); 85 | T::init_routing(&mut builder); 86 | builder.finish() 87 | } 88 | } 89 | 90 | #[derive(Default)] 91 | pub struct RoutingTableBuilder { 92 | routes: FxHashMap, 93 | type_names: FxHashMap, 94 | } 95 | 96 | impl RoutingTableBuilder { 97 | pub fn start_route(&mut self, storage: u16) -> RouteBuilder { 98 | RouteBuilder { 99 | routes: &mut self.routes, 100 | type_names: &mut self.type_names, 101 | storage, 102 | } 103 | } 104 | 105 | pub fn finish(self) -> RoutingTable { 106 | RoutingTable { 107 | routes: self.routes, 108 | type_names: self.type_names, 109 | } 110 | } 111 | } 112 | 113 | pub struct RouteBuilder<'a> { 114 | routes: &'a mut FxHashMap, 115 | type_names: &'a mut FxHashMap, 116 | storage: u16, 117 | } 118 | 119 | impl RouteBuilder<'_> { 120 | pub fn add(&mut self, index: u16) { 121 | let route = Route { 122 | storage: self.storage, 123 | index, 124 | }; 125 | let id = TypeId::of::(); 126 | 127 | if self.routes.insert(id, route).is_some() { 128 | panic!("Duplicate route for type `{}`", std::any::type_name::()); 129 | } 130 | self.type_names.insert(route, std::any::type_name::()); 131 | } 132 | } 133 | } 134 | 135 | #[cfg(any(feature = "test", test))] 136 | mod test { 137 | use std::{ 138 | any::TypeId, 139 | sync::atomic::{AtomicU16, Ordering}, 140 | }; 141 | 142 | use parking_lot::{Mutex, RwLock}; 143 | use rustc_hash::FxHashMap; 144 | 145 | use crate::{ 146 | internal::{storage::Route, Storable}, 147 | test::StorageType, 148 | Db, 149 | }; 150 | 151 | type GenFunc = Box (StorageType, u16) + Send>; 152 | 153 | /// A static table that maps [`TypeId`]s to [`Route`]s, generated at database initialization. 
154 | /// This is required because `TypeId`s are not guaranteed to be stable across compilations, while `Route`s are. 155 | #[derive(Default)] 156 | pub struct RoutingTable { 157 | routes: RwLock>, 158 | type_names: RwLock>, 159 | dynamic_storage_index: u16, 160 | next_route_index: AtomicU16, 161 | make: Mutex>, 162 | } 163 | 164 | impl RoutingTable { 165 | pub fn route(&self) -> Route 166 | where 167 | StorageType: From<::Storage>, 168 | { 169 | let route = self.route_for(TypeId::of::(), ""); 170 | self.make 171 | .lock() 172 | .push(Box::new(move || (T::Storage::default().into(), route.index))); 173 | route 174 | } 175 | 176 | pub fn route_for(&self, id: TypeId, _: &'static str) -> Route { 177 | *self.routes.write().entry(id).or_insert_with(|| Route { 178 | storage: self.dynamic_storage_index, 179 | index: self.next_route_index.fetch_add(1, Ordering::Relaxed), 180 | }) 181 | } 182 | 183 | pub fn name(&self, route: Route) -> &str { self.type_names.read().get(&route).unwrap() } 184 | 185 | pub fn make(&self) -> Vec { 186 | let mut m = self.make.lock(); 187 | std::mem::take(&mut *m) 188 | } 189 | 190 | pub fn generate_for_db() -> Self { 191 | let mut builder = RoutingTableBuilder::default(); 192 | T::init_routing(&mut builder); 193 | builder.finish() 194 | } 195 | } 196 | 197 | pub struct RoutingTableBuilder { 198 | routes: FxHashMap, 199 | type_names: FxHashMap, 200 | dynamic_storage_index: u16, 201 | } 202 | 203 | impl Default for RoutingTableBuilder { 204 | fn default() -> Self { 205 | Self { 206 | routes: FxHashMap::default(), 207 | type_names: FxHashMap::default(), 208 | dynamic_storage_index: 1, 209 | } 210 | } 211 | } 212 | 213 | impl RoutingTableBuilder { 214 | pub fn start_route(&mut self, storage: u16) -> RouteBuilder { 215 | self.dynamic_storage_index = self.dynamic_storage_index.max(storage + 1); 216 | RouteBuilder { 217 | routes: &mut self.routes, 218 | type_names: &mut self.type_names, 219 | storage, 220 | } 221 | } 222 | 223 | pub fn finish(self) -> 
RoutingTable { 224 | RoutingTable { 225 | routes: RwLock::new(self.routes), 226 | type_names: RwLock::new(self.type_names), 227 | dynamic_storage_index: self.dynamic_storage_index, 228 | next_route_index: AtomicU16::new(0), 229 | make: Mutex::new(Vec::new()), 230 | } 231 | } 232 | } 233 | 234 | pub struct RouteBuilder<'a> { 235 | routes: &'a mut FxHashMap, 236 | type_names: &'a mut FxHashMap, 237 | storage: u16, 238 | } 239 | 240 | impl RouteBuilder<'_> { 241 | pub fn add(&mut self, index: u16) { 242 | let route = Route { 243 | storage: self.storage, 244 | index, 245 | }; 246 | let id = TypeId::of::(); 247 | 248 | if self.routes.insert(id, route).is_some() { 249 | panic!("Duplicate route for type `{}`", std::any::type_name::()); 250 | } 251 | self.type_names.insert(route, std::any::type_name::()); 252 | } 253 | } 254 | } 255 | 256 | #[cfg(not(any(feature = "test", test)))] 257 | pub use normal::*; 258 | #[cfg(any(feature = "test", test))] 259 | pub use test::*; 260 | -------------------------------------------------------------------------------- /compiler/lex/src/token.rs: -------------------------------------------------------------------------------- 1 | use diagnostics::FileSpan; 2 | use logos::Logos; 3 | 4 | #[derive(Clone, Copy, Default)] 5 | pub struct Token { 6 | pub kind: TokenKind, 7 | pub span: FileSpan, 8 | } 9 | 10 | #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash, Logos)] 11 | pub enum TokenKind { 12 | #[regex("true|false")] 13 | BoolLit, 14 | #[regex(r"'(\.|[^'\\])*'")] 15 | CharLit, 16 | #[regex(r"(\d*[.])?\d+")] 17 | FloatLit, 18 | #[regex(r"(\d+)|(0x[0-9a-fA-F]+)|(0b[01]+)", priority = 2)] 19 | IntLit, 20 | #[regex(r#""(\.|[^"\\])*""#)] 21 | StringLit, 22 | #[regex(r"(\p{XID_Start}\p{XID_Continue}*)|(_\p{XID_Continue}+)", priority = 2)] 23 | Ident, 24 | #[token("@")] 25 | At, 26 | #[token("(")] 27 | LParen, 28 | #[token("{")] 29 | LBrace, 30 | #[token("[")] 31 | LBracket, 32 | #[token(")")] 33 | RParen, 34 | #[token("}")] 35 | RBrace, 36 
// (enum TokenKind, continued)
    #[token("]")]
    RBracket,
    #[token("=")]
    Eq,
    #[token(".")]
    Dot,
    #[token(":")]
    Colon,
    #[token(",")]
    Comma,
    #[token(";")]
    Semi,
    #[token("->")]
    Arrow,
    #[token("=>")]
    FatArrow,
    #[token("_")]
    Underscore,
    #[token("||")]
    PipePipe,
    #[token("&&")]
    AmpAmp,
    #[token("!")]
    Not,
    #[token("==")]
    EqEq,
    #[token("!=")]
    Neq,
    #[token("<")]
    Lt,
    #[token(">")]
    Gt,
    #[token("<=")]
    Leq,
    #[token(">=")]
    Geq,
    #[token("+")]
    Plus,
    #[token("-")]
    Minus,
    #[token("*")]
    Star,
    #[token("/")]
    Slash,
    #[token("%")]
    Percent,
    #[token("^")]
    Caret,
    #[token("&")]
    Amp,
    #[token("|")]
    Pipe,
    #[token("<<")]
    Shl,
    #[token(">>")]
    Shr,
    #[token("+=")]
    PlusEq,
    #[token("-=")]
    MinusEq,
    #[token("*=")]
    StarEq,
    #[token("/=")]
    SlashEq,
    #[token("%=")]
    PercentEq,
    #[token("^=")]
    CaretEq,
    #[token("&=")]
    AmpEq,
    #[token("|=")]
    PipeEq,
    #[token("<<=")]
    ShlEq,
    #[token(">>=")]
    ShrEq,
    #[regex("[ \t\n\r]+")]
    Whitespace,
    #[regex("//[^\n]*")]
    Comment,
    #[token("fn")]
    FnKw,
    #[token("let")]
    LetKw,
    #[token("if")]
    IfKw,
    #[token("else")]
    ElseKw,
    #[token("while")]
    WhileKw,
    #[token("for")]
    ForKw,
    #[token("loop")]
    LoopKw,
    #[token("in")]
    InKw,
    #[token("return")]
    ReturnKw,
    #[token("break")]
    BreakKw,
    #[token("continue")]
    ContinueKw,
    #[token("match")]
    MatchKw,
    #[token("struct")]
    StructKw,
    #[token("enum")]
    EnumKw,
    #[token("type")]
    TypeKw,
    #[token("pub")]
    PubKw,
    #[token("extern")]
    ExternKw,
    #[token("static")]
    StaticKw,
    #[token("import")]
    ImportKw,
    #[token("as")]
    AsKw,
    #[token("mut")]
    MutKw,
    /// Synthetic: produced by the lexer driver at end of input, never by a logos rule.
    Eof,
    #[default]
    Error,
}

/// Shorthand for naming [`TokenKind`] variants by their surface syntax, e.g.
/// `T![fn]`, `T![=>]`, `T!['(']`.
#[macro_export]
macro_rules! T {
    (bool) => {
        $crate::token::TokenKind::BoolLit
    };
    (char) => {
        $crate::token::TokenKind::CharLit
    };
    (float) => {
        $crate::token::TokenKind::FloatLit
    };
    (int) => {
        $crate::token::TokenKind::IntLit
    };
    (string) => {
        $crate::token::TokenKind::StringLit
    };
    (ident) => {
        $crate::token::TokenKind::Ident
    };
    // NOTE(review): `TokenKind::Operator` does not exist in the enum above, so this
    // arm fails to compile at any use site. Preserved as-is (behavior unchanged).
    (op) => {
        $crate::token::TokenKind::Operator
    };
    (@) => {
        $crate::token::TokenKind::At
    };
    ('(') => {
        $crate::token::TokenKind::LParen
    };
    (')') => {
        $crate::token::TokenKind::RParen
    };
    ('{') => {
        $crate::token::TokenKind::LBrace
    };
    ('}') => {
        $crate::token::TokenKind::RBrace
    };
    ('[') => {
        $crate::token::TokenKind::LBracket
    };
    (']') => {
        $crate::token::TokenKind::RBracket
    };
    (:) => {
        $crate::token::TokenKind::Colon
    };
    (;) => {
        $crate::token::TokenKind::Semi
    };
    (,) => {
        $crate::token::TokenKind::Comma
    };
    (->) => {
        $crate::token::TokenKind::Arrow
    };
    (=>) => {
        $crate::token::TokenKind::FatArrow
    };
    (=) => {
        $crate::token::TokenKind::Eq
    };
    (.) => {
        $crate::token::TokenKind::Dot
    };
    (||) => {
        $crate::token::TokenKind::PipePipe
    };
    (&&) => {
        $crate::token::TokenKind::AmpAmp
    };
    (!) => {
        $crate::token::TokenKind::Not
    };
    (==) => {
        $crate::token::TokenKind::EqEq
    };
    (!=) => {
        $crate::token::TokenKind::Neq
    };
    (<) => {
        $crate::token::TokenKind::Lt
    };
    (>) => {
        $crate::token::TokenKind::Gt
    };
    (<=) => {
        $crate::token::TokenKind::Leq
    };
    (>=) => {
        $crate::token::TokenKind::Geq
    };
    (+) => {
        $crate::token::TokenKind::Plus
    };
    (-) => {
        $crate::token::TokenKind::Minus
    };
    (*) => {
        $crate::token::TokenKind::Star
    };
    (/) => {
        $crate::token::TokenKind::Slash
    };
    (%) => {
        $crate::token::TokenKind::Percent
    };
    (^) => {
        $crate::token::TokenKind::Caret
    };
    (&) => {
        $crate::token::TokenKind::Amp
    };
    (|) => {
        $crate::token::TokenKind::Pipe
    };
    (<<) => {
        $crate::token::TokenKind::Shl
    };
    (>>) => {
        $crate::token::TokenKind::Shr
    };
    (+=) => {
        $crate::token::TokenKind::PlusEq
    };
    (-=) => {
        $crate::token::TokenKind::MinusEq
    };
    (*=) => {
        $crate::token::TokenKind::StarEq
    };
    (/=) => {
        $crate::token::TokenKind::SlashEq
    };
    (%=) => {
        $crate::token::TokenKind::PercentEq
    };
    (^=) => {
        $crate::token::TokenKind::CaretEq
    };
    (&=) => {
        $crate::token::TokenKind::AmpEq
    };
    (|=) => {
        $crate::token::TokenKind::PipeEq
    };
    (<<=) => {
        $crate::token::TokenKind::ShlEq
    };
    (>>=) => {
        $crate::token::TokenKind::ShrEq
    };
    (_) => {
        $crate::token::TokenKind::Underscore
    };
    (err) => {
        $crate::token::TokenKind::Error
    };
    (ws) => {
        $crate::token::TokenKind::Whitespace
    };
    (comment) => {
        $crate::token::TokenKind::Comment
    };
    (fn) => {
        $crate::token::TokenKind::FnKw
    };
    (let) => {
        $crate::token::TokenKind::LetKw
    };
    (if) => {
        $crate::token::TokenKind::IfKw
    };
    (else) => {
        $crate::token::TokenKind::ElseKw
    };
    (while) => {
        $crate::token::TokenKind::WhileKw
    };
    (for) => {
        $crate::token::TokenKind::ForKw
    };
    (loop) => {
        $crate::token::TokenKind::LoopKw
    };
    (in) => {
        $crate::token::TokenKind::InKw
    };
    (return) => {
        $crate::token::TokenKind::ReturnKw
    };
    (break) => {
        $crate::token::TokenKind::BreakKw
    };
    (continue) => {
        $crate::token::TokenKind::ContinueKw
    };
    (match) => {
        $crate::token::TokenKind::MatchKw
    };
    (struct) => {
        $crate::token::TokenKind::StructKw
    };
    (enum) => {
        $crate::token::TokenKind::EnumKw
    };
    (type) => {
        $crate::token::TokenKind::TypeKw
    };
    (pub) => {
        $crate::token::TokenKind::PubKw
    };
    (extern) => {
        $crate::token::TokenKind::ExternKw
    };
    (static) => {
        $crate::token::TokenKind::StaticKw
    };
    (import) => {
        $crate::token::TokenKind::ImportKw
    };
    (as) => {
        $crate::token::TokenKind::AsKw
    };
    (mut) => {
        $crate::token::TokenKind::MutKw
    };
    (eof) => {
        $crate::token::TokenKind::Eof
    };
}
-------------------------------------------------------------------------------- /compiler/hir/src/lib.rs: --------------------------------------------------------------------------------
use std::fmt::Debug;

use arena::{Arena, Ix};
use diagnostics::Diagnostic;
use ident::AbsPath;
pub use lang_item::LangItem;
use syntax::{ast as a, token::StringLit};
use text::Text;
use verde::{storage, Id, Tracked};

use crate::ast::{AstId, ErasedAstId};

pub mod ast;
pub mod ident;
pub mod lang_item;

#[storage]
pub
struct Storage(
    AbsPath,
    Item,
    ItemDiagnostic,
    lang_item::LangItemMap,
    lang_item::build_lang_item_map,
);

// NOTE(review): every generic argument in this file was reconstructed from field
// usage — the extracted dump had all `<...>` stripped. The `Ix<Expr>`/`Ix<Type>`/
// `Id<AbsPath>` choices follow clearly from usage; the exact `AstId<...>` node
// types are guesses marked TODO. Confirm against the original source.
pub type ItemDiagnostic = Diagnostic<ErasedAstId>;

/// A name together with the AST node it came from.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Name {
    pub name: Text,
    pub id: AstId<a::Name>,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub enum AttrKind {
    LangItem(LangItem),
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct Attr {
    pub kind: AttrKind,
    // TODO(review): confirm the AST node type.
    pub id: AstId<a::Attr>,
}

/// A single HIR item; the tracked unit of incremental compilation.
/// Expression/type/local bodies are arena-allocated and referenced by `Ix`.
#[derive(Tracked, Clone, PartialEq, Eq)]
pub struct Item {
    #[id]
    pub path: Id<AbsPath>,
    pub name: Name,
    pub attrs: Vec<Attr>,
    pub exprs: Arena<Expr>,
    pub types: Arena<Type>,
    pub locals: Arena<Local>,
    pub kind: ItemKind,
}

#[derive(Clone, PartialEq, Eq)]
pub enum ItemKind {
    Struct(Struct),
    Enum(Enum),
    Fn(Fn),
    TypeAlias(TypeAlias),
    Static(Static),
}

impl Debug for ItemKind {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ItemKind::Struct(_) => write!(f, "struct"),
            ItemKind::Enum(_) => write!(f, "enum"),
            ItemKind::Fn(_) => write!(f, "fn"),
            ItemKind::TypeAlias(_) => write!(f, "type alias"),
            ItemKind::Static(_) => write!(f, "static"),
        }
    }
}

#[derive(Clone, PartialEq, Eq)]
pub struct Fn {
    pub abi: Option<Abi>,
    pub name: Name,
    pub params: Arena<Param>,
    pub ret: Option<Ix<Type>>,
    /// `None` for bodiless (e.g. extern) functions.
    pub body: Option<Block>,
}

#[derive(Clone, PartialEq, Eq)]
pub struct Param {
    pub name: Name,
    pub ty: Ix<Type>,
    // TODO(review): confirm the AST node type.
    pub id: AstId<a::Param>,
}

#[derive(Clone, PartialEq, Eq)]
pub struct AbiDecl {
    pub abi: &'static str,
    // TODO(review): `StringLit` is imported and otherwise unused, so it is presumed
    // to be the node type here — confirm.
    pub id: AstId<StringLit>,
}

#[derive(Clone, PartialEq, Eq)]
pub struct Abi {
    /// `None` means a bare `extern` with no explicit ABI string.
    pub abi: Option<AbiDecl>,
    // TODO(review): confirm the AST node type.
    pub id: AstId<a::Abi>,
}

#[derive(Clone, PartialEq, Eq)]
pub struct Struct {
    pub name: Name,
    // TODO(review): field element type was lost in extraction; `Param`
    // (name + type + id) matches the shape of a record field — confirm.
    pub fields: Arena<Param>,
}

#[derive(Clone, PartialEq, Eq)]
pub struct Enum {
    pub name: Name,
    pub variants: Arena<Variant>,
}

#[derive(Clone, PartialEq, Eq)]
pub struct Variant(pub Name);

#[derive(Clone, PartialEq, Eq)]
pub struct TypeAlias {
    pub name: Name,
    pub ty: Ix<Type>,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct Static {
    pub name: Name,
    pub ty: Ix<Type>,
    pub init: Ix<Expr>,
}

#[derive(Clone, PartialEq, Eq)]
pub struct Type {
    pub kind: TypeKind,
    // TODO(review): confirm the AST node type.
    pub id: AstId<a::Type>,
}

#[derive(Clone, PartialEq, Eq)]
pub enum TypeKind {
    Array(ArrayType),
    Fn(FnType),
    Infer,
    Struct(Id<AbsPath>),
    Enum(Id<AbsPath>),
    Alias(Id<AbsPath>),
    Ptr(PtrType),
    Error,
}

impl Debug for TypeKind {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            TypeKind::Array(_) => write!(f, "array"),
            TypeKind::Fn(_) => write!(f, "fn"),
            // NOTE(review): these two format strings are empty in the dump; they may
            // originally have been angle-bracketed text (e.g. "<inferred>") that the
            // extraction stripped. Left empty rather than guessed — confirm.
            TypeKind::Infer => write!(f, ""),
            TypeKind::Struct(_) => write!(f, "struct"),
            TypeKind::Enum(_) => write!(f, "enum"),
            TypeKind::Alias(_) => write!(f, "alias"),
            TypeKind::Ptr(_) => write!(f, "pointer"),
            TypeKind::Error => write!(f, ""),
        }
    }
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct ArrayType {
    pub ty: Ix<Type>,
    pub len: Ix<Expr>,
}

#[derive(Clone, PartialEq, Eq)]
pub struct FnType {
    pub abi: Option<Abi>,
    pub params: Vec<Ix<Type>>,
    pub ret: Option<Ix<Type>>,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct PtrType {
    pub mutable: bool,
    pub ty: Ix<Type>,
}

#[derive(Clone, PartialEq, Eq)]
pub struct Expr {
    pub kind: ExprKind,
    // TODO(review): confirm the AST node type.
    pub id: AstId<a::Expr>,
}

#[derive(Clone, PartialEq, Eq)]
pub enum ExprKind {
    Continue,
    Array(ArrayExpr),
    Let(LetExpr),
    Block(Block),
    Infix(InfixExpr),
    Break(Option<Ix<Expr>>),
    Call(CallExpr),
    Struct(StructExpr),
    Cast(CastExpr),
    Field(FieldExpr),
    Index(IndexExpr),
    Literal(Literal),
    Loop(LoopExpr),
    Match(MatchExpr),
    Fn(Id<AbsPath>),
    Static(Id<AbsPath>),
    Local(Ix<Local>),
    Param(Ix<Param>),
    EnumVariant(VariantExpr),
    Ref(RefExpr),
    Prefix(PrefixExpr),
    Return(Option<Ix<Expr>>),
    Error,
}

#[derive(Clone, PartialEq, Eq)]
pub struct ArrayExpr {
    pub elems: Vec<Ix<Expr>>,
    /// `true` for `[elem; len]`-style repetition.
    pub repeat: bool,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct Local {
    pub decl: Name,
}

#[derive(Clone, PartialEq, Eq)]
pub struct LetExpr {
    pub name: Name,
    pub ty: Option<Ix<Type>>,
    pub init: Option<Ix<Expr>>,
    pub local: Ix<Local>,
}

/// A block: statements whose values are discarded, plus an optional tail value.
#[derive(Clone, PartialEq, Eq, Default)]
pub struct Block {
    pub discard: Vec<Ix<Expr>>,
    pub value: Option<Ix<Expr>>,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct InfixExpr {
    pub lhs: Ix<Expr>,
    pub op: InfixOp,
    // TODO(review): confirm the AST node type for the operator token.
    pub op_id: AstId<a::InfixOp>,
    pub rhs: Ix<Expr>,
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum InfixOp {
    Or,
    And,
    Eq,
    NotEq,
    Lt,
    Leq,
    Gt,
    Geq,
    Add,
    Sub,
    Mul,
    Div,
    Mod,
    Shl,
    Shr,
    Xor,
    BitOr,
    BitAnd,
    Assign,
    AddAssign,
    SubAssign,
    MulAssign,
    DivAssign,
    ModAssign,
    ShlAssign,
    ShrAssign,
    XorAssign,
    BitOrAssign,
    BitAndAssign,
    Error,
}

#[derive(Clone, PartialEq, Eq)]
pub struct CallExpr {
    pub callee: Ix<Expr>,
    pub args: Vec<Ix<Expr>>,
}

#[derive(Clone, PartialEq, Eq)]
pub struct StructExpr {
    pub struct_: Id<AbsPath>,
    pub args: Vec<Ix<Expr>>,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct CastExpr {
    pub expr: Ix<Expr>,
    pub ty: Ix<Type>,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct FieldExpr {
    pub expr: Ix<Expr>,
    pub field: Name,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct IndexExpr {
    pub expr: Ix<Expr>,
    pub index: Ix<Expr>,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Literal {
    Bool(bool),
    Char(u8),
    /// Bit pattern of the float, so `Eq`/`Hash` are well-defined.
    Float(u64),
    Int(i64),
    String(&'static str),
}

#[derive(Clone, PartialEq, Eq)]
pub struct LoopExpr {
    pub body: Block,
}

#[derive(Clone, PartialEq, Eq)]
pub struct MatchExpr {
    pub expr: Ix<Expr>,
    pub arms: Vec<MatchArm>,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct MatchArm {
    pub value: Ix<Expr>,
    pub then: Ix<Expr>,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct VariantExpr {
    pub path: Id<AbsPath>,
    pub variant: Name,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct RefExpr {
    pub expr: Ix<Expr>,
    pub mutable: bool,
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub struct PrefixExpr {
    pub op: PrefixOp,
    // TODO(review): confirm the AST node type for the operator token.
    pub op_id: AstId<a::PrefixOp>,
    pub expr: Ix<Expr>,
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PrefixOp {
    Not,
    Neg,
    Deref,
    Error,
}
-------------------------------------------------------------------------------- /compiler/driver/src/lib.rs: --------------------------------------------------------------------------------
use std::sync::Mutex;

pub use codegen::{target, CodegenOptions};
use diagnostics::{emit, DiagKind, FileCache, FilePath, FullDiagnostic};
use hir::{
    ast::AstMap,
    ident::{AbsPath, PackageId},
    lang_item::{build_lang_item_map, LangItemMap},
    ItemDiagnostic,
};
use hir_lower::{
    index::{build_ast_map, build_package_tree, generate_index, Index, ModuleMap, PackageTree,
// (use hir_lower::{index::{...}}, continued)
        TempMap,
    },
    lower::{build_hir_sea, lower_to_hir},
    prelude::{get_prelude, Prelude},
    Module,
    TempDiagnostic,
    VisiblePackages,
};
use parse::ParseContext;
use rayon::prelude::*;
use rustc_hash::FxHashMap;
use text::Text;
use tracing::{span, Level, Span};
use tycheck::type_check;
use verde::{db, Db, Id};

// NOTE(review): generic arguments throughout this file were reconstructed — the
// extracted dump had all `<...>` stripped. Confirm against the original source.

#[db]
pub struct Database(hir::Storage, hir_lower::Storage, thir::Storage, tycheck::Storage);

pub struct SourceFile {
    pub path: FilePath,
    pub source: String,
}

/// The input to the compiler.
pub struct CompileInput {
    /// The incremental database. If there was no previous compilation, use `Database::default()`.
    pub db: Database,
    /// The files to compile. The root file is the first one. The order of the others doesn't matter.
    pub files: Vec<SourceFile>,
    /// Options controlling code generation.
    pub codegen_options: CodegenOptions,
    /// Output HIR.
    pub emit_hir: bool,
    /// If `None`, only check the code, emitting any errors.
    pub output: Option<FilePath>,
}

/// The output of the compilation
pub struct CompileOutput {
    /// The incremental database to cache between compilations.
    pub db: Database,
}

/// Run the whole pipeline: parse -> index -> package tree -> HIR -> type check
/// -> (optionally) codegen, then emit all collected diagnostics.
pub fn compile(input: CompileInput) -> CompileOutput {
    let s = span!(Level::INFO, "compile");
    let _e = s.enter();

    assert!(!input.files.is_empty(), "no files to compile");

    let dbc = input.db;
    let db = &dbc as &(dyn Db + Send + Sync);

    let (modules, cache) = parse(&s, db, input.files);
    let (indices, maps) = index_gen(&s, db, &modules);
    let (tree, prelude, packages) = packages(&s, db, &indices);
    let (items, lang_item_map, amap, tmap) = hir(&s, db, &modules, maps, tree, prelude, packages);
    let thir = tyck(&s, db, items, lang_item_map);

    if input.emit_hir {
        for (p, &hir) in thir.hir.iter() {
            let thir = thir.items[p];
            println!("{}", pretty::pretty_print(db, hir, thir));
        }
    }

    // Only generate code when no phase emitted an error.
    if should_codegen(db) {
        if let Some(path) = input.output {
            let package = codegen(&s, db, &input.codegen_options, &thir);
            std::fs::write(path.path(), package).unwrap();
        }
    }

    emit_all(db, &cache, &amap, &tmap);

    CompileOutput { db: dbc }
}

/// Parse every file in parallel; the first file is the package root.
fn parse(compile: &Span, db: &(dyn Db + Send + Sync), files: Vec<SourceFile>) -> (Vec<Id<Module>>, FileCache) {
    let f = files.get(0).expect("No source files provided");
    let parser = Parser::new(db, f.path);
    let modules: Vec<_> = files
        .into_par_iter()
        .map(|x| {
            let s = span!(parent: compile, Level::TRACE, "thread");
            let _e = s.enter();
            parser.parse(x)
        })
        .collect();
    let cache = parser.finish();
    (modules, cache)
}

/// Generate per-module indices (and module maps) in parallel.
fn index_gen(compile: &Span, db: &(dyn Db + Send + Sync), modules: &[Id<Module>]) -> (Vec<Id<Index>>, Vec<ModuleMap>) {
    let mut indices = Vec::with_capacity(modules.len());
    let mut maps = Vec::with_capacity(modules.len());
    modules
        .par_iter()
        .map(|&x| {
            db.execute(|ctx| {
                let s = span!(parent: compile, Level::TRACE, "thread");
                let _e = s.enter();

                let m = db.get(x);
                let mut map = ModuleMap::new(m.path, m.file);
                // Release the read guard before running the query.
                drop(m);

                let index = generate_index(ctx, x, &mut map);
                (index, map)
            })
        })
        .unzip_into_vecs(&mut indices, &mut maps);

    (indices, maps)
}

fn packages(
    _: &Span, db: &(dyn Db + Send + Sync), indices: &[Id<Index>],
) -> (Id<PackageTree>, Id<Prelude>, Id<VisiblePackages>) {
    let tree = db.execute(|ctx| build_package_tree(ctx, &indices));
    let prelude = db.execute(|ctx| get_prelude(ctx, tree));
    // The driver currently compiles a single package, which can only see itself (`root`).
    let packages = db.set_input(VisiblePackages {
        package: PackageId(0),
        packages: {
            let mut p = FxHashMap::default();
            p.insert(Text::new("root"), PackageId(0));
            p
        },
    });
    (tree, prelude, packages)
}

/// Lower every module to HIR in parallel, then build the item sea and lang-item map.
fn hir(
    compile: &Span, db: &(dyn Db + Send + Sync), modules: &[Id<Module>], mut maps: Vec<ModuleMap>,
    tree: Id<PackageTree>, prelude: Id<Prelude>, packages: Id<VisiblePackages>,
) -> (FxHashMap<Id<AbsPath>, Id<hir::Item>>, Id<LangItemMap>, AstMap, TempMap) {
    let modules: Vec<_> = modules
        .into_par_iter()
        .zip(maps.par_iter_mut())
        .map(move |(&x, map)| {
            let s = span!(parent: compile, Level::TRACE, "thread");
            let _e = s.enter();

            db.execute(|ctx| lower_to_hir(ctx, x, tree, prelude, packages, map))
        })
        .collect();
    let (amap, tmap) = build_ast_map(maps);
    let items = build_hir_sea(db, modules);
    let lang_item_map = db.execute(|ctx| build_lang_item_map(ctx, &items));
    (items, lang_item_map, amap, tmap)
}

/// Type-check declarations first (so bodies can reference all signatures),
/// then check every item body in parallel.
fn tyck(
    compile: &Span, db: &(dyn Db + Send + Sync), hir: FxHashMap<Id<AbsPath>, Id<hir::Item>>,
    lang_item_map: Id<LangItemMap>,
) -> thir::Thir {
    let decls: FxHashMap<_, _> = hir
        .par_iter()
        .map(|(&path, &item)| {
            let s = span!(parent: compile, Level::TRACE, "thread");
            let _e = s.enter();

            (
                path,
                db.execute(|ctx| tycheck::decl::type_decl(ctx, item, lang_item_map, &hir)),
            )
        })
        .collect();
    let items = hir
        .par_iter()
        .map(|(&path, &item)| {
            let s = span!(parent: compile, Level::TRACE, "thread");
            let _e = s.enter();

            (
                path,
                db.execute(|ctx| type_check(ctx, item, decls[&path], lang_item_map, &hir, &decls)),
            )
        })
        .collect();
    thir::Thir { hir, decls, items }
}

/// `true` iff no phase produced an error-level diagnostic.
fn should_codegen(db: &dyn Db) -> bool {
    // NOTE(review): the three diagnostic type parameters were lost in extraction;
    // Full/Temp/Item matches the imports and `emit_all` below — confirm the order.
    let r = db.get_all::<FullDiagnostic>().any(|x| x.kind == DiagKind::Error)
        || db.get_all::<TempDiagnostic>().any(|x| x.kind == DiagKind::Error)
        || db.get_all::<ItemDiagnostic>().any(|x| x.kind == DiagKind::Error);
    !r
}

fn codegen(compile: &Span, db: &(dyn Db + Send + Sync), options: &CodegenOptions, thir: &thir::Thir) -> Vec<u8> {
    let s = span!(Level::DEBUG, "codegen");
    let _e = s.enter();

    let decls = codegen::declare(db, options, thir);
    thir.hir.par_iter().for_each(|(id, &hir)| {
        let s = span!(parent: compile, Level::TRACE, "thread");
        let _e = s.enter();

        codegen::item(db, options, &decls, thir, hir, thir.items[id])
    });
    decls.finish()
}

fn emit_all(db: &(dyn Db + Send + Sync), cache: &FileCache, amap: &AstMap, tmap: &TempMap) {
    // NOTE(review): diagnostic type parameters reconstructed from the span-map each
    // diagnostic kind needs (Full -> none, Temp -> TempMap, Item -> AstMap) — confirm.
    emit(db.get_all::<FullDiagnostic>().cloned(), &cache, &());
    emit(db.get_all::<TempDiagnostic>().cloned(), &cache, &tmap);
    emit(db.get_all::<ItemDiagnostic>().cloned(), &cache, &amap);
}

/// Shared parsing state: the file cache is behind a mutex so `parse` can run
/// from rayon worker threads.
struct Parser<'a> {
    db: &'a (dyn Db + Send + Sync),
    root: FilePath,
    cache: Mutex<FileCache>,
}

impl<'a> Parser<'a> {
    fn new(db: &'a (dyn Db + Send + Sync), root: FilePath) -> Self {
        Self {
            db,
            root,
            cache: Mutex::new(FileCache::new()),
        }
    }

    fn parse(&self, file: SourceFile) -> Id<Module> {
        let s = span!(Level::DEBUG, "parse", path = %file.path);
        let _e = s.enter();

        let (ast, diags) = ParseContext::new().parse_file(&file.source);
        {
            let mut cache = self.cache.lock().unwrap();
            cache.set_file(file.path, file.source);
            emit(diags, &cache, &file.path);
        }

        self.db
            .set_input(Module::from_file(self.db, self.root, ast, file.path, PackageId(0)))
    }

    fn finish(self) -> FileCache { self.cache.into_inner().unwrap() }
}
-------------------------------------------------------------------------------- /compiler/parse/src/parse/mod.rs: --------------------------------------------------------------------------------
use std::{collections::hash_map::Entry, fmt::Write};

use diagnostics::{FileDiagnostic, Span};
use lex::{token::TokenKind, T};
use rustc_hash::FxHashMap;
use syntax::builder::{TreeBuilder, TreeBuilderContext};

use crate::{
    api::Api,
    parse::{
        recovery::{ParseRule, Recovery, Rule},
        rules::Item,
    },
};

mod recovery;
mod rules;

#[derive(Copy, Clone, Debug)]
struct RuleData {
    rule: ParseRule,
    /// If we're still trying to parse the leading token of the rule.
    beginning: bool,
    /// CST node depth at the beginning of the rule.
25 | node_depth: usize, 26 | } 27 | 28 | impl PartialEq for RuleData { 29 | fn eq(&self, other: &Self) -> bool { self.rule == other.rule && self.node_depth == other.node_depth } 30 | } 31 | 32 | pub struct Parser<'c, 's> { 33 | api: Api<'c, 's>, 34 | diags: Vec, 35 | rule_stack: Vec, 36 | } 37 | 38 | impl<'c, 's> Parser<'c, 's> { 39 | pub fn new(source: &'s str, ctx: &'c mut TreeBuilderContext) -> Self { 40 | Self { 41 | api: Api::new(source, ctx), 42 | diags: Vec::new(), 43 | rule_stack: Vec::new(), 44 | } 45 | } 46 | 47 | pub fn parse(mut self) -> (TreeBuilder<'c>, Vec) { 48 | self.parse_inner(); 49 | (self.api.finish(), self.diags) 50 | } 51 | 52 | fn parse_inner(&mut self) { 53 | let _ = self.repeat(|p| { 54 | while !p.is_empty() { 55 | let _ = p.run(Item); 56 | } 57 | 58 | Recovery::ok() 59 | }); 60 | } 61 | } 62 | 63 | impl Parser<'_, '_> { 64 | fn run(&mut self, rule: T) -> Recovery { 65 | let data = RuleData { 66 | rule: rule.rule(), 67 | beginning: true, 68 | node_depth: self.api.node_depth(), 69 | }; 70 | self.rule_stack.push(data); 71 | let recovery = rule.parse(self); 72 | assert_eq!(data, self.rule_stack.pop().unwrap()); 73 | recovery 74 | } 75 | 76 | fn repeat(&mut self, f: impl FnOnce(&mut Self) -> Recovery) -> Recovery { 77 | let data = RuleData { 78 | rule: ParseRule::Repeat, 79 | beginning: false, 80 | node_depth: self.api.node_depth(), 81 | }; 82 | self.rule_stack.push(data); 83 | 84 | let ret = f(self); 85 | assert_eq!(data, self.rule_stack.pop().unwrap()); 86 | ret 87 | } 88 | 89 | fn is_empty(&self) -> bool { matches!(self.api.peek().kind, T![eof]) } 90 | 91 | /// Expect a token of a certain kind, and recover if it's not found. 
92 | fn expect(&mut self, kind: TokenKind) -> Recovery { 93 | let token = self.api.peek(); 94 | 95 | if token.kind != kind { 96 | self.diags.push( 97 | token 98 | .span 99 | .error({ 100 | let last = self.rule_stack.last_mut().expect("rule stack is empty"); 101 | if last.beginning { 102 | last.beginning = false; 103 | format!("expected {}", last.rule) 104 | } else { 105 | format!("expected {}", SyntaxKind::from(kind)) 106 | } 107 | }) 108 | .label(token.span.label(format!("found {}", SyntaxKind::from(token.kind)))), 109 | ); 110 | self.recover() 111 | } else { 112 | self.api.bump(); 113 | Recovery::ok() 114 | } 115 | } 116 | 117 | fn comma_sep_list(&mut self, end: TokenKind, rule: impl Rule) -> Recovery { 118 | self.rule_stack.push(RuleData { 119 | rule: ParseRule::List, 120 | beginning: false, 121 | node_depth: self.api.node_depth(), 122 | }); 123 | let ret = self.comma_sep_list_inner(end, rule); 124 | self.rule_stack.pop(); 125 | ret 126 | } 127 | 128 | fn comma_sep_list_inner(&mut self, end: TokenKind, rule: impl Rule) -> Recovery { 129 | let mut end_comma = true; 130 | loop { 131 | let next = self.api.peek(); 132 | if next.kind == end { 133 | break; 134 | } 135 | 136 | if !end_comma { 137 | self.diags.push( 138 | next.span 139 | .error(format!("expected `,` or {}", SyntaxKind::from(end))) 140 | .label(next.span.label(format!("found {}", SyntaxKind::from(next.kind)))), 141 | ); 142 | 143 | p!(self.recover()); 144 | } 145 | 146 | p!(self.run(rule)); 147 | 148 | let next = self.api.peek(); 149 | if next.kind == T![,] { 150 | self.api.bump(); 151 | end_comma = true; 152 | } else { 153 | end_comma = false; 154 | } 155 | } 156 | Recovery::ok() 157 | } 158 | 159 | fn recover(&mut self) -> Recovery { 160 | self.api.start_node(SyntaxKind::Error); 161 | 162 | let map = self.generate_recovery_map(); 163 | 164 | let ret = loop { 165 | let curr = self.api.peek(); 166 | if let Some(x) = map.get(&curr.kind) { 167 | if x.consume { 168 | self.api.bump(); 169 | } 170 | break 
x.recovery; 171 | } 172 | 173 | match curr.kind { 174 | T!['('] => { 175 | while !matches!(self.api.peek().kind, T![')'] | T![eof]) { 176 | self.api.bump(); 177 | } 178 | }, 179 | T!['{'] => { 180 | while !matches!(self.api.peek().kind, T!['}'] | T![eof]) { 181 | self.api.bump(); 182 | } 183 | }, 184 | T!['['] => { 185 | while !matches!(self.api.peek().kind, T![']'] | T![eof]) { 186 | self.api.bump(); 187 | } 188 | }, 189 | T![eof] => break Recovery::to(0), 190 | _ => self.api.bump(), 191 | } 192 | }; 193 | 194 | self.api.finish_node(); 195 | 196 | ret 197 | } 198 | 199 | fn generate_recovery_map(&mut self) -> FxHashMap { 200 | let mut map: FxHashMap = FxHashMap::default(); 201 | let mut insert = |kind: TokenKind, target: RecoveryTarget| match map.entry(kind) { 202 | Entry::Occupied(mut o) => { 203 | if o.get().recovery.get() < target.recovery.get() { 204 | o.insert(target); 205 | } 206 | }, 207 | Entry::Vacant(v) => { 208 | v.insert(target); 209 | }, 210 | }; 211 | 212 | let mut iter = self.rule_stack.windows(2).enumerate().rev(); 213 | while let Some((parent_i, [parent, child, ..])) = iter.next() { 214 | if matches!(parent.rule, ParseRule::Repeat | ParseRule::List) { 215 | if matches!(parent.rule, ParseRule::List) { 216 | insert( 217 | T![,], 218 | RecoveryTarget { 219 | recovery: Recovery::to(parent_i), 220 | consume: false, 221 | }, 222 | ); 223 | } 224 | for &kind in child.rule.start() { 225 | insert( 226 | kind, 227 | RecoveryTarget { 228 | recovery: Recovery::to(parent_i), 229 | consume: false, 230 | }, 231 | ); 232 | } 233 | 234 | iter.next(); 235 | } 236 | 237 | let nearest_repeat_or_list = self.rule_stack[..=parent_i] 238 | .iter() 239 | .enumerate() 240 | .rev() 241 | .find_map(|(i, x)| matches!(x.rule, ParseRule::Repeat | ParseRule::List).then_some(i)); 242 | 243 | for &(kind, consume) in child.rule.end() { 244 | if !consume { 245 | insert( 246 | kind, 247 | RecoveryTarget { 248 | recovery: Recovery::to(nearest_repeat_or_list.expect("no repeat or 
list")), 249 | consume, 250 | }, 251 | ); 252 | } else { 253 | insert( 254 | kind, 255 | RecoveryTarget { 256 | recovery: Recovery::to(parent_i), 257 | consume, 258 | }, 259 | ); 260 | } 261 | } 262 | } 263 | 264 | map 265 | } 266 | } 267 | 268 | struct RecoveryTarget { 269 | recovery: Recovery, 270 | consume: bool, 271 | } 272 | 273 | fn fmt_kinds(kinds: &[TokenKind]) -> String { 274 | let mut s = String::new(); 275 | for (i, kind) in kinds.iter().copied().enumerate() { 276 | if i != 0 { 277 | s.push_str(", "); 278 | } 279 | write!(s, "{}", SyntaxKind::from(kind)).unwrap(); 280 | } 281 | s 282 | } 283 | 284 | macro_rules! p { 285 | ($self:ident . $f:ident $args:tt) => { 286 | $self . $f $args . check($self)? 287 | }; 288 | 289 | ($f:ident ($self:ident $(, $($args:tt),+)? $(,)?)) => { 290 | $f($self, $($($args)*)?).check($self)? 291 | }; 292 | } 293 | pub(crate) use p; 294 | 295 | macro_rules! select { 296 | ($self:ident, $(T!$kind:tt => $value:expr,)*) => {{ 297 | let tok = $self.api.peek(); 298 | let expected = [$(T!$kind,)*]; 299 | match tok.kind { 300 | $(T!$kind => $value,)* 301 | _ => { 302 | $self.diags.push( 303 | tok.span 304 | .error({ 305 | let last = $self.rule_stack.last_mut().expect("rule stack is empty"); 306 | if last.beginning { 307 | last.beginning = false; 308 | format!("expected {}", last.rule) 309 | } else { 310 | format!("expected one of: {}", crate::parse::fmt_kinds(&expected)) 311 | } 312 | }) 313 | .label(tok.span.label(format!("found {}", SyntaxKind::from(tok.kind)))), 314 | ); 315 | p!($self.recover()); 316 | }, 317 | } 318 | }}; 319 | } 320 | pub(crate) use select; 321 | use syntax::SyntaxKind; 322 | -------------------------------------------------------------------------------- /compiler/syntax/src/generated/kind.rs: -------------------------------------------------------------------------------- 1 | 2 | #![allow(clippy::all)] 3 | // This file is generated by build.rs 4 | // Do not edit 5 | 6 | #[derive(Debug, Clone, Copy, PartialEq, 
Eq, Hash, cstree::Syntax)]
#[repr(u32)]
pub enum SyntaxKind {
	/// Terminal tokens
	Dot,
	Ident,
	Semi,
	At,
	FnKw,
	LParen,
	Comma,
	RParen,
	Colon,
	Arrow,
	PubKw,
	ExternKw,
	StringLit,
	StructKw,
	LBrace,
	RBrace,
	EnumKw,
	TypeKw,
	Eq,
	StaticKw,
	AsKw,
	ImportKw,
	LBracket,
	RBracket,
	Underscore,
	Star,
	MutKw,
	ContinueKw,
	LetKw,
	PipePipe,
	AmpAmp,
	EqEq,
	Neq,
	Leq,
	Geq,
	Lt,
	Gt,
	Plus,
	Minus,
	Slash,
	Percent,
	Shl,
	Shr,
	Caret,
	Pipe,
	Amp,
	PlusEq,
	SlashEq,
	StarEq,
	PercentEq,
	ShrEq,
	ShlEq,
	MinusEq,
	PipeEq,
	AmpEq,
	CaretEq,
	BreakKw,
	ForKw,
	InKw,
	IfKw,
	ElseKw,
	BoolLit,
	CharLit,
	FloatLit,
	IntLit,
	LoopKw,
	WhileKw,
	MatchKw,
	FatArrow,
	Not,
	ReturnKw,
	Whitespace,
	Comment,
	Error,
	/// Non-terminal nodes
	Path,
	Name,
	File,
	Item,
	TokenTree,
	Attribute,
	Visibility,
	Fn,
	Struct,
	Enum,
	TypeAlias,
	Static,
	Import,
	Abi,
	ParamList,
	RetTy,
	Block,
	Param,
	VariantList,
	Rename,
	ListImport,
	RenameImport,
	ImportTreeList,
	ArrayType,
	FnType,
	InferType,
	PathType,
	PtrType,
	TyParamList,
	SemiExpr,
	ArrayExpr,
	InfixExpr,
	BreakExpr,
	CallExpr,
	CastExpr,
	FieldExpr,
	ForExpr,
	IfExpr,
	IndexExpr,
	LoopExpr,
	MatchExpr,
	ParenExpr,
	NameExpr,
	PrefixExpr,
	RefExpr,
	ReturnExpr,
	WhileExpr,
	LetExpr,
	ArrayList,
	ArrayRepeat,
	ArgList,
	MatchArm,
	#[doc(hidden)]
	Eof,
}

impl std::fmt::Display for SyntaxKind {
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		match self {
			Self::Dot => write!(f, "`.`"),
			Self::Ident => write!(f, "identifier"),
			Self::Semi => write!(f, "`;`"),
			Self::At => write!(f, "`@`"),
			Self::FnKw => write!(f, "`fn`"),
			Self::LParen => write!(f, "`(`"),
			Self::Comma => write!(f, "`,`"),
			Self::RParen => write!(f, "`)`"),
			Self::Colon => write!(f, "`:`"),
			Self::Arrow => write!(f, "`->`"),
			Self::PubKw => write!(f, "`pub`"),
			Self::ExternKw => write!(f, "`extern`"),
			Self::StringLit => write!(f, "string"),
			Self::StructKw => write!(f, "`struct`"),
			Self::LBrace => write!(f, "`{{`"),
			Self::RBrace => write!(f, "`}}`"),
			Self::EnumKw => write!(f, "`enum`"),
			Self::TypeKw => write!(f, "`type`"),
			Self::Eq => write!(f, "`=`"),
			Self::StaticKw => write!(f, "`static`"),
			Self::AsKw => write!(f, "`as`"),
			Self::ImportKw => write!(f, "`import`"),
			Self::LBracket => write!(f, "`[`"),
			Self::RBracket => write!(f, "`]`"),
			Self::Underscore => write!(f, "`_`"),
			Self::Star => write!(f, "`*`"),
			Self::MutKw => write!(f, "`mut`"),
			Self::ContinueKw => write!(f, "`continue`"),
			Self::LetKw => write!(f, "`let`"),
			Self::PipePipe => write!(f, "`||`"),
			Self::AmpAmp => write!(f, "`&&`"),
			Self::EqEq => write!(f, "`==`"),
			Self::Neq => write!(f, "`!=`"),
			Self::Leq => write!(f, "`<=`"),
			Self::Geq => write!(f, "`>=`"),
			Self::Lt => write!(f, "`<`"),
			Self::Gt => write!(f, "`>`"),
			Self::Plus => write!(f, "`+`"),
			Self::Minus => write!(f, "`-`"),
			Self::Slash => write!(f, "`/`"),
			Self::Percent => write!(f, "`%`"),
			Self::Shl => write!(f, "`<<`"),
			Self::Shr => write!(f, "`>>`"),
			Self::Caret => write!(f, "`^`"),
			Self::Pipe => write!(f, "`|`"),
			Self::Amp => write!(f, "`&`"),
			Self::PlusEq => write!(f, "`+=`"),
			Self::SlashEq => write!(f, "`/=`"),
			Self::StarEq => write!(f, "`*=`"),
			Self::PercentEq => write!(f, "`%=`"),
			Self::ShrEq => write!(f, "`>>=`"),
			Self::ShlEq => write!(f, "`<<=`"),
			Self::MinusEq => write!(f, "`-=`"),
			Self::PipeEq => write!(f, "`|=`"),
			Self::AmpEq => write!(f, "`&=`"),
			Self::CaretEq => write!(f, "`^=`"),
			Self::BreakKw => write!(f, "`break`"),
			Self::ForKw => write!(f, "`for`"),
			Self::InKw => write!(f, "`in`"),
			Self::IfKw => write!(f, "`if`"),
			Self::ElseKw => write!(f, "`else`"),
			Self::BoolLit => write!(f, "boolean"),
			Self::CharLit => write!(f, "character"),
			Self::FloatLit => write!(f, "floating-point number"),
			Self::IntLit => write!(f, "integer"),
			Self::LoopKw => write!(f, "`loop`"),
			Self::WhileKw => write!(f, "`while`"),
			Self::MatchKw => write!(f, "`match`"),
			Self::FatArrow => write!(f, "`=>`"),
			Self::Not => write!(f, "`!`"),
			Self::ReturnKw => write!(f, "`return`"),
			Self::Whitespace => write!(f, "whitespace"),
			Self::Comment => write!(f, "comment"),
			Self::Error => write!(f, "error"),
			Self::Path => write!(f, "Path"),
			Self::Name => write!(f, "Name"),
			Self::File => write!(f, "File"),
			Self::Item => write!(f, "Item"),
			Self::TokenTree => write!(f, "TokenTree"),
			Self::Attribute => write!(f, "Attribute"),
			Self::Visibility => write!(f, "Visibility"),
			Self::Fn => write!(f, "Fn"),
			Self::Struct => write!(f, "Struct"),
			Self::Enum => write!(f, "Enum"),
			Self::TypeAlias => write!(f, "TypeAlias"),
			Self::Static => write!(f, "Static"),
			Self::Import => write!(f, "Import"),
			Self::Abi => write!(f, "Abi"),
			Self::ParamList => write!(f, "ParamList"),
			Self::RetTy => write!(f, "RetTy"),
			Self::Block => write!(f, "Block"),
			Self::Param => write!(f, "Param"),
			Self::VariantList => write!(f, "VariantList"),
			Self::Rename => write!(f, "Rename"),
			Self::ListImport => write!(f, "ListImport"),
			Self::RenameImport => write!(f, "RenameImport"),
			Self::ImportTreeList => write!(f, "ImportTreeList"),
			Self::ArrayType => write!(f, "ArrayType"),
			Self::FnType => write!(f, "FnType"),
			Self::InferType => write!(f, "InferType"),
			Self::PathType => write!(f, "PathType"),
			Self::PtrType => write!(f, "PtrType"),
			Self::TyParamList => write!(f, "TyParamList"),
			Self::SemiExpr => write!(f, "SemiExpr"),
			Self::ArrayExpr => write!(f, "ArrayExpr"),
			Self::InfixExpr => write!(f, "InfixExpr"),
			Self::BreakExpr => write!(f, "BreakExpr"),
			Self::CallExpr => write!(f, "CallExpr"),
			Self::CastExpr => write!(f, "CastExpr"),
			Self::FieldExpr => write!(f, "FieldExpr"),
			Self::ForExpr => write!(f, "ForExpr"),
			Self::IfExpr => write!(f, "IfExpr"),
			Self::IndexExpr => write!(f, "IndexExpr"),
			Self::LoopExpr => write!(f, "LoopExpr"),
			Self::MatchExpr => write!(f, "MatchExpr"),
			Self::ParenExpr => write!(f, "ParenExpr"),
			Self::NameExpr => write!(f, "NameExpr"),
			Self::PrefixExpr => write!(f, "PrefixExpr"),
			Self::RefExpr => write!(f, "RefExpr"),
			Self::ReturnExpr => write!(f, "ReturnExpr"),
			Self::WhileExpr => write!(f, "WhileExpr"),
			Self::LetExpr => write!(f, "LetExpr"),
			Self::ArrayList => write!(f, "ArrayList"),
			Self::ArrayRepeat => write!(f, "ArrayRepeat"),
			Self::ArgList => write!(f, "ArgList"),
			Self::MatchArm => write!(f, "MatchArm"),
			// NOTE(review): the scraped copy shows an empty string here; the
			// original may have been "<eof>" (tag-like text is stripped by the
			// scrape) — confirm against build.rs before relying on it.
			Self::Eof => write!(f, ""),
		}
	}
}

// NOTE(review): the scraped copy read `impl From for SyntaxKind`; the generic
// argument was restored from the `fn from(kind: lex::token::TokenKind)`
// signature below.
impl From<lex::token::TokenKind> for SyntaxKind {
	fn from(kind: lex::token::TokenKind) -> Self {
		match kind {
			lex::token::TokenKind::Dot => Self::Dot,
			lex::token::TokenKind::Ident => Self::Ident,
			lex::token::TokenKind::Semi => Self::Semi,
			lex::token::TokenKind::At => Self::At,
			lex::token::TokenKind::FnKw => Self::FnKw,
			lex::token::TokenKind::LParen => Self::LParen,
			lex::token::TokenKind::Comma => Self::Comma,
			lex::token::TokenKind::RParen => Self::RParen,
			lex::token::TokenKind::Colon => Self::Colon,
			lex::token::TokenKind::Arrow => Self::Arrow,
			lex::token::TokenKind::PubKw => Self::PubKw,
			lex::token::TokenKind::ExternKw => Self::ExternKw,
			lex::token::TokenKind::StringLit => Self::StringLit,
			lex::token::TokenKind::StructKw => Self::StructKw,
			lex::token::TokenKind::LBrace => Self::LBrace,
			lex::token::TokenKind::RBrace => Self::RBrace,
			lex::token::TokenKind::EnumKw => Self::EnumKw,
			lex::token::TokenKind::TypeKw => Self::TypeKw,
			lex::token::TokenKind::Eq => Self::Eq,
			lex::token::TokenKind::StaticKw => Self::StaticKw,
			lex::token::TokenKind::AsKw => Self::AsKw,
			lex::token::TokenKind::ImportKw => Self::ImportKw,
			lex::token::TokenKind::LBracket => Self::LBracket,
			lex::token::TokenKind::RBracket => Self::RBracket,
			lex::token::TokenKind::Underscore => Self::Underscore,
			lex::token::TokenKind::Star => Self::Star,
			lex::token::TokenKind::MutKw => Self::MutKw,
			lex::token::TokenKind::ContinueKw => Self::ContinueKw,
			lex::token::TokenKind::LetKw => Self::LetKw,
			lex::token::TokenKind::PipePipe => Self::PipePipe,
			lex::token::TokenKind::AmpAmp => Self::AmpAmp,
			lex::token::TokenKind::EqEq => Self::EqEq,
			lex::token::TokenKind::Neq => Self::Neq,
			lex::token::TokenKind::Leq => Self::Leq,
			lex::token::TokenKind::Geq => Self::Geq,
			lex::token::TokenKind::Lt => Self::Lt,
			lex::token::TokenKind::Gt => Self::Gt,
			lex::token::TokenKind::Plus => Self::Plus,
			lex::token::TokenKind::Minus => Self::Minus,
			lex::token::TokenKind::Slash => Self::Slash,
			lex::token::TokenKind::Percent => Self::Percent,
			lex::token::TokenKind::Shl => Self::Shl,
			lex::token::TokenKind::Shr => Self::Shr,
			lex::token::TokenKind::Caret => Self::Caret,
			lex::token::TokenKind::Pipe => Self::Pipe,
			lex::token::TokenKind::Amp => Self::Amp,
			lex::token::TokenKind::PlusEq => Self::PlusEq,
			lex::token::TokenKind::SlashEq => Self::SlashEq,
			lex::token::TokenKind::StarEq => Self::StarEq,
			lex::token::TokenKind::PercentEq => Self::PercentEq,
			lex::token::TokenKind::ShrEq => Self::ShrEq,
			lex::token::TokenKind::ShlEq => Self::ShlEq,
			lex::token::TokenKind::MinusEq => Self::MinusEq,
			lex::token::TokenKind::PipeEq => Self::PipeEq,
			lex::token::TokenKind::AmpEq => Self::AmpEq,
			lex::token::TokenKind::CaretEq => Self::CaretEq,
			lex::token::TokenKind::BreakKw => Self::BreakKw,
			lex::token::TokenKind::ForKw => Self::ForKw,
			lex::token::TokenKind::InKw => Self::InKw,
			lex::token::TokenKind::IfKw => Self::IfKw,
			lex::token::TokenKind::ElseKw => Self::ElseKw,
			lex::token::TokenKind::BoolLit => Self::BoolLit,
			lex::token::TokenKind::CharLit => Self::CharLit,
			lex::token::TokenKind::FloatLit => Self::FloatLit,
			lex::token::TokenKind::IntLit => Self::IntLit,
			lex::token::TokenKind::LoopKw => Self::LoopKw,
			lex::token::TokenKind::WhileKw => Self::WhileKw,
			lex::token::TokenKind::MatchKw => Self::MatchKw,
			lex::token::TokenKind::FatArrow => Self::FatArrow,
			lex::token::TokenKind::Not => Self::Not,
			lex::token::TokenKind::ReturnKw => Self::ReturnKw,
			lex::token::TokenKind::Whitespace => Self::Whitespace,
			lex::token::TokenKind::Comment => Self::Comment,
			lex::token::TokenKind::Error => Self::Error,
			lex::token::TokenKind::Eof => Self::Eof,
		}
	}
}
--------------------------------------------------------------------------------