├── .envrc ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── feature_request.md │ └── question.md ├── PULL_REQUEST_TEMPLATE.md ├── dependabot.yml └── workflows │ └── ci.yml ├── .gitignore ├── CHANGELOG.md ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── benches ├── all-packages.nix └── all-packages.rs ├── default.nix ├── examples ├── dump-ast.rs ├── error-report.rs ├── from-stdin.rs ├── list-fns.rs ├── preserve.rs └── test-nixpkgs.rs ├── flake.lock ├── flake.nix ├── fuzz ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── fuzz_targets │ ├── lexer.rs │ └── parser.rs └── rust-toolchain.toml ├── rustfmt.toml ├── shell.nix ├── src ├── ast.rs ├── ast │ ├── expr_ext.rs │ ├── interpol.rs │ ├── nodes.rs │ ├── operators.rs │ ├── path_util.rs │ ├── str_util.rs │ └── tokens.rs ├── kinds.rs ├── lib.rs ├── macros.rs ├── parser.rs ├── tests.rs ├── token_set.rs └── tokenizer.rs └── test_data ├── parser ├── error │ ├── error.expect │ ├── error.nix │ ├── extra_comma.expect │ ├── extra_comma.nix │ ├── formals_double_bind.expect │ ├── formals_double_bind.nix │ ├── inherit_from_late.expect │ ├── inherit_from_late.nix │ ├── inherit_incomplete.expect │ ├── inherit_incomplete.nix │ ├── path_bare_tilde.expect │ ├── path_bare_tilde.nix │ ├── path_interp_no_separator.expect │ ├── path_interp_no_separator.nix │ ├── path_interp_trailing_slash.expect │ ├── path_interp_trailing_slash.nix │ ├── path_store_interp.expect │ ├── path_store_interp.nix │ ├── path_tilde.expect │ ├── path_tilde.nix │ ├── path_tilde_slash.expect │ ├── path_tilde_slash.nix │ ├── path_trailing_slash.expect │ ├── path_trailing_slash.nix │ ├── select_both_errors.expect │ ├── select_both_errors.nix │ ├── ws_belongs_to_root.expect │ ├── ws_belongs_to_root.nix │ ├── ws_belongs_to_root2.expect │ └── ws_belongs_to_root2.nix └── success │ ├── apply.expect │ ├── apply.nix │ ├── assert.expect │ ├── assert.nix │ ├── attrpath_ident.expect │ ├── attrpath_ident.nix │ ├── attrset.expect │ ├── attrset.nix │ ├── 
attrset_dynamic.expect │ ├── attrset_dynamic.nix │ ├── attrset_empty.expect │ ├── attrset_empty.nix │ ├── attrset_rec.expect │ ├── attrset_rec.nix │ ├── bool_arith_ops.expect │ ├── bool_arith_ops.nix │ ├── bool_ops.expect │ ├── bool_ops.nix │ ├── bool_ops_eq.expect │ ├── bool_ops_eq.nix │ ├── docs.expect │ ├── docs.nix │ ├── has_attr_prec.expect │ ├── has_attr_prec.nix │ ├── if_elseif_else.expect │ ├── if_elseif_else.nix │ ├── import_nixpkgs.expect │ ├── import_nixpkgs.nix │ ├── inherit.expect │ ├── inherit.nix │ ├── inherit_dynamic.expect │ ├── inherit_dynamic.nix │ ├── interpolation.expect │ ├── interpolation.nix │ ├── lambda_is_not_uri.expect │ ├── lambda_is_not_uri.nix │ ├── lambda_list.expect │ ├── lambda_list.nix │ ├── lambda_nested.expect │ ├── lambda_nested.nix │ ├── let.expect │ ├── let.nix │ ├── let_legacy.expect │ ├── let_legacy.nix │ ├── list.expect │ ├── list.nix │ ├── list_concat.expect │ ├── list_concat.nix │ ├── math.expect │ ├── math.nix │ ├── math2.expect │ ├── math2.nix │ ├── math_no_ws.expect │ ├── math_no_ws.nix │ ├── merge.expect │ ├── merge.nix │ ├── multiple.expect │ ├── multiple.nix │ ├── operators_right_assoc.expect │ ├── operators_right_assoc.nix │ ├── or-as-ident.expect │ ├── or-as-ident.nix │ ├── or_in_attr.expect │ ├── or_in_attr.nix │ ├── path.expect │ ├── path.nix │ ├── path_interp.expect │ ├── path_interp.nix │ ├── path_interp_no_prefix.expect │ ├── path_interp_no_prefix.nix │ ├── path_no_newline.expect │ ├── path_no_newline.nix │ ├── pattern_bind_left.expect │ ├── pattern_bind_left.nix │ ├── pattern_bind_right.expect │ ├── pattern_bind_right.nix │ ├── pattern_default.expect │ ├── pattern_default.nix │ ├── pattern_default_attrset.expect │ ├── pattern_default_attrset.nix │ ├── pattern_default_ellipsis.expect │ ├── pattern_default_ellipsis.nix │ ├── pattern_ellipsis.expect │ ├── pattern_ellipsis.nix │ ├── pattern_trailing_comma.expect │ ├── pattern_trailing_comma.nix │ ├── pipe_left.expect │ ├── pipe_left.nix │ ├── 
pipe_left_assoc.expect │ ├── pipe_left_assoc.nix │ ├── pipe_left_math.expect │ ├── pipe_left_math.nix │ ├── pipe_mixed.expect │ ├── pipe_mixed.nix │ ├── pipe_mixed_math.expect │ ├── pipe_mixed_math.nix │ ├── pipe_right.expect │ ├── pipe_right.nix │ ├── pipe_right_assoc.expect │ ├── pipe_right_assoc.nix │ ├── pipe_right_math.expect │ ├── pipe_right_math.nix │ ├── select_default.expect │ ├── select_default.nix │ ├── select_ident.expect │ ├── select_ident.nix │ ├── select_string_dynamic.expect │ ├── select_string_dynamic.nix │ ├── string.expect │ ├── string.nix │ ├── string_complex_url.expect │ ├── string_complex_url.nix │ ├── string_interp_ident.expect │ ├── string_interp_ident.nix │ ├── string_interp_nested.expect │ ├── string_interp_nested.nix │ ├── string_interp_select.expect │ ├── string_interp_select.nix │ ├── trivia.expect │ ├── trivia.nix │ ├── with-import-let-in.expect │ ├── with-import-let-in.nix │ ├── with.expect │ └── with.nix └── tokenizer ├── error ├── path_interp_trailing_slash.expect ├── path_interp_trailing_slash.nix ├── path_tilde.expect ├── path_tilde.nix ├── path_tilde_slash.expect ├── path_tilde_slash.nix ├── path_trailing_slash.expect └── path_trailing_slash.nix └── success ├── comment_interspersed.expect ├── comment_interspersed.nix ├── comment_multiline.expect ├── comment_multiline.nix ├── comment_multiline2.expect ├── comment_multiline2.nix ├── float.expect ├── float.nix ├── float_no_leading_part.expect ├── float_no_leading_part.nix ├── float_no_trailing_part.expect ├── float_no_trailing_part.nix ├── float_scientific_lower.expect ├── float_scientific_lower.nix ├── float_scientific_neg.expect ├── float_scientific_neg.nix ├── float_scientific_pos.expect ├── float_scientific_pos.nix ├── float_scientific_upper.expect ├── float_scientific_upper.nix ├── int.expect ├── int.nix ├── lambda.expect ├── lambda.nix ├── lambda_arg_underscore.expect ├── lambda_arg_underscore.nix ├── let.expect ├── let.nix ├── list.expect ├── list.nix ├── list_concat.expect 
├── list_concat.nix ├── math.expect ├── math.nix ├── math_div_isnt_path.expect ├── math_div_isnt_path.nix ├── math_no_ws.expect ├── math_no_ws.nix ├── op_geq_isnt_path.expect ├── op_geq_isnt_path.nix ├── op_update.expect ├── op_update.nix ├── path_absolute.expect ├── path_absolute.nix ├── path_home.expect ├── path_home.nix ├── path_interp.expect ├── path_interp.nix ├── path_interp_apply.expect ├── path_interp_apply.nix ├── path_interp_multiple.expect ├── path_interp_multiple.nix ├── path_interp_multiple2.expect ├── path_interp_multiple2.nix ├── path_interp_then_plain.expect ├── path_interp_then_plain.nix ├── path_isnt_math.expect ├── path_isnt_math.nix ├── path_no_newline.expect ├── path_no_newline.nix ├── path_relative.expect ├── path_relative.nix ├── path_relative_prefix.expect ├── path_relative_prefix.nix ├── path_store.expect ├── path_store.nix ├── path_underscore.expect ├── path_underscore.nix ├── patterns.expect ├── patterns.nix ├── select_dynamic.expect ├── select_dynamic.nix ├── string.expect ├── string.nix ├── string_dollar_escape.expect ├── string_dollar_escape.nix ├── string_escape.expect ├── string_escape.nix ├── string_interp.expect ├── string_interp.nix ├── string_interp_dollar_escape.expect ├── string_interp_dollar_escape.nix ├── string_interp_nested.expect ├── string_interp_nested.nix ├── string_interp_select.expect ├── string_interp_select.nix ├── string_isnt_path.expect ├── string_isnt_path.nix ├── string_multiline.expect ├── string_multiline.nix ├── string_multiline_dollar_escape.expect ├── string_multiline_dollar_escape.nix ├── string_multiline_interp_escape.expect ├── string_multiline_interp_escape.nix ├── uri.expect ├── uri.nix ├── uri_with_underscore.expect ├── uri_with_underscore.nix ├── with.expect └── with.nix /.envrc: -------------------------------------------------------------------------------- 1 | use flake 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: 
-------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: 'bug' 6 | assignees: '' 7 | --- 8 | 9 | 12 | 13 | ### Describe the bug 14 | 15 | 16 | 17 | ### Code Snippet to reproduce 18 | 19 | 22 | 23 | ```nix 24 | 25 | ``` 26 | 27 | ### Expected behavior 28 | 29 | 30 | 31 | ### Additional context 32 | 33 | 36 | 37 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | --- 8 | 9 | 12 | 13 | ### Description 14 | 15 | 18 | 19 | ### Considered alternatives 20 | 21 | 24 | 25 | ### Additional context 26 | 27 | 30 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Question 3 | about: Ask a general question about the project 4 | title: '' 5 | labels: 'question' 6 | assignees: '' 7 | --- 8 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | ### Summary & Motivation 7 | 8 | 11 | 12 | ### Backwards-incompatible changes 13 | 14 | 18 | 19 | ### Further context 20 | 21 | 26 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | 
-------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | pull_request: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | test: 10 | name: test 11 | runs-on: ${{ matrix.os }} 12 | 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | os: 17 | - ubuntu-latest 18 | - macos-latest 19 | 20 | steps: 21 | - name: Checkout repository 22 | uses: actions/checkout@v3 23 | 24 | - name: Install Rust toolchain 25 | uses: actions-rs/toolchain@v1 26 | with: 27 | toolchain: stable 28 | profile: minimal 29 | override: true 30 | 31 | - name: Cache Dependencies 32 | uses: Swatinem/rust-cache@v2.0.0 33 | 34 | - name: Compile 35 | uses: actions-rs/cargo@v1 36 | with: 37 | command: test 38 | args: --no-run --locked 39 | 40 | - name: Test 41 | uses: actions-rs/cargo@v1 42 | with: 43 | command: test 44 | args: -- --nocapture 45 | 46 | rustfmt: 47 | name: rustfmt 48 | runs-on: ubuntu-latest 49 | steps: 50 | - name: Checkout repository 51 | uses: actions/checkout@v3 52 | 53 | - name: Install Rust 54 | uses: actions-rs/toolchain@v1 55 | with: 56 | toolchain: stable 57 | override: true 58 | profile: minimal 59 | components: rustfmt 60 | 61 | - name: Check formatting 62 | uses: actions-rs/cargo@v1 63 | with: 64 | command: fmt 65 | args: --all -- --check 66 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | 3 | /.direnv/ 4 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes between releases will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
6 | 7 | ## [v0.12.0] - 2025-01-09 8 | 9 | * Add support for pipe operators 10 | 11 | ## [v0.11.0] - 2022-11-11 12 | 13 | * removes the `types` module and replaces it with `ast` 14 | 15 | * the `ast` modules has better types and sum types 16 | 17 | * removes `ParsedType` in favor of `Expr` 18 | 19 | * removes the `value` module. methods on `Value` go on the individual types. For example, string related methods go on `ast::Str` 20 | 21 | * `parse` changed to `Root::parse` 22 | 23 | * `NODE_OR_DEFAULT` is removed 24 | 25 | * `NODE_KEY` -> `NODE_ATTRPATH`, `NODE_KEY_VALUE` -> `NODE_ATTRPATH_VALUE` 26 | 27 | * `BinOp::IsSet` is removed since it's actually not a normal binary operator. 28 | 29 | * `NODE_SELECT` is flattened in favor of `NODE_ATTRPATH` which consists of multiple Attrs, including `a.b.c` and `a.b.c or expr` 30 | 31 | * `a or b` is Apply, not "OrDefault", which matches the result of Nix. Fixes #23 32 | 33 | * `${` is considered invalid at Expr places now, which matches the result of Nix. 34 | 35 | * remove `TOKEN_DYNAMIC_START` and `TOKEN_DYNAMIC_END` 36 | 37 | * rowan 15.0 38 | 39 | * various other parsing fixes 40 | 41 | ## [v0.10.2] - 2022-06-14 42 | 43 | * Correctly parse every possible case of path-interpolation in Nix code ([#85](https://github.com/nix-community/rnix-parser/issues/85)). 44 | 45 | (from [@ncfavier](https://github.com/ncfavier)). 46 | 47 | ## [v0.10.1] - 2021-12-06 48 | 49 | ### Fixed 50 | 51 | * Trivia tokens (i.e. ` `, `\r`, `\n` etc.) are no longer misplaced around path-expressions. 52 | This is a regression from `v0.10.0` which would've broken `nixpkgs-fmt`. 53 | 54 | (from [@Ma27](https://github.com/Ma27)). 55 | 56 | ## [v0.10.0] - 2021-11-30 57 | 58 | ### Added 59 | 60 | * Support for the new path-interpolation syntax from Nix 2.4 was added. 
The parser 61 | now correctly detects 62 | 63 | ```nix 64 | ./foo/${bar} 65 | ``` 66 | 67 | as valid code and parses it down to 68 | 69 | ``` 70 | NODE_ROOT 0..13 { 71 | NODE_PATH_WITH_INTERPOL 0..12 { 72 | TOKEN_PATH("./foo/") 0..6 73 | NODE_STRING_INTERPOL 6..12 { 74 | TOKEN_INTERPOL_START("${") 6..8 75 | NODE_IDENT 8..11 { 76 | TOKEN_IDENT("bar") 8..11 77 | } 78 | TOKEN_INTERPOL_END("}") 11..12 79 | } 80 | } 81 | TOKEN_WHITESPACE("\n") 12..13 82 | } 83 | ``` 84 | 85 | Paths without interpolation are still within `NODE_LITERAL` for backwards-compatibility. 86 | 87 | (from [@Ma27](https://github.com/Ma27)). 88 | 89 | * `rnix::types::ParsedTypeError` now implements the `Debug`, `Copy`, `Clone`, `fmt::Display` traits (from [@NerdyPepper](https://github.com/NerdyPepper)). 90 | 91 | * During development, the parser can be tested like this: 92 | 93 | ``` 94 | $ cargo run --example from-stdin <<< 'builtins.map (x: x * x) [ 1 2 3 ]' 95 | ``` 96 | 97 | (from [@efx](https://github.com/efx)). 98 | 99 | ### Changed 100 | 101 | * A few more examples of invalid `inherit`-expressions are correctly marked as such (from [@oberblastmeister](https://github.com/oberblastmeister)). 102 | 103 | * `rnix::types::BinOp::operator()` now returns `Option` rather than unwrapping the 104 | value (from [@NerdyPepper](https://github.com/NerdyPepper)). 105 | 106 | If your code worked fine before, it will be sufficient to add a `.unwrap()` since this was 107 | what happened before within `::operator()`. 108 | 109 | * Duplicated arguments in the argument-pattern syntax are now marked as parser error. E.g. 110 | 111 | ```nix 112 | { a, a }: a 113 | ``` 114 | 115 | now produces an error (from [@Ma27](https://github.com/Ma27)). 116 | 117 | * Floats without trailing zeroes (e.g. `.5`) are no longer marked as syntax error (from [@Ma27](https://github.com/Ma27)). 
118 | 119 | ## [v0.9.1] - 2021-09-23 120 | 121 | ### Changed 122 | 123 | * Fixed a memory leak while parsing `inherit`-expressions with invalid syntax (from [@Ma27](https://github.com/Ma27/)). 124 | 125 | [Unreleased]: https://github.com/nix-community/rnix-parser/compare/v0.11.0...master 126 | [v0.11.0]: https://github.com/nix-community/rnix-parser/compare/v0.10.2...v0.11.0 127 | [v0.10.2]: https://github.com/nix-community/rnix-parser/compare/v0.10.1...v0.10.2 128 | [v0.10.1]: https://github.com/nix-community/rnix-parser/compare/v0.10.0...v0.10.1 129 | [v0.10.0]: https://github.com/nix-community/rnix-parser/compare/v0.9.1...v0.10.0 130 | [v0.9.1]: https://github.com/nix-community/rnix-parser/compare/v0.9.0...v0.9.1 131 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors = ["jD91mZM2 "] 3 | description = "A Nix parser written in Rust" 4 | edition = "2021" 5 | license = "MIT" 6 | name = "rnix" 7 | readme = "README.md" 8 | repository = "https://github.com/nix-community/rnix-parser" 9 | version = "0.12.0" 10 | 11 | [[bench]] 12 | harness = false 13 | name = "all-packages" 14 | 15 | [dependencies] 16 | rowan = "0.15.0" 17 | 18 | [dev-dependencies] 19 | criterion = "0.3.0" 20 | expect-test = "1.4.0" 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 jD91mZM2 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | 
furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # rnix-parser [![Crates.io](https://img.shields.io/crates/v/rnix.svg)](http://crates.io/crates/rnix) [![Chat on Matrix](https://matrix.to/img/matrix-badge.svg)](https://matrix.to/#/#rnix-lsp:matrix.org) 2 | 3 | rnix is a parser for the [Nix language](https://nixos.org/nix/) written in Rust. 4 | 5 | This can be used to manipulate the Nix AST and can for example be used for: 6 | 7 | - Interactively render Nix on a GUI 8 | - Formatting Nix code 9 | - Rename identifiers 10 | 11 | and a lot more! 12 | 13 | rnix nowadays uses [@matklad](https://github.com/matklad)'s 14 | [rowan](https://crates.io/crates/rowan) crate to ensure: 15 | 16 | - all span information is preserved, meaning you can use the AST to for 17 | example apply highlighting 18 | - printing out the AST prints out 100% the original code. 
This is not an 19 | over-exaggeration, even completely invalid nix code such as this README can 20 | be intact after a parsing session (though the AST will mark errnous nodes) 21 | - easy ways to walk the tree without resorting to recursion 22 | 23 | ## Demo 24 | 25 | Examples can be found in the `examples/` directory. 26 | 27 | You may also want to see 28 | [nix-explorer](https://gitlab.com/jD91mZM2/nix-explorer): An example 29 | that highlights AST nodes in Nix code. This demonstrates how 30 | whitespaces and commands are preserved. 31 | 32 | ## Hacking 33 | 34 | Tests can be run with `cargo test`. 35 | 36 | In order to update all `.expect`-files to the currently expected results, 37 | you may run `UPDATE_TESTS=1 cargo test`. 38 | 39 | You can parse Nix expressions from standard input using the `from-stdin` example. 40 | To try that, run the following in your shell: 41 | 42 | ```sh 43 | echo "[hello nix]" | cargo run --quiet --example from-stdin 44 | ``` 45 | 46 | ## Release Checklist 47 | 48 | * Ensure that all PRs that were scheduled for the release are merged (or optionally move 49 | them to another milestone). 50 | * Close the milestone for the release (if any). 51 | * Run `cargo test` on `master` (or the branch to release from) with all changes being pulled in. 52 | * Apply the following patch to [nixpkgs-fmt](https://github.com/nix-community/nixpkgs-fmt): 53 | ```diff 54 | diff --git a/Cargo.toml b/Cargo.toml 55 | index 0891350..edad471 100644 56 | --- a/Cargo.toml 57 | +++ b/Cargo.toml 58 | @@ -13,6 +13,9 @@ repository = "https://github.com/nix-community/nixpkgs-fmt" 59 | [workspace] 60 | members = [ "./wasm" ] 61 | 62 | +[patch.crates-io] 63 | +rnix = { path = "/home/ma27/Projects/rnix-parser" } 64 | + 65 | [dependencies] 66 | rnix = "0.9.0" 67 | smol_str = "0.1.17" 68 | ``` 69 | 70 | and run `cargo test`. 
71 | 72 | While it's planned to add [fuzzing to `rnix-parser` as well](https://github.com/nix-community/rnix-parser/issues/32), 73 | `nixpkgs-fmt` has a decent test-suite that would've helped to catch regressions in the past. 74 | 75 | __Note:__ API changes are OK (and fixes should be contributed to `nixpkgs-fmt`), behavioral changes 76 | are not unless explicitly discussed before. 77 | * Update the [CHANGELOG.md](https://github.com/nix-community/rnix-parser/blob/master/CHANGELOG.md). 78 | * Bump the version number in [Cargo.toml](https://github.com/nix-community/rnix-parser/blob/master/Cargo.toml) & re-run `cargo build` to refresh the lockfile. 79 | * Commit & run `nix build`. 80 | * Tag the release and push everything. 81 | * As soon as the CI has completed, run `cargo publish`. 82 | 83 | # RIP jd91mzm2 84 | 85 | Sadly, the original author of this project, [@jD91mZM2 has passed 86 | away](https://www.redox-os.org/news/open-source-mental-health/). His online 87 | presence was anonymous and what we have left is his code. This is but one of 88 | his many repos that he contributed to. 
89 | -------------------------------------------------------------------------------- /benches/all-packages.rs: -------------------------------------------------------------------------------- 1 | use criterion::{criterion_group, criterion_main, Criterion, Throughput}; 2 | 3 | fn all_packages(c: &mut Criterion) { 4 | let input = include_str!("all-packages.nix"); 5 | let mut group = c.benchmark_group("all-packages"); 6 | group.throughput(Throughput::Bytes(input.len() as u64)); 7 | group.sample_size(30); 8 | group.bench_with_input("all-packages", input, move |b, input| { 9 | b.iter(|| rnix::Root::parse(input)) 10 | }); 11 | group.finish(); 12 | } 13 | 14 | criterion_group!(benches, all_packages); 15 | criterion_main!(benches); 16 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | let 2 | flake-compat = builtins.fetchTarball { 3 | url = "https://github.com/edolstra/flake-compat/archive/12c64ca55c1014cdc1b16ed5a804aa8576601ff2.tar.gz"; 4 | sha256 = "0jm6nzb83wa6ai17ly9fzpqc40wg1viib8klq8lby54agpl213w5"; 5 | }; 6 | in 7 | (import flake-compat { 8 | src = ./.; 9 | }).defaultNix.default 10 | -------------------------------------------------------------------------------- /examples/dump-ast.rs: -------------------------------------------------------------------------------- 1 | use std::{env, fs}; 2 | 3 | fn main() { 4 | let mut iter = env::args().skip(1).peekable(); 5 | if iter.peek().is_none() { 6 | eprintln!("Usage: dump-ast "); 7 | return; 8 | } 9 | for file in iter { 10 | let content = match fs::read_to_string(file) { 11 | Ok(content) => content, 12 | Err(err) => { 13 | eprintln!("error reading file: {}", err); 14 | return; 15 | } 16 | }; 17 | let parse = rnix::Root::parse(&content); 18 | 19 | for error in parse.errors() { 20 | println!("error: {}", error); 21 | } 22 | println!("{:#?}", parse.tree()); 23 | } 24 | } 25 | 
-------------------------------------------------------------------------------- /examples/error-report.rs: -------------------------------------------------------------------------------- 1 | use rnix::parser::ParseError; 2 | use std::{env, fs}; 3 | 4 | fn main() { 5 | let file = match env::args().nth(1) { 6 | Some(file) => file, 7 | None => { 8 | eprintln!("Usage: error-report "); 9 | return; 10 | } 11 | }; 12 | let content = match fs::read_to_string(&file) { 13 | Ok(content) => content, 14 | Err(err) => { 15 | eprintln!("error reading file: {}", err); 16 | return; 17 | } 18 | }; 19 | let ast = rnix::Root::parse(&content); 20 | for error in ast.errors() { 21 | let range = match error { 22 | ParseError::Unexpected(range) => range, 23 | ParseError::UnexpectedExtra(range) => range, 24 | ParseError::UnexpectedWanted(_, range, _) => range, 25 | ParseError::UnexpectedDoubleBind(range) => range, 26 | ParseError::DuplicatedArgs(range, _) => range, 27 | err => { 28 | eprintln!("error: {}", err); 29 | continue; 30 | } 31 | }; 32 | eprintln!("----- ERROR -----"); 33 | eprintln!("{}", error); 34 | let start = usize::from(range.start()); 35 | let start_row = content[..start].lines().count() - 1; 36 | let start_line = content[..start].rfind('\n').map(|i| i + 1).unwrap_or(0); 37 | let start_col = content[start_line..start].chars().count(); 38 | let end = usize::from(range.end()); 39 | let end_row = content[..end].lines().count() - 1; 40 | let end_line = content[..end].rfind('\n').map(|i| i + 1).unwrap_or(0); 41 | let end_col = content[end_line..end].chars().count(); 42 | 43 | let mut line_len = 1; 44 | let mut line = end_row; 45 | while line >= 10 { 46 | line /= 10; 47 | line_len += 1; 48 | } 49 | 50 | let i = start_row.saturating_sub(1); 51 | for (i, line) in content.lines().enumerate().skip(i).take(end_row - i + 1) { 52 | println!("{:line_len$} {}", i + 1, line, line_len = line_len); 53 | if i >= start_row && i <= end_row { 54 | print!("{:line_len$} ", "", line_len = 
line_len); 55 | let mut end_col = if i == end_row { end_col } else { line.chars().count() }; 56 | if i == start_row { 57 | print!("{:indent$}", "", indent = start_col); 58 | end_col -= start_col; 59 | } 60 | for _ in 0..end_col { 61 | print!("^"); 62 | } 63 | println!(); 64 | } 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /examples/from-stdin.rs: -------------------------------------------------------------------------------- 1 | use std::{io, io::Read}; 2 | 3 | fn main() { 4 | let mut content = String::new(); 5 | io::stdin().read_to_string(&mut content).expect("could not read nix from stdin"); 6 | let ast = rnix::Root::parse(&content); 7 | 8 | for error in ast.errors() { 9 | println!("error: {}", error); 10 | } 11 | 12 | println!("{:#?}", ast.tree()); 13 | } 14 | -------------------------------------------------------------------------------- /examples/list-fns.rs: -------------------------------------------------------------------------------- 1 | use std::{env, error::Error, fs}; 2 | 3 | use rnix::{ 4 | ast::{self, AstToken, HasEntry}, 5 | match_ast, NodeOrToken, SyntaxNode, 6 | }; 7 | use rowan::ast::AstNode; 8 | 9 | macro_rules! single_match { 10 | ($expression:expr, $(|)? $( $pattern:pat_param )|+ $( if $guard: expr )? => $captured:expr) => { 11 | match $expression { 12 | $( $pattern )|+ $( if $guard )? 
=> Some($captured), 13 | _ => None 14 | } 15 | }; 16 | } 17 | 18 | fn main() -> Result<(), Box> { 19 | let file = match env::args().nth(1) { 20 | Some(file) => file, 21 | None => { 22 | eprintln!("Usage: list-fns "); 23 | return Ok(()); 24 | } 25 | }; 26 | let content = fs::read_to_string(&file)?; 27 | let ast = rnix::Root::parse(&content).ok()?; 28 | let expr = ast.expr().unwrap(); 29 | let set = match expr { 30 | ast::Expr::AttrSet(set) => set, 31 | _ => return Err("root isn't a set".into()), 32 | }; 33 | for entry in set.entries() { 34 | if let ast::Entry::AttrpathValue(attrpath_value) = entry { 35 | if let Some(ast::Expr::Lambda(lambda)) = attrpath_value.value() { 36 | let attrpath = attrpath_value.attrpath().unwrap(); 37 | let ident = attrpath.attrs().last().and_then(|attr| match attr { 38 | ast::Attr::Ident(ident) => Some(ident), 39 | _ => None, 40 | }); 41 | let s = ident.as_ref().map_or_else( 42 | || "error".to_string(), 43 | |ident| ident.ident_token().unwrap().text().to_string(), 44 | ); 45 | println!("Function name: {}", s); 46 | { 47 | let comments = comments_before(attrpath_value.syntax()); 48 | if !comments.is_empty() { 49 | println!("--> Doc: {comments}"); 50 | } 51 | } 52 | 53 | let mut value = Some(lambda); 54 | while let Some(lambda) = value { 55 | let s = lambda 56 | .param() 57 | .as_ref() 58 | .map_or_else(|| "error".to_string(), |param| param.to_string()); 59 | println!("-> Param: {}", s); 60 | { 61 | let comments = comments_before(lambda.syntax()); 62 | if !comments.is_empty() { 63 | println!("--> Doc: {comments}"); 64 | } 65 | } 66 | value = 67 | single_match!(lambda.body().unwrap(), ast::Expr::Lambda(lambda) => lambda); 68 | } 69 | println!(); 70 | } 71 | } 72 | } 73 | 74 | Ok(()) 75 | } 76 | 77 | fn comments_before(node: &SyntaxNode) -> String { 78 | node.siblings_with_tokens(rowan::Direction::Prev) 79 | // rowan always returns the first node for some reason 80 | .skip(1) 81 | .map_while(|element| match element { 82 | 
NodeOrToken::Token(token) => match_ast! { 83 | match token { 84 | ast::Comment(it) => Some(Some(it)), 85 | ast::Whitespace(_) => Some(None), 86 | _ => None, 87 | } 88 | }, 89 | _ => None, 90 | }) 91 | .flatten() 92 | .map(|s| s.text().trim().to_string()) 93 | .collect::>() 94 | .join("\n ") 95 | } 96 | -------------------------------------------------------------------------------- /examples/preserve.rs: -------------------------------------------------------------------------------- 1 | use std::{env, fs}; 2 | 3 | fn main() { 4 | let mut iter = env::args().skip(1).peekable(); 5 | if iter.peek().is_none() { 6 | eprintln!("Usage: preserve "); 7 | return; 8 | } 9 | for file in iter { 10 | let content = match fs::read_to_string(file) { 11 | Ok(content) => content, 12 | Err(err) => { 13 | eprintln!("error reading file: {}", err); 14 | return; 15 | } 16 | }; 17 | print!("{}", rnix::Root::parse(&content).tree()); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /examples/test-nixpkgs.rs: -------------------------------------------------------------------------------- 1 | use std::{env, error::Error, fs, path::Path}; 2 | 3 | fn main() -> Result<(), Box> { 4 | let path = env::var("NIX_PATH")?; 5 | let nixpkgs = 6 | path.split(':').find(|s| s.starts_with("nixpkgs=")).ok_or("no store path found")?; 7 | 8 | println!("Nix store path: {}", nixpkgs); 9 | 10 | recurse(Path::new(&nixpkgs["nixpkgs=".len()..])) 11 | } 12 | fn recurse(path: &Path) -> Result<(), Box> { 13 | if path.metadata()?.is_file() { 14 | if path.extension().and_then(|s| s.to_str()) != Some("nix") { 15 | return Ok(()); 16 | } 17 | 18 | println!("Checking {}", path.display()); 19 | let original = fs::read_to_string(path)?; 20 | if original.trim().is_empty() { 21 | return Ok(()); 22 | } 23 | 24 | let parsed = rnix::Root::parse(&original).tree().to_string(); 25 | if original != parsed { 26 | eprintln!("Original input does not match parsed output!"); 27 | 
println!("Input:"); 28 | println!("----------"); 29 | println!("{}", original); 30 | println!("----------"); 31 | println!("Output:"); 32 | println!("----------"); 33 | println!("{}", parsed); 34 | println!("----------"); 35 | return Err("parsing error".into()); 36 | } 37 | return Ok(()); 38 | } else { 39 | for entry in path.read_dir()? { 40 | let entry = entry?; 41 | if entry.file_type()?.is_symlink() { 42 | continue; 43 | } 44 | recurse(&entry.path())?; 45 | } 46 | } 47 | Ok(()) 48 | } 49 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "import-cargo": { 4 | "locked": { 5 | "lastModified": 1594305518, 6 | "narHash": "sha256-frtArgN42rSaEcEOYWg8sVPMUK+Zgch3c+wejcpX3DY=", 7 | "owner": "edolstra", 8 | "repo": "import-cargo", 9 | "rev": "25d40be4a73d40a2572e0cc233b83253554f06c5", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "edolstra", 14 | "repo": "import-cargo", 15 | "type": "github" 16 | } 17 | }, 18 | "nixpkgs": { 19 | "locked": { 20 | "lastModified": 1656461576, 21 | "narHash": "sha256-rlmmw6lIlkMQIiB+NsnO8wQYWTfle8TA41UREPLP5VY=", 22 | "owner": "NixOS", 23 | "repo": "nixpkgs", 24 | "rev": "cf3ab54b4afe2b7477faa1dd0b65bf74c055d70c", 25 | "type": "github" 26 | }, 27 | "original": { 28 | "owner": "NixOS", 29 | "ref": "nixos-unstable", 30 | "repo": "nixpkgs", 31 | "type": "github" 32 | } 33 | }, 34 | "root": { 35 | "inputs": { 36 | "import-cargo": "import-cargo", 37 | "nixpkgs": "nixpkgs", 38 | "utils": "utils" 39 | } 40 | }, 41 | "utils": { 42 | "locked": { 43 | "lastModified": 1656065134, 44 | "narHash": "sha256-oc6E6ByIw3oJaIyc67maaFcnjYOz1mMcOtHxbEf9NwQ=", 45 | "owner": "numtide", 46 | "repo": "flake-utils", 47 | "rev": "bee6a7250dd1b01844a2de7e02e4df7d8a0a206c", 48 | "type": "github" 49 | }, 50 | "original": { 51 | "owner": "numtide", 52 | "repo": "flake-utils", 53 | "type": "github" 54 | } 
55 | } 56 | }, 57 | "root": "root", 58 | "version": 7 59 | } 60 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Rust-based parser for nix files"; 3 | 4 | inputs = { 5 | utils.url = "github:numtide/flake-utils"; 6 | nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; 7 | import-cargo.url = "github:edolstra/import-cargo"; 8 | }; 9 | 10 | # first comment 11 | # second comment 12 | outputs = { self, nixpkgs, utils, import-cargo }: 13 | { 14 | overlay = final: prev: let 15 | target = final.rust.toRustTarget final.stdenv.hostPlatform; 16 | flags = "--release --offline --target ${target}"; 17 | inherit (import-cargo.builders) importCargo; 18 | in { 19 | rnix-parser = final.stdenv.mkDerivation { 20 | pname = "rnix-parser"; 21 | version = (builtins.fromTOML (builtins.readFile ./Cargo.toml)).package.version; 22 | src = final.lib.cleanSource ./.; 23 | nativeBuildInputs = with final; [ 24 | rustc cargo 25 | (importCargo { lockFile = ./Cargo.lock; inherit (final) pkgs; }).cargoHome 26 | ]; 27 | 28 | outputs = [ "out" "doc" ]; 29 | doCheck = true; 30 | 31 | buildPhase = '' 32 | cargo build ${flags} 33 | cargo doc ${flags} 34 | ''; 35 | 36 | checkPhase = '' 37 | cargo test ${flags} 38 | cargo bench 39 | ''; 40 | 41 | installPhase = '' 42 | mkdir -p $out/lib 43 | mkdir -p $doc/share/doc/rnix 44 | 45 | cp -r ./target/${target}/doc $doc/share/doc/rnix 46 | cp ./target/${target}/release/librnix.rlib $out/lib/ 47 | ''; 48 | }; 49 | }; 50 | } 51 | // utils.lib.eachDefaultSystem (system: 52 | let 53 | pkgs = import nixpkgs { inherit system; overlays = [ self.overlay ]; }; 54 | in 55 | rec { 56 | # `nix develop` 57 | devShell = pkgs.mkShell { 58 | buildInputs = with pkgs; [ rustfmt rustc cargo clippy ]; 59 | }; 60 | 61 | packages.rnix-parser = pkgs.rnix-parser; 62 | defaultPackage = packages.rnix-parser; 63 | } 64 | ); 65 | } 66 | 
-------------------------------------------------------------------------------- /fuzz/.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | corpus 3 | artifacts 4 | -------------------------------------------------------------------------------- /fuzz/Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 3 | version = 3 4 | 5 | [[package]] 6 | name = "arbitrary" 7 | version = "1.0.2" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "577b08a4acd7b99869f863c50011b01eb73424ccc798ecd996f2e24817adfca7" 10 | 11 | [[package]] 12 | name = "autocfg" 13 | version = "1.0.1" 14 | source = "registry+https://github.com/rust-lang/crates.io-index" 15 | checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" 16 | 17 | [[package]] 18 | name = "cc" 19 | version = "1.0.70" 20 | source = "registry+https://github.com/rust-lang/crates.io-index" 21 | checksum = "d26a6ce4b6a484fa3edb70f7efa6fc430fd2b87285fe8b84304fd0936faa0dc0" 22 | 23 | [[package]] 24 | name = "countme" 25 | version = "3.0.1" 26 | source = "registry+https://github.com/rust-lang/crates.io-index" 27 | checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" 28 | 29 | [[package]] 30 | name = "hashbrown" 31 | version = "0.12.3" 32 | source = "registry+https://github.com/rust-lang/crates.io-index" 33 | checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" 34 | 35 | [[package]] 36 | name = "libfuzzer-sys" 37 | version = "0.4.2" 38 | source = "registry+https://github.com/rust-lang/crates.io-index" 39 | checksum = "36a9a84a6e8b55dfefb04235e55edb2b9a2a18488fcae777a6bdaa6f06f1deb3" 40 | dependencies = [ 41 | "arbitrary", 42 | "cc", 43 | "once_cell", 44 | ] 45 | 46 | [[package]] 47 | name = "memoffset" 48 | version = "0.6.4" 49 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 50 | checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9" 51 | dependencies = [ 52 | "autocfg", 53 | ] 54 | 55 | [[package]] 56 | name = "once_cell" 57 | version = "1.8.0" 58 | source = "registry+https://github.com/rust-lang/crates.io-index" 59 | checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" 60 | 61 | [[package]] 62 | name = "rnix" 63 | version = "0.11.0-dev" 64 | dependencies = [ 65 | "rowan", 66 | ] 67 | 68 | [[package]] 69 | name = "rnix-fuzz" 70 | version = "0.0.0" 71 | dependencies = [ 72 | "libfuzzer-sys", 73 | "rnix", 74 | ] 75 | 76 | [[package]] 77 | name = "rowan" 78 | version = "0.15.8" 79 | source = "registry+https://github.com/rust-lang/crates.io-index" 80 | checksum = "e88acf7b001007e9e8c989fe7449f6601d909e5dd2c56399fc158977ad6c56e8" 81 | dependencies = [ 82 | "countme", 83 | "hashbrown", 84 | "memoffset", 85 | "rustc-hash", 86 | "text-size", 87 | ] 88 | 89 | [[package]] 90 | name = "rustc-hash" 91 | version = "1.1.0" 92 | source = "registry+https://github.com/rust-lang/crates.io-index" 93 | checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" 94 | 95 | [[package]] 96 | name = "text-size" 97 | version = "1.1.0" 98 | source = "registry+https://github.com/rust-lang/crates.io-index" 99 | checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a" 100 | -------------------------------------------------------------------------------- /fuzz/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rnix-fuzz" 3 | version = "0.0.0" 4 | authors = ["Automatically generated"] 5 | publish = false 6 | edition = "2021" 7 | 8 | [package.metadata] 9 | cargo-fuzz = true 10 | 11 | [dependencies] 12 | libfuzzer-sys = "0.4" 13 | 14 | [dependencies.rnix] 15 | path = ".." 
16 | 17 | # Prevent this from interfering with workspaces 18 | [workspace] 19 | members = ["."] 20 | 21 | [[bin]] 22 | name = "parser" 23 | path = "fuzz_targets/parser.rs" 24 | test = false 25 | doc = false 26 | 27 | [[bin]] 28 | name = "lexer" 29 | path = "fuzz_targets/lexer.rs" 30 | test = false 31 | doc = false 32 | 33 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/lexer.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use libfuzzer_sys::fuzz_target; 3 | 4 | fuzz_target!(|data: &[u8]| { 5 | if let Ok(text) = std::str::from_utf8(data) { 6 | let _ = rnix::tokenizer::tokenize(text); 7 | } 8 | }); 9 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/parser.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use libfuzzer_sys::fuzz_target; 3 | 4 | use std::io::{self, Write}; 5 | 6 | fuzz_target!(|data: &[u8]| { 7 | if let Ok(text) = std::str::from_utf8(data) { 8 | let _ = rnix::Root::parse(text); 9 | } 10 | }); 11 | 12 | -------------------------------------------------------------------------------- /fuzz/rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "nightly" 3 | profile = "minimal" 4 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | use_small_heuristics = "Max" 2 | -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | let 2 | flake-compat = builtins.fetchTarball { 3 | url = "https://github.com/edolstra/flake-compat/archive/99f1c2157fba4bfe6211a321fd0ee43199025dbf.tar.gz"; 4 | sha256 = 
"0x2jn3vrawwv9xp15674wjz9pixwjyj3j771izayl962zziivbx2"; 5 | }; 6 | in 7 | (import flake-compat { 8 | src = ./.; 9 | }).shellNix.default 10 | -------------------------------------------------------------------------------- /src/ast.rs: -------------------------------------------------------------------------------- 1 | //! Provides a type system for the AST, in some sense 2 | 3 | mod expr_ext; 4 | mod interpol; 5 | mod nodes; 6 | mod operators; 7 | mod path_util; 8 | mod str_util; 9 | mod tokens; 10 | 11 | use crate::{NixLanguage, SyntaxKind, SyntaxToken}; 12 | 13 | pub use expr_ext::LiteralKind; 14 | pub use interpol::*; 15 | pub use nodes::*; 16 | pub use operators::{BinOpKind, UnaryOpKind}; 17 | pub use tokens::*; 18 | 19 | pub trait AstNode: rowan::ast::AstNode {} 20 | 21 | impl AstNode for T where T: rowan::ast::AstNode {} 22 | 23 | pub trait AstToken { 24 | fn can_cast(from: SyntaxKind) -> bool 25 | where 26 | Self: Sized; 27 | 28 | /// Cast an untyped token into this strongly-typed token. This will return 29 | /// None if the type was not correct. 
30 | fn cast(from: SyntaxToken) -> Option 31 | where 32 | Self: Sized; 33 | 34 | fn syntax(&self) -> &SyntaxToken; 35 | } 36 | 37 | mod support { 38 | use rowan::ast::AstChildren; 39 | 40 | use super::{AstNode, AstToken}; 41 | use crate::{SyntaxElement, SyntaxKind, SyntaxToken}; 42 | 43 | pub(super) fn nth(parent: &N, n: usize) -> Option { 44 | parent.syntax().children().flat_map(NN::cast).nth(n) 45 | } 46 | 47 | pub(super) fn children(parent: &N) -> AstChildren { 48 | rowan::ast::support::children(parent.syntax()) 49 | } 50 | 51 | pub(super) fn token(parent: &N) -> Option { 52 | children_tokens(parent).nth(0) 53 | } 54 | 55 | /// Token untyped 56 | pub(super) fn token_u(parent: &N, kind: SyntaxKind) -> Option { 57 | children_tokens_u(parent).find(|it| it.kind() == kind) 58 | } 59 | 60 | pub(super) fn children_tokens(parent: &N) -> impl Iterator { 61 | parent 62 | .syntax() 63 | .children_with_tokens() 64 | .filter_map(SyntaxElement::into_token) 65 | .filter_map(T::cast) 66 | } 67 | 68 | pub(super) fn children_tokens_u(parent: &N) -> impl Iterator { 69 | parent.syntax().children_with_tokens().filter_map(SyntaxElement::into_token) 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/ast/expr_ext.rs: -------------------------------------------------------------------------------- 1 | use crate::ast::{self, support::*}; 2 | 3 | // this is a separate type because it mixes tokens and nodes 4 | // for example, a Str is a node because it can contain nested subexpressions but an Integer is a token. 5 | // This means that we have to write it out manually instead of using the macro to create the type for us. 
6 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 7 | pub enum LiteralKind { 8 | Float(ast::Float), 9 | Integer(ast::Integer), 10 | Uri(ast::Uri), 11 | } 12 | 13 | impl ast::Literal { 14 | pub fn kind(&self) -> LiteralKind { 15 | if let Some(it) = token(self) { 16 | return LiteralKind::Float(it); 17 | } 18 | 19 | if let Some(it) = token(self) { 20 | return LiteralKind::Integer(it); 21 | } 22 | 23 | if let Some(it) = token(self) { 24 | return LiteralKind::Uri(it); 25 | } 26 | 27 | unreachable!() 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/ast/interpol.rs: -------------------------------------------------------------------------------- 1 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 2 | pub enum InterpolPart { 3 | Literal(T), 4 | Interpolation(super::Interpol), 5 | } 6 | -------------------------------------------------------------------------------- /src/ast/nodes.rs: -------------------------------------------------------------------------------- 1 | use crate::{NixLanguage, SyntaxKind, SyntaxKind::*, SyntaxNode, SyntaxToken}; 2 | 3 | use super::{operators::BinOpKind, support::*, AstNode, UnaryOpKind}; 4 | use rowan::ast::{AstChildren, AstNode as OtherAstNode}; 5 | 6 | pub trait HasEntry: AstNode { 7 | fn entries(&self) -> AstChildren 8 | where 9 | Self: Sized, 10 | { 11 | children(self) 12 | } 13 | 14 | fn attrpath_values(&self) -> AstChildren 15 | where 16 | Self: Sized, 17 | { 18 | children(self) 19 | } 20 | 21 | fn inherits(&self) -> AstChildren 22 | where 23 | Self: Sized, 24 | { 25 | children(self) 26 | } 27 | } 28 | 29 | macro_rules! 
node { 30 | ( 31 | #[from($kind:ident)] 32 | $(#[$meta:meta])* 33 | struct $name:ident; 34 | ) => { 35 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 36 | $(#[$meta])* 37 | pub struct $name(pub(super) SyntaxNode); 38 | 39 | impl std::fmt::Display for $name { 40 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 41 | std::fmt::Display::fmt(self.syntax(), f) 42 | } 43 | } 44 | 45 | impl rowan::ast::AstNode for $name { 46 | type Language = crate::NixLanguage; 47 | 48 | fn can_cast(kind: crate::SyntaxKind) -> bool { 49 | kind == $kind 50 | } 51 | 52 | fn cast(from: SyntaxNode) -> Option { 53 | if Self::can_cast(from.kind()) { 54 | Some(Self(from)) 55 | } else { 56 | None 57 | } 58 | } 59 | fn syntax(&self) -> &SyntaxNode { 60 | &self.0 61 | } 62 | } 63 | 64 | impl $name { 65 | pub const KIND: SyntaxKind = $kind; 66 | } 67 | }; 68 | ( 69 | #[from($($variant:ident),* $(,)?)] 70 | $(#[$meta:meta])* 71 | enum $name:ident; 72 | ) => { 73 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 74 | $(#[$meta])* 75 | pub enum $name { 76 | $( 77 | $variant($variant), 78 | )* 79 | } 80 | 81 | impl std::fmt::Display for $name { 82 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 83 | std::fmt::Display::fmt(self.syntax(), f) 84 | } 85 | } 86 | 87 | impl rowan::ast::AstNode for $name { 88 | type Language = NixLanguage; 89 | 90 | fn can_cast(kind: SyntaxKind) -> bool { 91 | matches!(kind, $($variant::KIND)|*) 92 | } 93 | 94 | fn cast(syntax: SyntaxNode) -> Option { 95 | let res = match syntax.kind() { 96 | $( 97 | $variant::KIND => $name::$variant($variant(syntax)) 98 | ),*, 99 | _ => return None, 100 | }; 101 | Some(res) 102 | } 103 | 104 | fn syntax(&self) -> &SyntaxNode { 105 | match self { 106 | $( 107 | $name::$variant(it) => &it.0, 108 | )* 109 | } 110 | } 111 | } 112 | 113 | $( 114 | impl From<$variant> for $name { 115 | fn from(node: $variant) -> $name { $name::$variant(node) } 116 | } 117 | 118 | impl TryFrom<$name> for $variant { 119 | 
type Error = (); 120 | 121 | fn try_from(node: $name) -> Result<$variant, ()> { 122 | match node { 123 | $name::$variant(it) => Ok(it), 124 | _ => Err(()), 125 | } 126 | } 127 | } 128 | )* 129 | }; 130 | } 131 | 132 | macro_rules! tg { 133 | ( 134 | $(#[doc=$doc:tt])? 135 | $name:ident, 136 | $token:tt 137 | ) => { 138 | $(#[doc=$doc])? 139 | pub fn $name(&self) -> Option { 140 | token_u(self, T![$token]) 141 | } 142 | }; 143 | } 144 | 145 | macro_rules! ng { 146 | ( 147 | $(#[$meta:meta])* 148 | $name:ident, 149 | $ty:ty, 150 | $i:expr 151 | ) => { 152 | $(#[$meta])* 153 | pub fn $name(&self) -> Option<$ty> { 154 | nth(self, $i) 155 | } 156 | }; 157 | ( 158 | $(#[$meta:meta])* 159 | $name:ident, 160 | [$ty:ty] 161 | ) => { 162 | $(#[$meta])* 163 | pub fn $name(&self) -> AstChildren<$ty> { 164 | children(self) 165 | } 166 | }; 167 | } 168 | 169 | node! { #[from(NODE_LITERAL)] struct Literal; } 170 | 171 | node! { 172 | #[from( 173 | Apply, 174 | Assert, 175 | Error, 176 | IfElse, 177 | Select, 178 | Str, 179 | Path, 180 | Literal, 181 | Lambda, 182 | LegacyLet, 183 | LetIn, 184 | List, 185 | BinOp, 186 | Paren, 187 | Root, 188 | AttrSet, 189 | UnaryOp, 190 | Ident, 191 | With, 192 | HasAttr, 193 | )] 194 | /// An expression. The fundamental nix ast type. 195 | enum Expr; 196 | } 197 | 198 | node! { 199 | #[from( 200 | Ident, 201 | Dynamic, 202 | Str, 203 | )] 204 | enum Attr; 205 | } 206 | 207 | node! { #[from(NODE_IDENT)] struct Ident; } 208 | 209 | impl Ident { 210 | // A NODE_IDENT may either have a nested TOKEN_OR if the identifier was "or" or TOKEN_IDENT for everything else 211 | tg! { ident_token, TOKEN_IDENT } 212 | } 213 | 214 | node! { #[from(NODE_APPLY)] struct Apply; } 215 | 216 | impl Apply { 217 | ng! { lambda, Expr, 0 } 218 | ng! { argument, Expr, 1 } 219 | } 220 | 221 | node! { #[from(NODE_ASSERT)] struct Assert; } 222 | 223 | impl Assert { 224 | tg! { assert_token, assert } 225 | ng! { condition, Expr, 0 } 226 | ng! 
{ body, Expr, 1 } 227 | } 228 | 229 | node! { #[from(NODE_ATTRPATH)] struct Attrpath; } 230 | 231 | impl Attrpath { 232 | ng! { attrs, [Attr] } 233 | } 234 | 235 | node! { #[from(NODE_DYNAMIC)] struct Dynamic; } 236 | 237 | impl Dynamic { 238 | tg! { interpol_start_token, TOKEN_INTERPOL_START } 239 | ng! { expr, Expr, 0 } 240 | tg! { interpol_end_token, TOKEN_INTERPOL_END } 241 | } 242 | 243 | node! { #[from(NODE_ERROR)] struct Error; } 244 | 245 | node! { #[from(NODE_IF_ELSE)] struct IfElse; } 246 | 247 | impl IfElse { 248 | tg! { if_token, if } 249 | ng! { condition, Expr, 0 } 250 | tg! { then_token, then } 251 | ng! { body, Expr, 1 } 252 | tg! { else_token, else } 253 | ng! { else_body, Expr, 2 } 254 | } 255 | 256 | node! { #[from(NODE_SELECT)] struct Select; } 257 | 258 | impl Select { 259 | ng! { expr, Expr, 0 } 260 | tg! { dot_token, . } 261 | ng! { attrpath, Attrpath, 0 } 262 | tg! { or_token, or } 263 | ng! { default_expr, Expr, 1 } 264 | } 265 | 266 | node! { #[from(NODE_INHERIT)] struct Inherit; } 267 | 268 | impl Inherit { 269 | tg! { inherit_token, inherit } 270 | ng! { from, InheritFrom, 0 } 271 | ng! { attrs, [Attr] } 272 | } 273 | 274 | node! { #[from(NODE_INHERIT_FROM)] struct InheritFrom; } 275 | 276 | impl InheritFrom { 277 | tg! { l_paren_token, '(' } 278 | ng! { expr, Expr, 0 } 279 | tg! { r_paren_token, ')' } 280 | } 281 | 282 | node! { #[from(NODE_PATH)] struct Path; } 283 | 284 | node! { #[from(NODE_STRING)] struct Str; } 285 | 286 | node! { #[from(NODE_INTERPOL)] struct Interpol; } 287 | 288 | impl Interpol { 289 | ng! { expr, Expr, 0 } 290 | } 291 | 292 | node! { #[from(NODE_LAMBDA)] struct Lambda; } 293 | 294 | impl Lambda { 295 | ng! { param, Param, 0 } 296 | tg! { token_colon, : } 297 | ng! { body, Expr, 0 } 298 | } 299 | 300 | node! { #[from(NODE_LEGACY_LET)] struct LegacyLet; } 301 | 302 | impl HasEntry for LegacyLet {} 303 | 304 | impl LegacyLet { 305 | tg! { let_token, let } 306 | tg! { curly_open_token, '{' } 307 | tg! 
{ curly_close_token, '}' } 308 | } 309 | 310 | node! { #[from(NODE_LET_IN)] struct LetIn; } 311 | 312 | impl HasEntry for LetIn {} 313 | 314 | impl LetIn { 315 | tg! { let_token, let } 316 | tg! { in_token, in } 317 | ng! { body, Expr, 0 } 318 | } 319 | 320 | node! { #[from(NODE_LIST)] struct List; } 321 | 322 | impl List { 323 | tg! { l_brack_token, '[' } 324 | ng! { items, [Expr] } 325 | tg! { r_brack_token, ']' } 326 | } 327 | 328 | node! { #[from(NODE_BIN_OP)] struct BinOp; } 329 | 330 | impl BinOp { 331 | ng! { lhs, Expr, 0 } 332 | 333 | pub fn operator(&self) -> Option { 334 | children_tokens_u(self).find_map(|t| BinOpKind::from_kind(t.kind())) 335 | } 336 | 337 | ng! { rhs, Expr, 1 } 338 | } 339 | 340 | node! { #[from(NODE_PAREN)] struct Paren; } 341 | 342 | impl Paren { 343 | tg! { l_paren_token, '(' } 344 | ng! { expr, Expr, 0 } 345 | tg! { r_paren_token, ')' } 346 | } 347 | 348 | node! { #[from(NODE_PAT_BIND)] struct PatBind; } 349 | 350 | impl PatBind { 351 | ng! { ident, Ident, 0 } 352 | } 353 | 354 | node! { #[from(NODE_PAT_ENTRY)] struct PatEntry; } 355 | 356 | impl PatEntry { 357 | ng! { ident, Ident, 0 } 358 | tg! { question_token, ? } 359 | ng! { default, Expr, 1 } 360 | } 361 | 362 | node! { #[from(NODE_IDENT_PARAM)] struct IdentParam; } 363 | 364 | impl IdentParam { 365 | ng! { ident, Ident, 0 } 366 | } 367 | 368 | node! { 369 | #[from( 370 | Pattern, 371 | IdentParam, 372 | )] 373 | enum Param; 374 | } 375 | 376 | node! { #[from(NODE_PATTERN)] struct Pattern; } 377 | 378 | impl Pattern { 379 | tg! { at_token, @ } 380 | ng! { pat_entries, [PatEntry] } 381 | tg! { ellipsis_token, ... } 382 | ng! { pat_bind, PatBind, 0 } 383 | } 384 | 385 | node! { #[from(NODE_ROOT)] struct Root; } 386 | 387 | impl Root { 388 | ng! { expr, Expr, 0 } 389 | } 390 | 391 | node! { #[from(NODE_ATTR_SET)] struct AttrSet; } 392 | 393 | impl HasEntry for AttrSet {} 394 | 395 | impl AttrSet { 396 | tg! { rec_token, rec } 397 | tg! { l_curly_token, '{' } 398 | tg! 
{ r_curly_token, '}' } 399 | } 400 | 401 | node! { 402 | #[from( 403 | Inherit, 404 | AttrpathValue, 405 | )] 406 | enum Entry; 407 | } 408 | 409 | node! { #[from(NODE_ATTRPATH_VALUE)] struct AttrpathValue; } 410 | 411 | impl AttrpathValue { 412 | ng! { attrpath, Attrpath, 0 } 413 | tg! { assign_token, = } 414 | ng! { value, Expr, 0 } 415 | } 416 | 417 | node! { #[from(NODE_UNARY_OP)] struct UnaryOp; } 418 | 419 | impl UnaryOp { 420 | pub fn operator(&self) -> Option { 421 | children_tokens_u(self).find_map(|t| UnaryOpKind::from_kind(t.kind())) 422 | } 423 | ng! { expr, Expr, 0 } 424 | } 425 | 426 | node! { #[from(NODE_WITH)] struct With; } 427 | 428 | impl With { 429 | tg! { with_token, with } 430 | ng! { namespace, Expr, 0 } 431 | tg! { semicolon_token, ; } 432 | ng! { body, Expr, 1 } 433 | } 434 | 435 | node! { #[from(NODE_HAS_ATTR)] struct HasAttr; } 436 | 437 | impl HasAttr { 438 | ng! { expr, Expr, 0 } 439 | tg! { question_token, ? } 440 | ng! { attrpath, Attrpath, 0 } 441 | } 442 | -------------------------------------------------------------------------------- /src/ast/operators.rs: -------------------------------------------------------------------------------- 1 | use crate::SyntaxKind::{self, *}; 2 | 3 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] 4 | pub enum BinOpKind { 5 | Concat, 6 | Update, 7 | 8 | Add, 9 | Sub, 10 | Mul, 11 | Div, 12 | 13 | And, 14 | Equal, 15 | Implication, 16 | Less, 17 | LessOrEq, 18 | More, 19 | MoreOrEq, 20 | NotEqual, 21 | Or, 22 | PipeRight, 23 | PipeLeft, 24 | } 25 | 26 | impl BinOpKind { 27 | /// Get the operation kind from a SyntaxKind in the AST 28 | pub fn from_kind(token: SyntaxKind) -> Option { 29 | match token { 30 | TOKEN_CONCAT => Some(BinOpKind::Concat), 31 | TOKEN_UPDATE => Some(BinOpKind::Update), 32 | 33 | TOKEN_ADD => Some(BinOpKind::Add), 34 | TOKEN_SUB => Some(BinOpKind::Sub), 35 | TOKEN_MUL => Some(BinOpKind::Mul), 36 | TOKEN_DIV => Some(BinOpKind::Div), 37 | 38 | TOKEN_AND_AND => 
Some(BinOpKind::And), 39 | TOKEN_EQUAL => Some(BinOpKind::Equal), 40 | TOKEN_IMPLICATION => Some(BinOpKind::Implication), 41 | TOKEN_LESS => Some(BinOpKind::Less), 42 | TOKEN_LESS_OR_EQ => Some(BinOpKind::LessOrEq), 43 | TOKEN_MORE => Some(BinOpKind::More), 44 | TOKEN_MORE_OR_EQ => Some(BinOpKind::MoreOrEq), 45 | TOKEN_NOT_EQUAL => Some(BinOpKind::NotEqual), 46 | TOKEN_OR_OR => Some(BinOpKind::Or), 47 | 48 | TOKEN_PIPE_RIGHT => Some(BinOpKind::PipeRight), 49 | TOKEN_PIPE_LEFT => Some(BinOpKind::PipeLeft), 50 | 51 | _ => None, 52 | } 53 | } 54 | } 55 | 56 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] 57 | pub enum UnaryOpKind { 58 | Invert, 59 | Negate, 60 | } 61 | 62 | impl UnaryOpKind { 63 | /// Get the operation kind from a token in the AST 64 | pub fn from_kind(kind: SyntaxKind) -> Option { 65 | match kind { 66 | TOKEN_INVERT => Some(UnaryOpKind::Invert), 67 | TOKEN_SUB => Some(UnaryOpKind::Negate), 68 | _ => None, 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/ast/path_util.rs: -------------------------------------------------------------------------------- 1 | use crate::{ast::AstToken, kinds::SyntaxKind::*}; 2 | use rowan::{ast::AstNode as OtherAstNode, NodeOrToken}; 3 | 4 | use crate::ast; 5 | 6 | use super::{InterpolPart, PathContent}; 7 | 8 | impl ast::nodes::Path { 9 | pub fn parts(&self) -> impl Iterator> { 10 | self.syntax().children_with_tokens().map(|child| match child { 11 | NodeOrToken::Token(token) => { 12 | assert_eq!(token.kind(), TOKEN_PATH); 13 | InterpolPart::Literal(PathContent::cast(token).unwrap()) 14 | } 15 | NodeOrToken::Node(node) => { 16 | InterpolPart::Interpolation(ast::Interpol::cast(node.clone()).unwrap()) 17 | } 18 | }) 19 | } 20 | } 21 | 22 | #[cfg(test)] 23 | mod tests { 24 | use rowan::ast::AstNode; 25 | 26 | use crate::{ 27 | ast::{self, AstToken, InterpolPart, PathContent}, 28 | Root, 29 | }; 30 | 31 | #[test] 32 | fn parts() { 33 | fn 
assert_eq_ast_ctn(it: &mut dyn Iterator>, x: &str) { 34 | let tmp = it.next().expect("unexpected EOF"); 35 | if let InterpolPart::Interpolation(astn) = tmp { 36 | assert_eq!(astn.expr().unwrap().syntax().to_string(), x); 37 | } else { 38 | unreachable!("unexpected literal {:?}", tmp); 39 | } 40 | } 41 | 42 | fn assert_eq_lit(it: &mut dyn Iterator>, x: &str) { 43 | let tmp = it.next().expect("unexpected EOF"); 44 | if let InterpolPart::Literal(astn) = tmp { 45 | assert_eq!(astn.syntax().text(), x); 46 | } else { 47 | unreachable!("unexpected interpol {:?}", tmp); 48 | } 49 | } 50 | 51 | let inp = r#"./a/b/${"c"}/${d}/e/f"#; 52 | let expr = Root::parse(inp).ok().unwrap().expr().unwrap(); 53 | match expr { 54 | ast::Expr::Path(p) => { 55 | let mut it = p.parts(); 56 | assert_eq_lit(&mut it, "./a/b/"); 57 | assert_eq_ast_ctn(&mut it, "\"c\""); 58 | assert_eq_lit(&mut it, "/"); 59 | assert_eq_ast_ctn(&mut it, "d"); 60 | assert_eq_lit(&mut it, "/e/f"); 61 | } 62 | _ => unreachable!(), 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/ast/str_util.rs: -------------------------------------------------------------------------------- 1 | use crate::kinds::SyntaxKind::*; 2 | use rowan::{ast::AstNode as OtherAstNode, NodeOrToken}; 3 | 4 | use crate::ast; 5 | 6 | use super::{support::children_tokens_u, AstToken, InterpolPart, StrContent}; 7 | 8 | impl ast::Str { 9 | pub fn parts(&self) -> impl Iterator> { 10 | self.syntax().children_with_tokens().filter_map(|child| match child { 11 | NodeOrToken::Token(token) if token.kind() == TOKEN_STRING_CONTENT => { 12 | Some(InterpolPart::Literal(StrContent::cast(token).unwrap())) 13 | } 14 | NodeOrToken::Token(token) => { 15 | assert!(token.kind() == TOKEN_STRING_START || token.kind() == TOKEN_STRING_END); 16 | None 17 | } 18 | NodeOrToken::Node(node) => { 19 | assert_eq!(node.kind(), NODE_INTERPOL); 20 | 
Some(InterpolPart::Interpolation(ast::Interpol::cast(node.clone()).unwrap())) 21 | } 22 | }) 23 | } 24 | 25 | pub fn normalized_parts(&self) -> Vec> { 26 | let multiline = children_tokens_u(self).next().map_or(false, |t| t.text() == "''"); 27 | let mut is_first_literal = true; 28 | let mut at_start_of_line = true; 29 | let mut min_indent = 1000000; 30 | let mut cur_indent = 0; 31 | let mut n = 0; 32 | let mut first_is_literal = false; 33 | 34 | let parts: Vec> = self.parts().collect(); 35 | 36 | if multiline { 37 | for part in &parts { 38 | match part { 39 | InterpolPart::Interpolation(_) => { 40 | if at_start_of_line { 41 | at_start_of_line = false; 42 | min_indent = min_indent.min(cur_indent); 43 | } 44 | n += 1; 45 | } 46 | InterpolPart::Literal(literal) => { 47 | let mut token_text = literal.syntax().text(); 48 | 49 | if n == 0 { 50 | first_is_literal = true; 51 | } 52 | 53 | if is_first_literal && first_is_literal { 54 | is_first_literal = false; 55 | if let Some(p) = token_text.find('\n') { 56 | if token_text[0..p].chars().all(|c| c == ' ') { 57 | token_text = &token_text[p + 1..] 
58 | } 59 | } 60 | } 61 | 62 | for c in token_text.chars() { 63 | if at_start_of_line { 64 | if c == ' ' { 65 | cur_indent += 1; 66 | } else if c == '\n' { 67 | cur_indent = 0; 68 | } else { 69 | at_start_of_line = false; 70 | min_indent = min_indent.min(cur_indent); 71 | } 72 | } else if c == '\n' { 73 | at_start_of_line = true; 74 | cur_indent = 0; 75 | } 76 | } 77 | 78 | n += 1; 79 | } 80 | } 81 | } 82 | } 83 | 84 | let mut normalized_parts = Vec::new(); 85 | let mut cur_dropped = 0; 86 | let mut i = 0; 87 | is_first_literal = true; 88 | at_start_of_line = true; 89 | 90 | for part in parts { 91 | match part { 92 | InterpolPart::Interpolation(interpol) => { 93 | at_start_of_line = false; 94 | cur_dropped = 0; 95 | normalized_parts.push(InterpolPart::Interpolation(interpol)); 96 | i += 1; 97 | } 98 | InterpolPart::Literal(literal) => { 99 | let mut token_text = literal.syntax().text(); 100 | 101 | if multiline { 102 | if is_first_literal && first_is_literal { 103 | is_first_literal = false; 104 | if let Some(p) = token_text.find('\n') { 105 | if token_text[0..p].chars().all(|c| c == ' ') { 106 | token_text = &token_text[p + 1..]; 107 | if token_text.is_empty() { 108 | i += 1; 109 | continue; 110 | } 111 | } 112 | } 113 | } 114 | 115 | let mut str = String::new(); 116 | for c in token_text.chars() { 117 | if at_start_of_line { 118 | if c == ' ' { 119 | if cur_dropped >= min_indent { 120 | str.push(c); 121 | } 122 | cur_dropped += 1; 123 | } else if c == '\n' { 124 | cur_dropped = 0; 125 | str.push(c); 126 | } else { 127 | at_start_of_line = false; 128 | cur_dropped = 0; 129 | str.push(c); 130 | } 131 | } else { 132 | str.push(c); 133 | if c == '\n' { 134 | at_start_of_line = true; 135 | } 136 | } 137 | } 138 | 139 | if i == n - 1 { 140 | if let Some(p) = str.rfind('\n') { 141 | if str[p + 1..].chars().all(|c| c == ' ') { 142 | str.truncate(p + 1); 143 | } 144 | } 145 | } 146 | 147 | normalized_parts.push(InterpolPart::Literal(unescape(&str, multiline))); 148 | i += 
1; 149 | } else { 150 | normalized_parts 151 | .push(InterpolPart::Literal(unescape(token_text, multiline))); 152 | } 153 | } 154 | } 155 | } 156 | 157 | normalized_parts 158 | } 159 | } 160 | 161 | /// Interpret escape sequences in the nix string and return the converted value 162 | pub fn unescape(input: &str, multiline: bool) -> String { 163 | let mut output = String::new(); 164 | let mut input = input.chars().peekable(); 165 | loop { 166 | match input.next() { 167 | None => break, 168 | Some('"') if !multiline => break, 169 | Some('\\') if !multiline => match input.next() { 170 | None => break, 171 | Some('n') => output.push('\n'), 172 | Some('r') => output.push('\r'), 173 | Some('t') => output.push('\t'), 174 | Some(c) => output.push(c), 175 | }, 176 | Some('\'') if multiline => match input.next() { 177 | None => { 178 | output.push('\''); 179 | } 180 | Some('\'') => match input.peek() { 181 | Some('\'') => { 182 | input.next().unwrap(); 183 | output.push_str("''"); 184 | } 185 | Some('$') => { 186 | input.next().unwrap(); 187 | output.push('$'); 188 | } 189 | Some('\\') => { 190 | input.next().unwrap(); 191 | match input.next() { 192 | None => break, 193 | Some('n') => output.push('\n'), 194 | Some('r') => output.push('\r'), 195 | Some('t') => output.push('\t'), 196 | Some(c) => output.push(c), 197 | } 198 | } 199 | _ => break, 200 | }, 201 | Some(c) => { 202 | output.push('\''); 203 | output.push(c); 204 | } 205 | }, 206 | Some(c) => output.push(c), 207 | } 208 | } 209 | output 210 | } 211 | 212 | #[cfg(test)] 213 | mod tests { 214 | use crate::Root; 215 | 216 | use super::*; 217 | 218 | #[test] 219 | fn string_unescapes() { 220 | assert_eq!(unescape(r#"Hello\n\"World\" :D"#, false), "Hello\n\"World\" :D"); 221 | assert_eq!(unescape(r#"\"Hello\""#, false), "\"Hello\""); 222 | 223 | assert_eq!(unescape(r#"Hello''\n'''World''' :D"#, true), "Hello\n''World'' :D"); 224 | assert_eq!(unescape(r#""Hello""#, true), "\"Hello\""); 225 | } 226 | #[test] 227 | fn 
parts_leading_ws() { 228 | let inp = "''\n hello\n world''"; 229 | let expr = Root::parse(inp).ok().unwrap().expr().unwrap(); 230 | match expr { 231 | ast::Expr::Str(str) => { 232 | assert_eq!( 233 | str.normalized_parts(), 234 | vec![InterpolPart::Literal("hello\nworld".to_string())] 235 | ) 236 | } 237 | _ => unreachable!(), 238 | } 239 | } 240 | #[test] 241 | fn parts_trailing_ws_single_line() { 242 | let inp = "''hello ''"; 243 | let expr = Root::parse(inp).ok().unwrap().expr().unwrap(); 244 | match expr { 245 | ast::Expr::Str(str) => { 246 | assert_eq!( 247 | str.normalized_parts(), 248 | vec![InterpolPart::Literal("hello ".to_string())] 249 | ) 250 | } 251 | _ => unreachable!(), 252 | } 253 | } 254 | #[test] 255 | fn parts_trailing_ws_multiline() { 256 | let inp = "''hello\n ''"; 257 | let expr = Root::parse(inp).ok().unwrap().expr().unwrap(); 258 | match expr { 259 | ast::Expr::Str(str) => { 260 | assert_eq!( 261 | str.normalized_parts(), 262 | vec![InterpolPart::Literal("hello\n".to_string())] 263 | ) 264 | } 265 | _ => unreachable!(), 266 | } 267 | } 268 | #[test] 269 | fn parts() { 270 | use crate::{NixLanguage, SyntaxNode}; 271 | use rowan::{GreenNodeBuilder, Language}; 272 | 273 | fn string_node(content: &str) -> ast::Str { 274 | let mut builder = GreenNodeBuilder::new(); 275 | builder.start_node(NixLanguage::kind_to_raw(NODE_STRING)); 276 | builder.token(NixLanguage::kind_to_raw(TOKEN_STRING_START), "''"); 277 | builder.token(NixLanguage::kind_to_raw(TOKEN_STRING_CONTENT), content); 278 | builder.token(NixLanguage::kind_to_raw(TOKEN_STRING_END), "''"); 279 | builder.finish_node(); 280 | 281 | ast::Str::cast(SyntaxNode::new_root(builder.finish())).unwrap() 282 | } 283 | 284 | let txtin = r#" 285 | |trailing-whitespace 286 | |trailing-whitespace 287 | This is a multiline string :D 288 | indented by two 289 | \'\'\'\'\ 290 | ''${ interpolation was escaped } 291 | two single quotes: ''' 292 | three single quotes: '''' 293 | "# 294 | 
.replace("|trailing-whitespace", ""); 295 | 296 | if let [InterpolPart::Literal(lit)] = 297 | &ast::Str::normalized_parts(&string_node(txtin.as_str()))[..] 298 | { 299 | assert_eq!(lit, 300 | // Get the below with nix repl 301 | " \n \nThis is a multiline string :D\n indented by two\n\\'\\'\\'\\'\\\n${ interpolation was escaped }\ntwo single quotes: ''\nthree single quotes: '''\n" 302 | ); 303 | } else { 304 | unreachable!(); 305 | } 306 | } 307 | 308 | #[test] 309 | fn parts_ast() { 310 | fn assert_eq_ast_ctn(it: &mut dyn Iterator>, x: &str) { 311 | let tmp = it.next().expect("unexpected EOF"); 312 | if let InterpolPart::Interpolation(astn) = tmp { 313 | assert_eq!(astn.expr().unwrap().syntax().to_string(), x); 314 | } else { 315 | unreachable!("unexpected literal {:?}", tmp); 316 | } 317 | } 318 | 319 | let inp = r#"'' 320 | 321 | This version of Nixpkgs requires Nix >= ${requiredVersion}, please upgrade: 322 | 323 | - If you are running NixOS, `nixos-rebuild' can be used to upgrade your system. 324 | 325 | - Alternatively, with Nix > 2.0 `nix upgrade-nix' can be used to imperatively 326 | upgrade Nix. You may use `nix-env --version' to check which version you have. 327 | 328 | - If you installed Nix using the install script (https://nixos.org/nix/install), 329 | it is safe to upgrade by running it again: 330 | 331 | curl -L https://nixos.org/nix/install | sh 332 | 333 | For more information, please see the NixOS release notes at 334 | https://nixos.org/nixos/manual or locally at 335 | ${toString ./nixos/doc/manual/release-notes}. 
336 | 337 | If you need further help, see https://nixos.org/nixos/support.html 338 | ''"#; 339 | let expr = Root::parse(inp).ok().unwrap().expr().unwrap(); 340 | match expr { 341 | ast::Expr::Str(s) => { 342 | let mut it = s.normalized_parts().into_iter(); 343 | assert_eq!( 344 | it.next().unwrap(), 345 | InterpolPart::Literal("\nThis version of Nixpkgs requires Nix >= ".to_string()) 346 | ); 347 | assert_eq_ast_ctn(&mut it, "requiredVersion"); 348 | assert_eq!(it.next().unwrap(), InterpolPart::Literal( 349 | ", please upgrade:\n\n- If you are running NixOS, `nixos-rebuild' can be used to upgrade your system.\n\n- Alternatively, with Nix > 2.0 `nix upgrade-nix' can be used to imperatively\n upgrade Nix. You may use `nix-env --version' to check which version you have.\n\n- If you installed Nix using the install script (https://nixos.org/nix/install),\n it is safe to upgrade by running it again:\n\n curl -L https://nixos.org/nix/install | sh\n\nFor more information, please see the NixOS release notes at\nhttps://nixos.org/nixos/manual or locally at\n".to_string() 350 | )); 351 | assert_eq_ast_ctn(&mut it, "toString ./nixos/doc/manual/release-notes"); 352 | assert_eq!( 353 | it.next().unwrap(), 354 | InterpolPart::Literal( 355 | ".\n\nIf you need further help, see https://nixos.org/nixos/support.html\n" 356 | .to_string() 357 | ) 358 | ); 359 | } 360 | _ => unreachable!(), 361 | } 362 | } 363 | } 364 | -------------------------------------------------------------------------------- /src/ast/tokens.rs: -------------------------------------------------------------------------------- 1 | use core::num; 2 | 3 | use crate::{ 4 | ast::AstToken, 5 | SyntaxKind::{self, *}, 6 | SyntaxToken, 7 | }; 8 | 9 | macro_rules! 
token { 10 | ( 11 | #[from($kind:ident)] 12 | $(#[$meta:meta])* 13 | struct $name:ident; 14 | ) => { 15 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 16 | $(#[$meta])* 17 | pub struct $name(pub(super) SyntaxToken); 18 | 19 | impl std::fmt::Display for $name { 20 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 21 | std::fmt::Display::fmt(self.syntax(), f) 22 | } 23 | } 24 | 25 | impl AstToken for $name { 26 | fn can_cast(kind: SyntaxKind) -> bool { 27 | $kind == kind 28 | } 29 | 30 | fn cast(from: SyntaxToken) -> Option { 31 | if from.kind() == $kind { 32 | Some(Self(from)) 33 | } else { 34 | None 35 | } 36 | } 37 | 38 | fn syntax(&self) -> &SyntaxToken { 39 | &self.0 40 | } 41 | } 42 | }; 43 | } 44 | 45 | token! { #[from(TOKEN_WHITESPACE)] struct Whitespace; } 46 | 47 | token! { #[from(TOKEN_COMMENT)] struct Comment; } 48 | 49 | impl Comment { 50 | pub fn text(&self) -> &str { 51 | let text = self.syntax().text(); 52 | // Handle both "#..." and "/*...*/" comments. 53 | match text.strip_prefix("#") { 54 | Some(s) => s, 55 | None => text.strip_prefix(r#"/*"#).unwrap().strip_suffix(r#"*/"#).unwrap(), 56 | } 57 | } 58 | } 59 | 60 | #[cfg(test)] 61 | mod tests { 62 | use crate::ast; 63 | use crate::match_ast; 64 | use crate::Root; 65 | 66 | use super::*; 67 | use rowan::ast::AstNode; 68 | 69 | #[test] 70 | fn comment() { 71 | let s = "# comment bruh 72 | /* this is a multiline comment wow 73 | asdfasdf 74 | asdfasdf */ 75 | 1 + 1 76 | /* last one */ 77 | "; 78 | let comments: Vec = Root::parse(s) 79 | .ok() 80 | .unwrap() 81 | .syntax() 82 | .children_with_tokens() 83 | .filter_map(|e| match e { 84 | rowan::NodeOrToken::Token(token) => match_ast! 
{ match token {
                    ast::Comment(it) => Some(it.text().to_string()),
                    _ => None,
                }},
                rowan::NodeOrToken::Node(_) => None,
            })
            .collect();
        let expected = vec![
            " comment bruh",
            " this is a multiline comment wow\nasdfasdf\nasdfasdf ",
            " last one ",
        ];

        assert_eq!(comments, expected);
    }
}

token! { #[from(TOKEN_FLOAT)] struct Float; }

impl Float {
    /// Parse the token text as an `f64`.
    pub fn value(&self) -> Result<f64, num::ParseFloatError> {
        self.syntax().text().parse()
    }
}

token! { #[from(TOKEN_INTEGER)] struct Integer; }

impl Integer {
    /// Parse the token text as an `i64`.
    pub fn value(&self) -> Result<i64, num::ParseIntError> {
        self.syntax().text().parse()
    }
}

token! { #[from(TOKEN_PATH)] struct PathContent; }

token! { #[from(TOKEN_STRING_CONTENT)] struct StrContent; }

token! { #[from(TOKEN_URI)] struct Uri; }
--------------------------------------------------------------------------------
/src/kinds.rs:
--------------------------------------------------------------------------------
// NOTE: variant order is load-bearing. The discriminants are used both by the
// `u16` transmute in `NixLanguage::kind_from_raw` and as bit positions in
// `TokenSet`'s u128 mask, so variants must not be reordered or removed.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u16)]
#[allow(non_camel_case_types)]
pub enum SyntaxKind {
    // Internals
    TOKEN_COMMENT,
    TOKEN_ERROR,
    TOKEN_WHITESPACE,

    // Keywords
    TOKEN_ASSERT,
    TOKEN_ELSE,
    TOKEN_IF,
    TOKEN_IN,
    TOKEN_INHERIT,
    TOKEN_LET,
    TOKEN_OR,
    TOKEN_REC,
    TOKEN_THEN,
    TOKEN_WITH,

    // Symbols
    TOKEN_L_BRACE,
    TOKEN_R_BRACE,
    TOKEN_L_BRACK,
    TOKEN_R_BRACK,
    TOKEN_ASSIGN,
    TOKEN_AT,
    TOKEN_COLON,
    TOKEN_COMMA,
    TOKEN_DOT,
    TOKEN_ELLIPSIS,
    TOKEN_QUESTION,
    TOKEN_SEMICOLON,

    // Operators
    TOKEN_L_PAREN,
    TOKEN_R_PAREN,
    TOKEN_CONCAT,
    TOKEN_INVERT,
    TOKEN_UPDATE,

    TOKEN_ADD,
    TOKEN_SUB,
    TOKEN_MUL,
    TOKEN_DIV,

    TOKEN_AND_AND,
    TOKEN_EQUAL,
    TOKEN_IMPLICATION,
    TOKEN_LESS,
    TOKEN_LESS_OR_EQ,
    TOKEN_MORE,
    TOKEN_MORE_OR_EQ,
    TOKEN_NOT_EQUAL,
    TOKEN_OR_OR,
    TOKEN_PIPE_RIGHT,
    TOKEN_PIPE_LEFT,

    // Identifiers and values
    TOKEN_FLOAT,
    TOKEN_IDENT,
    TOKEN_INTEGER,
    TOKEN_INTERPOL_END,
    TOKEN_INTERPOL_START,
    TOKEN_PATH,
    TOKEN_URI,
    TOKEN_STRING_CONTENT,
    TOKEN_STRING_END,
    TOKEN_STRING_START,

    NODE_APPLY,
    NODE_ASSERT,
    NODE_ATTRPATH,
    NODE_DYNAMIC,
    NODE_ERROR,
    NODE_IDENT,
    NODE_IF_ELSE,
    NODE_SELECT,
    NODE_INHERIT,
    NODE_INHERIT_FROM,
    NODE_STRING,
    NODE_INTERPOL,
    NODE_LAMBDA,
    NODE_IDENT_PARAM,
    // An old let { x = 92; body = x; } syntax
    NODE_LEGACY_LET,
    NODE_LET_IN,
    NODE_LIST,
    NODE_BIN_OP,
    NODE_PAREN,
    NODE_PATTERN,
    NODE_PAT_BIND,
    NODE_PAT_ENTRY,
    NODE_ROOT,
    NODE_ATTR_SET,
    NODE_ATTRPATH_VALUE,
    NODE_UNARY_OP,
    NODE_LITERAL,
    NODE_WITH,
    NODE_PATH,
    // Attrpath existence check: foo ? bar.${baz}."bux"
    NODE_HAS_ATTR,

    #[doc(hidden)]
    __LAST,
}
use SyntaxKind::*;

impl SyntaxKind {
    /// Returns true if this token is a literal, such as an integer or a string
    pub fn is_literal(self) -> bool {
        matches!(self, TOKEN_FLOAT | TOKEN_INTEGER | TOKEN_PATH | TOKEN_URI)
    }

    /// Returns true if this token should be used as a function argument.
117 | /// ```ignore 118 | /// Example: 119 | /// add 1 2 + 3 120 | /// ^ ^ ^ ^ 121 | /// | | | +- false 122 | /// | | +- true 123 | /// | +- true 124 | /// +- true 125 | /// ``` 126 | pub fn is_fn_arg(self) -> bool { 127 | match self { 128 | TOKEN_REC | TOKEN_L_BRACE | TOKEN_L_BRACK | TOKEN_L_PAREN | TOKEN_STRING_START 129 | | TOKEN_IDENT => true, 130 | _ => self.is_literal(), 131 | } 132 | } 133 | /// Returns true if this token is a comment, whitespace, or similar, and 134 | /// should be skipped over by the parser. 135 | pub fn is_trivia(self) -> bool { 136 | matches!(self, TOKEN_COMMENT | TOKEN_WHITESPACE) 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | mod macros; 3 | pub mod ast; 4 | mod kinds; 5 | pub mod parser; 6 | #[cfg(test)] 7 | mod tests; 8 | mod token_set; 9 | pub mod tokenizer; 10 | 11 | use std::marker::PhantomData; 12 | 13 | pub use self::{kinds::SyntaxKind, tokenizer::tokenize}; 14 | 15 | use ast::AstNode; 16 | use parser::ParseError; 17 | use rowan::GreenNode; 18 | pub use rowan::{NodeOrToken, TextRange, TextSize, TokenAtOffset, WalkEvent}; 19 | pub(crate) use token_set::TokenSet; 20 | 21 | use self::tokenizer::Tokenizer; 22 | 23 | #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] 24 | pub enum NixLanguage {} 25 | 26 | impl rowan::Language for NixLanguage { 27 | type Kind = SyntaxKind; 28 | fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind { 29 | let discriminant: u16 = raw.0; 30 | assert!(discriminant <= (SyntaxKind::__LAST as u16)); 31 | unsafe { std::mem::transmute::(discriminant) } 32 | } 33 | fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind { 34 | rowan::SyntaxKind(kind as u16) 35 | } 36 | } 37 | 38 | pub type SyntaxNode = rowan::SyntaxNode; 39 | pub type SyntaxToken = rowan::SyntaxToken; 40 | pub type SyntaxElement = rowan::NodeOrToken; 41 | pub 
type SyntaxElementChildren = rowan::SyntaxElementChildren; 42 | pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren; 43 | 44 | pub use ast::Root; 45 | 46 | impl Root { 47 | pub fn parse(s: &str) -> Parse { 48 | let (green, errors) = parser::parse(Tokenizer::new(s)); 49 | Parse { green, errors, _ty: PhantomData } 50 | } 51 | } 52 | 53 | /// The result of a parse 54 | #[derive(Clone)] 55 | pub struct Parse { 56 | green: GreenNode, 57 | errors: Vec, 58 | _ty: PhantomData T>, 59 | } 60 | 61 | impl Parse { 62 | pub fn syntax(&self) -> SyntaxNode { 63 | SyntaxNode::new_root(self.green.clone()) 64 | } 65 | } 66 | 67 | impl Parse { 68 | pub fn tree(&self) -> T { 69 | T::cast(self.syntax()).unwrap() 70 | } 71 | 72 | /// Return all errors in the tree, if any 73 | pub fn errors(&self) -> &[ParseError] { 74 | &*self.errors 75 | } 76 | 77 | /// Either return the first error in the tree, or if there are none return self 78 | pub fn ok(self) -> Result { 79 | if let Some(err) = self.errors().first() { 80 | return Err(err.clone()); 81 | } 82 | Ok(self.tree()) 83 | } 84 | } 85 | 86 | /// Matches a `SyntaxNode` against an `ast` type. 87 | /// 88 | /// # Example: 89 | /// 90 | /// ```ignore 91 | /// match_ast! { 92 | /// match node { 93 | /// ast::Apply(it) => { ... }, 94 | /// ast::Assert(it) => { ... }, 95 | /// ast::IfElse(it) => { ... }, 96 | /// _ => None, 97 | /// } 98 | /// } 99 | /// ``` 100 | #[macro_export] 101 | macro_rules! match_ast { 102 | (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; 103 | 104 | (match ($node:expr) { 105 | $( ast::$ast:ident($it:pat) => $res:expr, )* 106 | _ => $catch_all:expr $(,)? 
    }) => {{
        // Expands to a chain of `if let Some(..) = ast::X::cast(node)` tests,
        // falling through to the catch-all arm when no cast succeeds.
        $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )*
        { $catch_all }
    }};
}
--------------------------------------------------------------------------------
/src/macros.rs:
--------------------------------------------------------------------------------
/// Shorthand mapping a literal token spelling to its `SyntaxKind` variant,
/// e.g. `T![;]` == `SyntaxKind::TOKEN_SEMICOLON`.
#[macro_export]
#[rustfmt::skip]
macro_rules! T {
    (assert) => ($crate::SyntaxKind::TOKEN_ASSERT);
    (else) => ($crate::SyntaxKind::TOKEN_ELSE);
    (if) => ($crate::SyntaxKind::TOKEN_IF);
    (in) => ($crate::SyntaxKind::TOKEN_IN);
    (inherit) => ($crate::SyntaxKind::TOKEN_INHERIT);
    (let) => ($crate::SyntaxKind::TOKEN_LET);
    (or) => ($crate::SyntaxKind::TOKEN_OR);
    (rec) => ($crate::SyntaxKind::TOKEN_REC);
    (then) => ($crate::SyntaxKind::TOKEN_THEN);
    (with) => ($crate::SyntaxKind::TOKEN_WITH);

    ('{') => ($crate::SyntaxKind::TOKEN_L_BRACE);
    ('}') => ($crate::SyntaxKind::TOKEN_R_BRACE);
    ('[') => ($crate::SyntaxKind::TOKEN_L_BRACK);
    (']') => ($crate::SyntaxKind::TOKEN_R_BRACK);
    ('(') => ($crate::SyntaxKind::TOKEN_L_PAREN);
    (')') => ($crate::SyntaxKind::TOKEN_R_PAREN);

    (=) => ($crate::SyntaxKind::TOKEN_ASSIGN);
    (@) => ($crate::SyntaxKind::TOKEN_AT);
    (:) => ($crate::SyntaxKind::TOKEN_COLON);
    (,) => ($crate::SyntaxKind::TOKEN_COMMA);
    (.) => ($crate::SyntaxKind::TOKEN_DOT);
    (...) => ($crate::SyntaxKind::TOKEN_ELLIPSIS);
    (?) => ($crate::SyntaxKind::TOKEN_QUESTION);
    (;) => ($crate::SyntaxKind::TOKEN_SEMICOLON);
    (++) => ($crate::SyntaxKind::TOKEN_CONCAT);
    (!) => ($crate::SyntaxKind::TOKEN_INVERT);
    // "//" and the pipe operators are quoted because they are not valid
    // bare macro fragment tokens.
    ("//") => ($crate::SyntaxKind::TOKEN_UPDATE);

    (+) => ($crate::SyntaxKind::TOKEN_ADD);
    (-) => ($crate::SyntaxKind::TOKEN_SUB);
    (*) => ($crate::SyntaxKind::TOKEN_MUL);
    (/) => ($crate::SyntaxKind::TOKEN_DIV);

    (&&) => ($crate::SyntaxKind::TOKEN_AND_AND);
    (==) => ($crate::SyntaxKind::TOKEN_EQUAL);
    (->) => ($crate::SyntaxKind::TOKEN_IMPLICATION);
    (<) => ($crate::SyntaxKind::TOKEN_LESS);
    (<=) => ($crate::SyntaxKind::TOKEN_LESS_OR_EQ);
    (>) => ($crate::SyntaxKind::TOKEN_MORE);
    (>=) => ($crate::SyntaxKind::TOKEN_MORE_OR_EQ);
    (!=) => ($crate::SyntaxKind::TOKEN_NOT_EQUAL);
    (||) => ($crate::SyntaxKind::TOKEN_OR_OR);
    ("|>") => ($crate::SyntaxKind::TOKEN_PIPE_RIGHT);
    ("<|") => ($crate::SyntaxKind::TOKEN_PIPE_LEFT);
    // Fallback: pass a SyntaxKind variant name through unchanged.
    ($kind:ident) => ($crate::SyntaxKind::$kind);
}
--------------------------------------------------------------------------------
/src/tests.rs:
--------------------------------------------------------------------------------
use std::{ffi::OsStr, fmt::Write, fs, path::PathBuf};

use expect_test::expect_file;
use rowan::ast::AstNode;

use crate::{
    ast::{self, HasEntry},
    tokenize, Root, SyntaxKind,
};

#[test]
fn interpolation() {
    // Parse the interpolation fixture and drill down to the second binding's
    // string value inside the let-in body.
    let root = ast::Root::parse(include_str!("../test_data/parser/success/interpolation.nix"))
        .ok()
        .unwrap();
    let let_in = ast::LetIn::try_from(root.expr().unwrap()).unwrap();
    let set = ast::AttrSet::try_from(let_in.body().unwrap()).unwrap();
    let entry = set.entries().nth(1).unwrap();
    let attrpath_value = ast::AttrpathValue::try_from(entry).unwrap();
    let value = ast::Str::try_from(attrpath_value.value().unwrap()).unwrap();

    match &*value.normalized_parts() {
        &[
            ast::InterpolPart::Literal(ref s1),
            ast::InterpolPart::Interpolation(_),
            ast::InterpolPart::Literal(ref s2),
ast::InterpolPart::Interpolation(_), 28 | ast::InterpolPart::Literal(ref s3) 29 | ] 30 | if s1 == "The set's x value is: " 31 | && s2 == "\n\nThis line shall have no indention\n This line shall be indented by 2\n\n\n" 32 | && s3 == "\n" => (), 33 | parts => panic!("did not match: {:#?}", parts) 34 | } 35 | } 36 | 37 | #[test] 38 | fn inherit() { 39 | let root = 40 | ast::Root::parse(include_str!("../test_data/parser/success/inherit.nix")).ok().unwrap(); 41 | let let_in = ast::LetIn::try_from(root.expr().unwrap()).unwrap(); 42 | let set = ast::AttrSet::try_from(let_in.body().unwrap()).unwrap(); 43 | let inherit = set.inherits().nth(1).unwrap(); 44 | 45 | let from = inherit.from().unwrap().expr().unwrap(); 46 | let ident: ast::Ident = ast::Ident::try_from(from).unwrap(); 47 | assert_eq!(ident.syntax().text(), "set"); 48 | let mut children = inherit.attrs(); 49 | assert_eq!(children.next().unwrap().syntax().text(), "z"); 50 | assert_eq!(children.next().unwrap().syntax().text(), "a"); 51 | assert!(children.next().is_none()); 52 | } 53 | 54 | #[test] 55 | fn math() { 56 | let root = ast::Root::parse(include_str!("../test_data/parser/success/math.nix")).ok().unwrap(); 57 | let op1 = ast::BinOp::try_from(root.expr().unwrap()).unwrap(); 58 | let op2 = ast::BinOp::try_from(op1.lhs().unwrap()).unwrap(); 59 | assert_eq!(op1.operator().unwrap(), ast::BinOpKind::Add); 60 | 61 | let lhs = ast::Literal::try_from(op2.lhs().unwrap()).unwrap(); 62 | assert_eq!(lhs.syntax().text(), "1"); 63 | 64 | let rhs = ast::BinOp::try_from(op2.rhs().unwrap()).unwrap(); 65 | assert_eq!(rhs.operator().unwrap(), ast::BinOpKind::Mul); 66 | } 67 | 68 | #[test] 69 | fn t_macro() { 70 | assert_eq!(T![@], SyntaxKind::TOKEN_AT); 71 | assert!(matches!(SyntaxKind::TOKEN_L_PAREN, T!['('])); 72 | } 73 | 74 | fn dir_tests(dir: &str, get_actual: F) 75 | where 76 | F: Fn(String) -> String, 77 | { 78 | let base_path: PathBuf = [env!("CARGO_MANIFEST_DIR"), "test_data", dir].iter().collect(); 79 | let success_path 
= base_path.join("success"); 80 | let error_path = base_path.join("error"); 81 | 82 | let entries = success_path.read_dir().unwrap().chain(error_path.read_dir().unwrap()); 83 | 84 | for entry in entries { 85 | let path = entry.unwrap().path(); 86 | 87 | if path.extension() != Some(OsStr::new("nix")) { 88 | continue; 89 | } 90 | 91 | println!("testing: {}", path.display()); 92 | 93 | let mut code = fs::read_to_string(&path).unwrap(); 94 | if code.ends_with('\n') { 95 | code.truncate(code.len() - 1); 96 | } 97 | 98 | let actual = get_actual(code); 99 | expect_file![path.with_extension("expect")].assert_eq(&actual); 100 | } 101 | } 102 | 103 | #[test] 104 | fn parser_dir_tests() { 105 | dir_tests("parser", |code| { 106 | let parse = Root::parse(&code); 107 | 108 | let mut actual = String::new(); 109 | for error in parse.errors() { 110 | writeln!(actual, "error: {}", error).unwrap(); 111 | } 112 | writeln!(actual, "{:#?}", parse.syntax()).unwrap(); 113 | 114 | actual 115 | }) 116 | } 117 | 118 | #[test] 119 | fn tokenizer_dir_tests() { 120 | dir_tests("tokenizer", |code| { 121 | let mut actual = String::new(); 122 | for (kind, str) in tokenize(&code) { 123 | writeln!(actual, "{:?}, \"{}\"", kind, str).unwrap(); 124 | } 125 | actual 126 | }) 127 | } 128 | -------------------------------------------------------------------------------- /src/token_set.rs: -------------------------------------------------------------------------------- 1 | use std::ops; 2 | 3 | use crate::SyntaxKind; 4 | 5 | /// A bit-set of `SyntaxKind`s 6 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] 7 | pub struct TokenSet(u128); 8 | 9 | impl TokenSet { 10 | #[allow(dead_code)] 11 | pub(crate) const EMPTY: TokenSet = TokenSet(0); 12 | 13 | pub(crate) const fn new(kind: SyntaxKind) -> TokenSet { 14 | TokenSet(mask(kind)) 15 | } 16 | 17 | pub(crate) const fn from_slice(kinds: &[SyntaxKind]) -> TokenSet { 18 | let mut res = 0u128; 19 | let mut i = 0; 20 | while i < kinds.len() { 21 | res |= 
mask(kinds[i]); 22 | i += 1 23 | } 24 | TokenSet(res) 25 | } 26 | 27 | pub(crate) const fn union(self, other: TokenSet) -> TokenSet { 28 | TokenSet(self.0 | other.0) 29 | } 30 | 31 | pub(crate) const fn contains(&self, kind: SyntaxKind) -> bool { 32 | self.0 & mask(kind) != 0 33 | } 34 | } 35 | 36 | const fn mask(kind: SyntaxKind) -> u128 { 37 | 1u128 << (kind as usize) 38 | } 39 | 40 | impl ops::BitOr for SyntaxKind { 41 | type Output = TokenSet; 42 | 43 | fn bitor(self, rhs: Self) -> Self::Output { 44 | TokenSet(mask(self) | mask(rhs)) 45 | } 46 | } 47 | 48 | impl ops::BitOr for TokenSet { 49 | type Output = TokenSet; 50 | 51 | fn bitor(self, rhs: SyntaxKind) -> Self::Output { 52 | self.union(TokenSet(mask(rhs))) 53 | } 54 | } 55 | 56 | impl ops::BitOr for SyntaxKind { 57 | type Output = TokenSet; 58 | 59 | fn bitor(self, rhs: TokenSet) -> Self::Output { 60 | TokenSet(mask(self)).union(rhs) 61 | } 62 | } 63 | 64 | impl ops::BitOr for TokenSet { 65 | type Output = TokenSet; 66 | 67 | fn bitor(self, rhs: TokenSet) -> Self::Output { 68 | self.union(rhs) 69 | } 70 | } 71 | 72 | impl ops::BitOr for () { 73 | type Output = TokenSet; 74 | 75 | fn bitor(self, rhs: SyntaxKind) -> Self::Output { 76 | TokenSet::new(rhs) 77 | } 78 | } 79 | 80 | impl ops::BitOr<()> for SyntaxKind { 81 | type Output = TokenSet; 82 | 83 | fn bitor(self, (): ()) -> Self::Output { 84 | TokenSet::new(self) 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /test_data/parser/error/error.expect: -------------------------------------------------------------------------------- 1 | error: unexpected TOKEN_STRING_START at 10..28, wanted any of [TOKEN_ASSIGN] 2 | error: unexpected TOKEN_STRING_START at 78..98, wanted any of [TOKEN_ASSIGN] 3 | error: unexpected TOKEN_R_BRACE at 162..166, wanted any of [TOKEN_ASSIGN] 4 | error: unexpected end of file, wanted any of [TOKEN_ASSIGN] 5 | error: unexpected end of file 6 | error: unexpected end of file, wanted any 
of [TOKEN_SEMICOLON] 7 | error: unexpected end of file 8 | error: unexpected end of file, wanted any of [TOKEN_SEMICOLON] 9 | error: unexpected end of file 10 | NODE_ROOT@0..166 11 | NODE_ATTR_SET@0..166 12 | TOKEN_L_BRACE@0..1 "{" 13 | TOKEN_WHITESPACE@1..4 "\n " 14 | NODE_ATTRPATH_VALUE@4..33 15 | NODE_ATTRPATH@4..9 16 | NODE_IDENT@4..9 17 | TOKEN_IDENT@4..9 "hello" 18 | TOKEN_WHITESPACE@9..10 " " 19 | NODE_ERROR@10..28 20 | TOKEN_STRING_START@10..11 "\"" 21 | TOKEN_STRING_CONTENT@11..27 "Hello Ùnicödə" 22 | TOKEN_STRING_END@27..28 "\"" 23 | TOKEN_WHITESPACE@28..29 " " 24 | TOKEN_ASSIGN@29..30 "=" 25 | TOKEN_WHITESPACE@30..31 " " 26 | NODE_LITERAL@31..32 27 | TOKEN_INTEGER@31..32 "1" 28 | TOKEN_SEMICOLON@32..33 ";" 29 | TOKEN_WHITESPACE@33..36 "\n " 30 | NODE_ATTRPATH_VALUE@36..120 31 | NODE_ATTRPATH@36..40 32 | NODE_IDENT@36..40 33 | TOKEN_IDENT@36..40 "test" 34 | TOKEN_WHITESPACE@40..41 " " 35 | TOKEN_ASSIGN@41..42 "=" 36 | TOKEN_WHITESPACE@42..43 " " 37 | NODE_ATTR_SET@43..119 38 | TOKEN_L_BRACE@43..44 "{" 39 | TOKEN_WHITESPACE@44..49 "\n " 40 | NODE_ATTRPATH_VALUE@49..65 41 | NODE_ATTRPATH@49..54 42 | NODE_IDENT@49..54 43 | TOKEN_IDENT@49..54 "valid" 44 | TOKEN_WHITESPACE@54..55 " " 45 | TOKEN_ASSIGN@55..56 "=" 46 | TOKEN_WHITESPACE@56..57 " " 47 | NODE_STRING@57..64 48 | TOKEN_STRING_START@57..58 "\"" 49 | TOKEN_STRING_CONTENT@58..63 "entry" 50 | TOKEN_STRING_END@63..64 "\"" 51 | TOKEN_SEMICOLON@64..65 ";" 52 | TOKEN_WHITESPACE@65..70 "\n " 53 | NODE_ATTRPATH_VALUE@70..115 54 | NODE_ATTRPATH@70..77 55 | NODE_IDENT@70..77 56 | TOKEN_IDENT@70..77 "invalid" 57 | TOKEN_WHITESPACE@77..78 " " 58 | NODE_ERROR@78..98 59 | TOKEN_STRING_START@78..79 "\"" 60 | TOKEN_STRING_CONTENT@79..84 "entry" 61 | TOKEN_STRING_END@84..85 "\"" 62 | TOKEN_SEMICOLON@85..86 ";" 63 | TOKEN_WHITESPACE@86..91 "\n " 64 | TOKEN_IDENT@91..98 "another" 65 | TOKEN_WHITESPACE@98..99 " " 66 | TOKEN_ASSIGN@99..100 "=" 67 | TOKEN_WHITESPACE@100..101 " " 68 | NODE_STRING@101..114 69 | 
TOKEN_STRING_START@101..102 "\"" 70 | TOKEN_STRING_CONTENT@102..113 "valid entry" 71 | TOKEN_STRING_END@113..114 "\"" 72 | TOKEN_SEMICOLON@114..115 ";" 73 | TOKEN_WHITESPACE@115..118 "\n " 74 | TOKEN_R_BRACE@118..119 "}" 75 | TOKEN_SEMICOLON@119..120 ";" 76 | TOKEN_WHITESPACE@120..123 "\n " 77 | NODE_ATTRPATH_VALUE@123..166 78 | NODE_ATTRPATH@123..128 79 | NODE_IDENT@123..128 80 | TOKEN_IDENT@123..128 "test2" 81 | TOKEN_WHITESPACE@128..129 " " 82 | TOKEN_ASSIGN@129..130 "=" 83 | TOKEN_WHITESPACE@130..131 " " 84 | NODE_ATTR_SET@131..166 85 | TOKEN_L_BRACE@131..132 "{" 86 | TOKEN_WHITESPACE@132..137 "\n " 87 | NODE_ATTRPATH_VALUE@137..150 88 | NODE_ATTRPATH@137..142 89 | NODE_IDENT@137..142 90 | TOKEN_IDENT@137..142 "hello" 91 | TOKEN_WHITESPACE@142..143 " " 92 | TOKEN_ASSIGN@143..144 "=" 93 | TOKEN_WHITESPACE@144..145 " " 94 | NODE_STRING@145..149 95 | TOKEN_STRING_START@145..146 "\"" 96 | TOKEN_STRING_CONTENT@146..148 "hi" 97 | TOKEN_STRING_END@148..149 "\"" 98 | TOKEN_SEMICOLON@149..150 ";" 99 | TOKEN_WHITESPACE@150..155 "\n " 100 | NODE_ATTRPATH_VALUE@155..166 101 | NODE_ATTRPATH@155..159 102 | NODE_IDENT@155..159 103 | TOKEN_IDENT@155..159 "aaaa" 104 | TOKEN_WHITESPACE@159..162 "\n " 105 | NODE_ERROR@162..166 106 | TOKEN_R_BRACE@162..163 "}" 107 | TOKEN_SEMICOLON@163..164 ";" 108 | TOKEN_WHITESPACE@164..165 "\n" 109 | TOKEN_R_BRACE@165..166 "}" 110 | 111 | -------------------------------------------------------------------------------- /test_data/parser/error/error.nix: -------------------------------------------------------------------------------- 1 | { 2 | hello "Hello Ùnicödə" = 1; 3 | test = { 4 | valid = "entry"; 5 | invalid "entry"; 6 | another = "valid entry"; 7 | }; 8 | test2 = { 9 | hello = "hi"; 10 | aaaa 11 | }; 12 | } 13 | -------------------------------------------------------------------------------- /test_data/parser/error/extra_comma.expect: -------------------------------------------------------------------------------- 1 | error: unexpected 
token at 3..4 2 | NODE_ROOT@0..4 3 | NODE_ATTR_SET@0..2 4 | TOKEN_L_BRACE@0..1 "{" 5 | TOKEN_R_BRACE@1..2 "}" 6 | TOKEN_WHITESPACE@2..3 "\n" 7 | NODE_ERROR@3..4 8 | TOKEN_COMMA@3..4 "," 9 | 10 | -------------------------------------------------------------------------------- /test_data/parser/error/extra_comma.nix: -------------------------------------------------------------------------------- 1 | {} 2 | , -------------------------------------------------------------------------------- /test_data/parser/error/formals_double_bind.expect: -------------------------------------------------------------------------------- 1 | error: unexpected double bind at 4..6 2 | NODE_ROOT@0..10 3 | NODE_LAMBDA@0..10 4 | NODE_PATTERN@0..6 5 | NODE_PAT_BIND@0..2 6 | NODE_IDENT@0..1 7 | TOKEN_IDENT@0..1 "f" 8 | TOKEN_AT@1..2 "@" 9 | TOKEN_L_BRACE@2..3 "{" 10 | TOKEN_R_BRACE@3..4 "}" 11 | NODE_ERROR@4..6 12 | TOKEN_AT@4..5 "@" 13 | NODE_IDENT@5..6 14 | TOKEN_IDENT@5..6 "f" 15 | TOKEN_COLON@6..7 ":" 16 | TOKEN_WHITESPACE@7..8 "\n" 17 | NODE_STRING@8..10 18 | TOKEN_STRING_START@8..9 "\"" 19 | TOKEN_STRING_END@9..10 "\"" 20 | 21 | -------------------------------------------------------------------------------- /test_data/parser/error/formals_double_bind.nix: -------------------------------------------------------------------------------- 1 | f@{}@f: 2 | "" -------------------------------------------------------------------------------- /test_data/parser/error/inherit_from_late.expect: -------------------------------------------------------------------------------- 1 | error: unexpected TOKEN_L_PAREN at 12..13, wanted any of [TOKEN_IDENT, TOKEN_OR] 2 | error: unexpected TOKEN_R_PAREN at 14..15, wanted any of [TOKEN_IDENT, TOKEN_OR] 3 | NODE_ROOT@0..20 4 | NODE_ATTR_SET@0..20 5 | TOKEN_L_BRACE@0..1 "{" 6 | TOKEN_WHITESPACE@1..2 " " 7 | NODE_INHERIT@2..18 8 | TOKEN_INHERIT@2..9 "inherit" 9 | TOKEN_WHITESPACE@9..10 " " 10 | NODE_IDENT@10..11 11 | TOKEN_IDENT@10..11 "a" 12 | 
TOKEN_WHITESPACE@11..12 " " 13 | NODE_ERROR@12..13 14 | TOKEN_L_PAREN@12..13 "(" 15 | NODE_IDENT@13..14 16 | TOKEN_IDENT@13..14 "b" 17 | NODE_ERROR@14..15 18 | TOKEN_R_PAREN@14..15 ")" 19 | TOKEN_WHITESPACE@15..16 " " 20 | NODE_IDENT@16..17 21 | TOKEN_IDENT@16..17 "c" 22 | TOKEN_SEMICOLON@17..18 ";" 23 | TOKEN_WHITESPACE@18..19 " " 24 | TOKEN_R_BRACE@19..20 "}" 25 | 26 | -------------------------------------------------------------------------------- /test_data/parser/error/inherit_from_late.nix: -------------------------------------------------------------------------------- 1 | { inherit a (b) c; } -------------------------------------------------------------------------------- /test_data/parser/error/inherit_incomplete.expect: -------------------------------------------------------------------------------- 1 | error: unexpected end of file 2 | error: unexpected end of file, wanted any of [TOKEN_SEMICOLON] 3 | error: unexpected end of file 4 | error: unexpected end of file 5 | NODE_ROOT@0..13 6 | NODE_LET_IN@0..13 7 | TOKEN_LET@0..3 "let" 8 | TOKEN_WHITESPACE@3..6 "\n " 9 | NODE_INHERIT@6..13 10 | TOKEN_INHERIT@6..13 "inherit" 11 | 12 | -------------------------------------------------------------------------------- /test_data/parser/error/inherit_incomplete.nix: -------------------------------------------------------------------------------- 1 | let 2 | inherit 3 | -------------------------------------------------------------------------------- /test_data/parser/error/path_bare_tilde.expect: -------------------------------------------------------------------------------- 1 | error: unexpected TOKEN_ERROR at 0..1, wanted any of [TOKEN_L_PAREN, TOKEN_REC, TOKEN_L_BRACE, TOKEN_L_BRACK, TOKEN_STRING_START, TOKEN_IDENT] 2 | NODE_ROOT@0..1 3 | NODE_ERROR@0..1 4 | TOKEN_ERROR@0..1 "~" 5 | 6 | -------------------------------------------------------------------------------- /test_data/parser/error/path_bare_tilde.nix: 
-------------------------------------------------------------------------------- 1 | ~ -------------------------------------------------------------------------------- /test_data/parser/error/path_interp_no_separator.expect: -------------------------------------------------------------------------------- 1 | error: unexpected token at 1..7 2 | NODE_ROOT@0..7 3 | NODE_IDENT@0..1 4 | TOKEN_IDENT@0..1 "a" 5 | NODE_ERROR@1..7 6 | TOKEN_INTERPOL_START@1..3 "${" 7 | TOKEN_IDENT@3..4 "b" 8 | TOKEN_INTERPOL_END@4..5 "}" 9 | TOKEN_PATH@5..7 "/c" 10 | 11 | -------------------------------------------------------------------------------- /test_data/parser/error/path_interp_no_separator.nix: -------------------------------------------------------------------------------- 1 | a${b}/c 2 | -------------------------------------------------------------------------------- /test_data/parser/error/path_interp_trailing_slash.expect: -------------------------------------------------------------------------------- 1 | error: unexpected token at 8..9 2 | NODE_ROOT@0..9 3 | NODE_PATH@0..8 4 | TOKEN_PATH@0..2 "./" 5 | NODE_INTERPOL@2..8 6 | TOKEN_INTERPOL_START@2..4 "${" 7 | NODE_IDENT@4..7 8 | TOKEN_IDENT@4..7 "foo" 9 | TOKEN_INTERPOL_END@7..8 "}" 10 | NODE_ERROR@8..9 11 | TOKEN_ERROR@8..9 "/" 12 | 13 | -------------------------------------------------------------------------------- /test_data/parser/error/path_interp_trailing_slash.nix: -------------------------------------------------------------------------------- 1 | ./${foo}/ 2 | -------------------------------------------------------------------------------- /test_data/parser/error/path_store_interp.expect: -------------------------------------------------------------------------------- 1 | error: unexpected TOKEN_LESS at 0..1, wanted any of [TOKEN_L_PAREN, TOKEN_REC, TOKEN_L_BRACE, TOKEN_L_BRACK, TOKEN_STRING_START, TOKEN_IDENT] 2 | error: unexpected end of file 3 | NODE_ROOT@0..20 4 | NODE_BIN_OP@0..20 5 | NODE_APPLY@0..19 6 | 
NODE_ERROR@0..1 7 | TOKEN_LESS@0..1 "<" 8 | NODE_PATH@1..19 9 | TOKEN_PATH@1..9 "nixpkgs/" 10 | NODE_INTERPOL@9..15 11 | TOKEN_INTERPOL_START@9..11 "${" 12 | NODE_IDENT@11..14 13 | TOKEN_IDENT@11..14 "foo" 14 | TOKEN_INTERPOL_END@14..15 "}" 15 | TOKEN_PATH@15..19 "/bar" 16 | TOKEN_MORE@19..20 ">" 17 | 18 | -------------------------------------------------------------------------------- /test_data/parser/error/path_store_interp.nix: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /test_data/parser/error/path_tilde.expect: -------------------------------------------------------------------------------- 1 | error: unexpected TOKEN_ERROR at 0..2, wanted any of [TOKEN_L_PAREN, TOKEN_REC, TOKEN_L_BRACE, TOKEN_L_BRACK, TOKEN_STRING_START, TOKEN_IDENT] 2 | NODE_ROOT@0..2 3 | NODE_ERROR@0..2 4 | TOKEN_ERROR@0..2 "~p" 5 | 6 | -------------------------------------------------------------------------------- /test_data/parser/error/path_tilde.nix: -------------------------------------------------------------------------------- 1 | ~p -------------------------------------------------------------------------------- /test_data/parser/error/path_tilde_slash.expect: -------------------------------------------------------------------------------- 1 | error: unexpected TOKEN_ERROR at 0..2, wanted any of [TOKEN_L_PAREN, TOKEN_REC, TOKEN_L_BRACE, TOKEN_L_BRACK, TOKEN_STRING_START, TOKEN_IDENT] 2 | NODE_ROOT@0..2 3 | NODE_ERROR@0..2 4 | TOKEN_ERROR@0..2 "~/" 5 | 6 | -------------------------------------------------------------------------------- /test_data/parser/error/path_tilde_slash.nix: -------------------------------------------------------------------------------- 1 | ~/ 2 | -------------------------------------------------------------------------------- /test_data/parser/error/path_trailing_slash.expect: 
-------------------------------------------------------------------------------- 1 | error: unexpected TOKEN_ERROR at 0..3, wanted any of [TOKEN_L_PAREN, TOKEN_REC, TOKEN_L_BRACE, TOKEN_L_BRACK, TOKEN_STRING_START, TOKEN_IDENT] 2 | NODE_ROOT@0..3 3 | NODE_ERROR@0..3 4 | TOKEN_ERROR@0..3 "/a/" 5 | 6 | -------------------------------------------------------------------------------- /test_data/parser/error/path_trailing_slash.nix: -------------------------------------------------------------------------------- 1 | /a/ 2 | -------------------------------------------------------------------------------- /test_data/parser/error/select_both_errors.expect: -------------------------------------------------------------------------------- 1 | error: unexpected TOKEN_R_BRACK at 0..1, wanted any of [TOKEN_L_PAREN, TOKEN_REC, TOKEN_L_BRACE, TOKEN_L_BRACK, TOKEN_STRING_START, TOKEN_IDENT] 2 | error: unexpected end of file, wanted any of [TOKEN_IDENT, TOKEN_OR] 3 | NODE_ROOT@0..2 4 | NODE_SELECT@0..2 5 | NODE_ERROR@0..1 6 | TOKEN_R_BRACK@0..1 "]" 7 | TOKEN_DOT@1..2 "." 8 | NODE_ATTRPATH@2..2 9 | 10 | -------------------------------------------------------------------------------- /test_data/parser/error/select_both_errors.nix: -------------------------------------------------------------------------------- 1 | ]. 
2 | -------------------------------------------------------------------------------- /test_data/parser/error/ws_belongs_to_root.expect: -------------------------------------------------------------------------------- 1 | error: unexpected end of file 2 | error: unexpected end of file, wanted any of [TOKEN_SEMICOLON] 3 | error: unexpected end of file 4 | NODE_ROOT@0..50 5 | NODE_ATTR_SET@0..48 6 | TOKEN_L_BRACE@0..1 "{" 7 | TOKEN_WHITESPACE@1..4 "\n " 8 | NODE_ATTRPATH_VALUE@4..48 9 | NODE_ATTRPATH@4..11 10 | NODE_IDENT@4..11 11 | TOKEN_IDENT@4..11 "traceIf" 12 | TOKEN_WHITESPACE@11..12 " " 13 | TOKEN_ASSIGN@12..13 "=" 14 | TOKEN_WHITESPACE@13..18 "\n " 15 | TOKEN_COMMENT@18..38 "# predicate to check" 16 | TOKEN_WHITESPACE@38..43 "\n " 17 | NODE_LAMBDA@43..48 18 | NODE_IDENT_PARAM@43..47 19 | NODE_IDENT@43..47 20 | TOKEN_IDENT@43..47 "pred" 21 | TOKEN_COLON@47..48 ":" 22 | TOKEN_WHITESPACE@48..50 "\n\n" 23 | 24 | -------------------------------------------------------------------------------- /test_data/parser/error/ws_belongs_to_root.nix: -------------------------------------------------------------------------------- 1 | { 2 | traceIf = 3 | # predicate to check 4 | pred: 5 | 6 | 7 | -------------------------------------------------------------------------------- /test_data/parser/error/ws_belongs_to_root2.expect: -------------------------------------------------------------------------------- 1 | error: unexpected token at 3..4 2 | NODE_ROOT@0..5 3 | NODE_ATTR_SET@0..2 4 | TOKEN_L_BRACE@0..1 "{" 5 | TOKEN_R_BRACE@1..2 "}" 6 | TOKEN_WHITESPACE@2..3 " " 7 | NODE_ERROR@3..4 8 | TOKEN_ASSIGN@3..4 "=" 9 | TOKEN_WHITESPACE@4..5 "\n" 10 | 11 | -------------------------------------------------------------------------------- /test_data/parser/error/ws_belongs_to_root2.nix: -------------------------------------------------------------------------------- 1 | {} = 2 | 3 | -------------------------------------------------------------------------------- 
/test_data/parser/success/apply.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..9 2 | NODE_BIN_OP@0..9 3 | NODE_APPLY@0..5 4 | NODE_APPLY@0..3 5 | NODE_IDENT@0..1 6 | TOKEN_IDENT@0..1 "a" 7 | TOKEN_WHITESPACE@1..2 " " 8 | NODE_LITERAL@2..3 9 | TOKEN_INTEGER@2..3 "1" 10 | TOKEN_WHITESPACE@3..4 " " 11 | NODE_LITERAL@4..5 12 | TOKEN_INTEGER@4..5 "2" 13 | TOKEN_WHITESPACE@5..6 " " 14 | TOKEN_ADD@6..7 "+" 15 | TOKEN_WHITESPACE@7..8 " " 16 | NODE_LITERAL@8..9 17 | TOKEN_INTEGER@8..9 "3" 18 | 19 | -------------------------------------------------------------------------------- /test_data/parser/success/apply.nix: -------------------------------------------------------------------------------- 1 | a 1 2 + 3 2 | -------------------------------------------------------------------------------- /test_data/parser/success/assert.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..18 2 | NODE_ASSERT@0..18 3 | TOKEN_ASSERT@0..6 "assert" 4 | TOKEN_WHITESPACE@6..7 " " 5 | NODE_BIN_OP@7..11 6 | NODE_IDENT@7..8 7 | TOKEN_IDENT@7..8 "a" 8 | TOKEN_EQUAL@8..10 "==" 9 | NODE_IDENT@10..11 10 | TOKEN_IDENT@10..11 "b" 11 | TOKEN_SEMICOLON@11..12 ";" 12 | NODE_STRING@12..18 13 | TOKEN_STRING_START@12..13 "\"" 14 | TOKEN_STRING_CONTENT@13..17 "a==b" 15 | TOKEN_STRING_END@17..18 "\"" 16 | 17 | -------------------------------------------------------------------------------- /test_data/parser/success/assert.nix: -------------------------------------------------------------------------------- 1 | assert a==b;"a==b" 2 | -------------------------------------------------------------------------------- /test_data/parser/success/attrpath_ident.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..10 2 | NODE_ATTR_SET@0..10 3 | TOKEN_L_BRACE@0..1 "{" 4 | NODE_ATTRPATH_VALUE@1..9 5 | NODE_ATTRPATH@1..6 6 | NODE_IDENT@1..2 7 | TOKEN_IDENT@1..2 
"a" 8 | TOKEN_DOT@2..3 "." 9 | NODE_IDENT@3..4 10 | TOKEN_IDENT@3..4 "b" 11 | TOKEN_DOT@4..5 "." 12 | NODE_IDENT@5..6 13 | TOKEN_IDENT@5..6 "c" 14 | TOKEN_ASSIGN@6..7 "=" 15 | NODE_LITERAL@7..8 16 | TOKEN_INTEGER@7..8 "1" 17 | TOKEN_SEMICOLON@8..9 ";" 18 | TOKEN_R_BRACE@9..10 "}" 19 | 20 | -------------------------------------------------------------------------------- /test_data/parser/success/attrpath_ident.nix: -------------------------------------------------------------------------------- 1 | {a.b.c=1;} 2 | -------------------------------------------------------------------------------- /test_data/parser/success/attrset.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..49 2 | NODE_ATTR_SET@0..49 3 | TOKEN_L_BRACE@0..1 "{" 4 | TOKEN_WHITESPACE@1..4 "\n " 5 | NODE_ATTRPATH_VALUE@4..25 6 | NODE_ATTRPATH@4..19 7 | NODE_IDENT@4..19 8 | TOKEN_IDENT@4..19 "meaning_of_life" 9 | TOKEN_WHITESPACE@19..20 " " 10 | TOKEN_ASSIGN@20..21 "=" 11 | TOKEN_WHITESPACE@21..22 " " 12 | NODE_LITERAL@22..24 13 | TOKEN_INTEGER@22..24 "42" 14 | TOKEN_SEMICOLON@24..25 ";" 15 | TOKEN_WHITESPACE@25..28 "\n " 16 | NODE_ATTRPATH_VALUE@28..47 17 | NODE_ATTRPATH@28..38 18 | NODE_IDENT@28..38 19 | TOKEN_IDENT@28..38 "H4X0RNUM83" 20 | TOKEN_WHITESPACE@38..39 " " 21 | TOKEN_ASSIGN@39..40 "=" 22 | TOKEN_WHITESPACE@40..41 " " 23 | NODE_LITERAL@41..46 24 | TOKEN_FLOAT@41..46 "1.337" 25 | TOKEN_SEMICOLON@46..47 ";" 26 | TOKEN_WHITESPACE@47..48 "\n" 27 | TOKEN_R_BRACE@48..49 "}" 28 | 29 | -------------------------------------------------------------------------------- /test_data/parser/success/attrset.nix: -------------------------------------------------------------------------------- 1 | { 2 | meaning_of_life = 42; 3 | H4X0RNUM83 = 1.337; 4 | } 5 | -------------------------------------------------------------------------------- /test_data/parser/success/attrset_dynamic.expect: 
-------------------------------------------------------------------------------- 1 | NODE_ROOT@0..33 2 | NODE_ATTR_SET@0..33 3 | TOKEN_L_BRACE@0..1 "{" 4 | TOKEN_WHITESPACE@1..4 "\n " 5 | NODE_ATTRPATH_VALUE@4..12 6 | NODE_ATTRPATH@4..7 7 | NODE_IDENT@4..5 8 | TOKEN_IDENT@4..5 "a" 9 | TOKEN_DOT@5..6 "." 10 | NODE_IDENT@6..7 11 | TOKEN_IDENT@6..7 "b" 12 | TOKEN_WHITESPACE@7..8 " " 13 | TOKEN_ASSIGN@8..9 "=" 14 | TOKEN_WHITESPACE@9..10 " " 15 | NODE_LITERAL@10..11 16 | TOKEN_INTEGER@10..11 "2" 17 | TOKEN_SEMICOLON@11..12 ";" 18 | TOKEN_WHITESPACE@12..15 "\n " 19 | NODE_ATTRPATH_VALUE@15..31 20 | NODE_ATTRPATH@15..26 21 | NODE_STRING@15..21 22 | TOKEN_STRING_START@15..16 "\"" 23 | NODE_INTERPOL@16..20 24 | TOKEN_INTERPOL_START@16..18 "${" 25 | NODE_IDENT@18..19 26 | TOKEN_IDENT@18..19 "c" 27 | TOKEN_INTERPOL_END@19..20 "}" 28 | TOKEN_STRING_END@20..21 "\"" 29 | TOKEN_DOT@21..22 "." 30 | NODE_DYNAMIC@22..26 31 | TOKEN_INTERPOL_START@22..24 "${" 32 | NODE_IDENT@24..25 33 | TOKEN_IDENT@24..25 "d" 34 | TOKEN_INTERPOL_END@25..26 "}" 35 | TOKEN_WHITESPACE@26..27 " " 36 | TOKEN_ASSIGN@27..28 "=" 37 | TOKEN_WHITESPACE@28..29 " " 38 | NODE_LITERAL@29..30 39 | TOKEN_INTEGER@29..30 "3" 40 | TOKEN_SEMICOLON@30..31 ";" 41 | TOKEN_WHITESPACE@31..32 "\n" 42 | TOKEN_R_BRACE@32..33 "}" 43 | 44 | -------------------------------------------------------------------------------- /test_data/parser/success/attrset_dynamic.nix: -------------------------------------------------------------------------------- 1 | { 2 | a.b = 2; 3 | "${c}".${d} = 3; 4 | } 5 | -------------------------------------------------------------------------------- /test_data/parser/success/attrset_empty.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..2 2 | NODE_ATTR_SET@0..2 3 | TOKEN_L_BRACE@0..1 "{" 4 | TOKEN_R_BRACE@1..2 "}" 5 | 6 | -------------------------------------------------------------------------------- /test_data/parser/success/attrset_empty.nix: 
-------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /test_data/parser/success/attrset_rec.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..12 2 | NODE_ATTR_SET@0..12 3 | TOKEN_REC@0..3 "rec" 4 | TOKEN_L_BRACE@3..4 "{" 5 | NODE_ATTRPATH_VALUE@4..11 6 | NODE_ATTRPATH@4..8 7 | NODE_IDENT@4..8 8 | TOKEN_IDENT@4..8 "test" 9 | TOKEN_ASSIGN@8..9 "=" 10 | NODE_LITERAL@9..10 11 | TOKEN_INTEGER@9..10 "1" 12 | TOKEN_SEMICOLON@10..11 ";" 13 | TOKEN_R_BRACE@11..12 "}" 14 | 15 | -------------------------------------------------------------------------------- /test_data/parser/success/attrset_rec.nix: -------------------------------------------------------------------------------- 1 | rec{test=1;} 2 | -------------------------------------------------------------------------------- /test_data/parser/success/bool_arith_ops.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..20 2 | NODE_BIN_OP@0..20 3 | NODE_BIN_OP@0..3 4 | NODE_LITERAL@0..1 5 | TOKEN_INTEGER@0..1 "1" 6 | TOKEN_LESS@1..2 "<" 7 | NODE_LITERAL@2..3 8 | TOKEN_INTEGER@2..3 "2" 9 | TOKEN_OR_OR@3..5 "||" 10 | NODE_BIN_OP@5..20 11 | NODE_BIN_OP@5..14 12 | NODE_BIN_OP@5..9 13 | NODE_LITERAL@5..6 14 | TOKEN_INTEGER@5..6 "2" 15 | TOKEN_LESS_OR_EQ@6..8 "<=" 16 | NODE_LITERAL@8..9 17 | TOKEN_INTEGER@8..9 "2" 18 | TOKEN_AND_AND@9..11 "&&" 19 | NODE_BIN_OP@11..14 20 | NODE_LITERAL@11..12 21 | TOKEN_INTEGER@11..12 "2" 22 | TOKEN_MORE@12..13 ">" 23 | NODE_LITERAL@13..14 24 | TOKEN_INTEGER@13..14 "1" 25 | TOKEN_AND_AND@14..16 "&&" 26 | NODE_BIN_OP@16..20 27 | NODE_LITERAL@16..17 28 | TOKEN_INTEGER@16..17 "2" 29 | TOKEN_MORE_OR_EQ@17..19 ">=" 30 | NODE_LITERAL@19..20 31 | TOKEN_INTEGER@19..20 "2" 32 | 33 | -------------------------------------------------------------------------------- 
/test_data/parser/success/bool_arith_ops.nix: -------------------------------------------------------------------------------- 1 | 1<2||2<=2&&2>1&&2>=2 2 | -------------------------------------------------------------------------------- /test_data/parser/success/bool_ops.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..33 2 | NODE_BIN_OP@0..33 3 | NODE_IDENT@0..5 4 | TOKEN_IDENT@0..5 "false" 5 | TOKEN_WHITESPACE@5..6 " " 6 | TOKEN_IMPLICATION@6..8 "->" 7 | NODE_BIN_OP@8..33 8 | NODE_BIN_OP@8..27 9 | NODE_UNARY_OP@8..14 10 | TOKEN_INVERT@8..9 "!" 11 | NODE_IDENT@9..14 12 | TOKEN_IDENT@9..14 "false" 13 | TOKEN_AND_AND@14..16 "&&" 14 | NODE_BIN_OP@16..27 15 | NODE_IDENT@16..21 16 | TOKEN_IDENT@16..21 "false" 17 | TOKEN_EQUAL@21..23 "==" 18 | NODE_IDENT@23..27 19 | TOKEN_IDENT@23..27 "true" 20 | TOKEN_OR_OR@27..29 "||" 21 | NODE_IDENT@29..33 22 | TOKEN_IDENT@29..33 "true" 23 | 24 | -------------------------------------------------------------------------------- /test_data/parser/success/bool_ops.nix: -------------------------------------------------------------------------------- 1 | false ->!false&&false==true||true 2 | -------------------------------------------------------------------------------- /test_data/parser/success/bool_ops_eq.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..10 2 | NODE_BIN_OP@0..10 3 | NODE_BIN_OP@0..4 4 | NODE_LITERAL@0..1 5 | TOKEN_INTEGER@0..1 "1" 6 | TOKEN_EQUAL@1..3 "==" 7 | NODE_LITERAL@3..4 8 | TOKEN_INTEGER@3..4 "1" 9 | TOKEN_AND_AND@4..6 "&&" 10 | NODE_BIN_OP@6..10 11 | NODE_LITERAL@6..7 12 | TOKEN_INTEGER@6..7 "2" 13 | TOKEN_NOT_EQUAL@7..9 "!=" 14 | NODE_LITERAL@9..10 15 | TOKEN_INTEGER@9..10 "3" 16 | 17 | -------------------------------------------------------------------------------- /test_data/parser/success/bool_ops_eq.nix: -------------------------------------------------------------------------------- 1 | 
1==1&&2!=3 2 | -------------------------------------------------------------------------------- /test_data/parser/success/docs.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..306 2 | TOKEN_COMMENT@0..35 "# Test used by exampl ..." 3 | TOKEN_WHITESPACE@35..36 "\n" 4 | NODE_ATTR_SET@36..306 5 | TOKEN_REC@36..39 "rec" 6 | TOKEN_WHITESPACE@39..40 " " 7 | TOKEN_L_BRACE@40..41 "{" 8 | TOKEN_WHITESPACE@41..44 "\n " 9 | TOKEN_COMMENT@44..60 "# Usage: add x y" 10 | TOKEN_WHITESPACE@60..63 "\n " 11 | TOKEN_COMMENT@63..122 "# Adds the integers x ..." 12 | TOKEN_WHITESPACE@122..125 "\n " 13 | NODE_ATTRPATH_VALUE@125..196 14 | NODE_ATTRPATH@125..128 15 | NODE_IDENT@125..128 16 | TOKEN_IDENT@125..128 "add" 17 | TOKEN_WHITESPACE@128..129 " " 18 | TOKEN_ASSIGN@129..130 "=" 19 | TOKEN_WHITESPACE@130..135 "\n " 20 | TOKEN_COMMENT@135..150 "# First integer" 21 | TOKEN_WHITESPACE@150..155 "\n " 22 | NODE_LAMBDA@155..195 23 | NODE_IDENT_PARAM@155..156 24 | NODE_IDENT@155..156 25 | TOKEN_IDENT@155..156 "x" 26 | TOKEN_COLON@156..157 ":" 27 | TOKEN_WHITESPACE@157..162 "\n " 28 | TOKEN_COMMENT@162..178 "# Second integer" 29 | TOKEN_WHITESPACE@178..183 "\n " 30 | NODE_LAMBDA@183..195 31 | NODE_IDENT_PARAM@183..184 32 | NODE_IDENT@183..184 33 | TOKEN_IDENT@183..184 "y" 34 | TOKEN_COLON@184..185 ":" 35 | TOKEN_WHITESPACE@185..190 "\n " 36 | NODE_BIN_OP@190..195 37 | NODE_IDENT@190..191 38 | TOKEN_IDENT@190..191 "x" 39 | TOKEN_WHITESPACE@191..192 " " 40 | TOKEN_ADD@192..193 "+" 41 | TOKEN_WHITESPACE@193..194 " " 42 | NODE_IDENT@194..195 43 | TOKEN_IDENT@194..195 "y" 44 | TOKEN_SEMICOLON@195..196 ";" 45 | TOKEN_WHITESPACE@196..199 "\n " 46 | TOKEN_COMMENT@199..216 "# Usage: sum nums" 47 | TOKEN_WHITESPACE@216..219 "\n " 48 | TOKEN_COMMENT@219..262 "# Returns the sum of ..." 
49 | TOKEN_WHITESPACE@262..265 "\n " 50 | NODE_ATTRPATH_VALUE@265..304 51 | NODE_ATTRPATH@265..268 52 | NODE_IDENT@265..268 53 | TOKEN_IDENT@265..268 "sum" 54 | TOKEN_WHITESPACE@268..269 " " 55 | TOKEN_ASSIGN@269..270 "=" 56 | TOKEN_WHITESPACE@270..271 " " 57 | NODE_LAMBDA@271..303 58 | NODE_IDENT_PARAM@271..275 59 | NODE_IDENT@271..275 60 | TOKEN_IDENT@271..275 "nums" 61 | TOKEN_COLON@275..276 ":" 62 | TOKEN_WHITESPACE@276..277 " " 63 | NODE_APPLY@277..303 64 | NODE_APPLY@277..298 65 | NODE_APPLY@277..296 66 | NODE_SELECT@277..292 67 | NODE_IDENT@277..285 68 | TOKEN_IDENT@277..285 "builtins" 69 | TOKEN_DOT@285..286 "." 70 | NODE_ATTRPATH@286..292 71 | NODE_IDENT@286..292 72 | TOKEN_IDENT@286..292 "foldl'" 73 | TOKEN_WHITESPACE@292..293 " " 74 | NODE_IDENT@293..296 75 | TOKEN_IDENT@293..296 "add" 76 | TOKEN_WHITESPACE@296..297 " " 77 | NODE_LITERAL@297..298 78 | TOKEN_INTEGER@297..298 "0" 79 | TOKEN_WHITESPACE@298..299 " " 80 | NODE_IDENT@299..303 81 | TOKEN_IDENT@299..303 "nums" 82 | TOKEN_SEMICOLON@303..304 ";" 83 | TOKEN_WHITESPACE@304..305 "\n" 84 | TOKEN_R_BRACE@305..306 "}" 85 | 86 | -------------------------------------------------------------------------------- /test_data/parser/success/docs.nix: -------------------------------------------------------------------------------- 1 | # Test used by examples/list-fns.rs 2 | rec { 3 | # Usage: add x y 4 | # Adds the integers x and y together and returns the result 5 | add = 6 | # First integer 7 | x: 8 | # Second integer 9 | y: 10 | x + y; 11 | # Usage: sum nums 12 | # Returns the sum of the integer array nums 13 | sum = nums: builtins.foldl' add 0 nums; 14 | } 15 | -------------------------------------------------------------------------------- /test_data/parser/success/has_attr_prec.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..11 2 | NODE_BIN_OP@0..11 3 | NODE_HAS_ATTR@0..5 4 | NODE_IDENT@0..1 5 | TOKEN_IDENT@0..1 "a" 6 | TOKEN_QUESTION@1..2 "?" 
7 | NODE_ATTRPATH@2..5 8 | NODE_STRING@2..5 9 | TOKEN_STRING_START@2..3 "\"" 10 | TOKEN_STRING_CONTENT@3..4 "b" 11 | TOKEN_STRING_END@4..5 "\"" 12 | TOKEN_AND_AND@5..7 "&&" 13 | NODE_IDENT@7..11 14 | TOKEN_IDENT@7..11 "true" 15 | 16 | -------------------------------------------------------------------------------- /test_data/parser/success/has_attr_prec.nix: -------------------------------------------------------------------------------- 1 | a?"b"&&true 2 | -------------------------------------------------------------------------------- /test_data/parser/success/if_elseif_else.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..44 2 | NODE_IF_ELSE@0..44 3 | TOKEN_IF@0..2 "if" 4 | TOKEN_WHITESPACE@2..3 " " 5 | NODE_IDENT@3..8 6 | TOKEN_IDENT@3..8 "false" 7 | TOKEN_WHITESPACE@8..9 " " 8 | TOKEN_THEN@9..13 "then" 9 | TOKEN_WHITESPACE@13..14 " " 10 | NODE_LITERAL@14..15 11 | TOKEN_INTEGER@14..15 "1" 12 | TOKEN_WHITESPACE@15..16 " " 13 | TOKEN_ELSE@16..20 "else" 14 | TOKEN_WHITESPACE@20..21 " " 15 | NODE_IF_ELSE@21..44 16 | TOKEN_IF@21..23 "if" 17 | TOKEN_WHITESPACE@23..24 " " 18 | NODE_IDENT@24..28 19 | TOKEN_IDENT@24..28 "true" 20 | TOKEN_WHITESPACE@28..29 " " 21 | TOKEN_THEN@29..33 "then" 22 | TOKEN_WHITESPACE@33..34 " " 23 | NODE_IDENT@34..37 24 | TOKEN_IDENT@34..37 "two" 25 | TOKEN_WHITESPACE@37..38 " " 26 | TOKEN_ELSE@38..42 "else" 27 | TOKEN_WHITESPACE@42..43 " " 28 | NODE_LITERAL@43..44 29 | TOKEN_INTEGER@43..44 "3" 30 | 31 | -------------------------------------------------------------------------------- /test_data/parser/success/if_elseif_else.nix: -------------------------------------------------------------------------------- 1 | if false then 1 else if true then two else 3 2 | -------------------------------------------------------------------------------- /test_data/parser/success/import_nixpkgs.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..17 
2 | NODE_APPLY@0..17 3 | NODE_APPLY@0..15 4 | NODE_IDENT@0..6 5 | TOKEN_IDENT@0..6 "import" 6 | NODE_PATH@6..15 7 | TOKEN_PATH@6..15 "" 8 | NODE_ATTR_SET@15..17 9 | TOKEN_L_BRACE@15..16 "{" 10 | TOKEN_R_BRACE@16..17 "}" 11 | 12 | -------------------------------------------------------------------------------- /test_data/parser/success/import_nixpkgs.nix: -------------------------------------------------------------------------------- 1 | import{} 2 | -------------------------------------------------------------------------------- /test_data/parser/success/inherit.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..96 2 | NODE_LET_IN@0..96 3 | TOKEN_LET@0..3 "let" 4 | TOKEN_WHITESPACE@3..6 "\n " 5 | NODE_ATTRPATH_VALUE@6..12 6 | NODE_ATTRPATH@6..7 7 | NODE_IDENT@6..7 8 | TOKEN_IDENT@6..7 "y" 9 | TOKEN_WHITESPACE@7..8 " " 10 | TOKEN_ASSIGN@8..9 "=" 11 | TOKEN_WHITESPACE@9..10 " " 12 | NODE_LITERAL@10..11 13 | TOKEN_INTEGER@10..11 "2" 14 | TOKEN_SEMICOLON@11..12 ";" 15 | TOKEN_WHITESPACE@12..15 "\n " 16 | NODE_ATTRPATH_VALUE@15..46 17 | NODE_ATTRPATH@15..18 18 | NODE_IDENT@15..18 19 | TOKEN_IDENT@15..18 "set" 20 | TOKEN_WHITESPACE@18..19 " " 21 | TOKEN_ASSIGN@19..20 "=" 22 | TOKEN_WHITESPACE@20..21 " " 23 | NODE_ATTR_SET@21..45 24 | TOKEN_L_BRACE@21..22 "{" 25 | TOKEN_WHITESPACE@22..23 " " 26 | NODE_ATTRPATH_VALUE@23..29 27 | NODE_ATTRPATH@23..24 28 | NODE_IDENT@23..24 29 | TOKEN_IDENT@23..24 "z" 30 | TOKEN_WHITESPACE@24..25 " " 31 | TOKEN_ASSIGN@25..26 "=" 32 | TOKEN_WHITESPACE@26..27 " " 33 | NODE_LITERAL@27..28 34 | TOKEN_INTEGER@27..28 "3" 35 | TOKEN_SEMICOLON@28..29 ";" 36 | TOKEN_WHITESPACE@29..30 " " 37 | NODE_ATTRPATH_VALUE@30..36 38 | NODE_ATTRPATH@30..31 39 | NODE_IDENT@30..31 40 | TOKEN_IDENT@30..31 "a" 41 | TOKEN_WHITESPACE@31..32 " " 42 | TOKEN_ASSIGN@32..33 "=" 43 | TOKEN_WHITESPACE@33..34 " " 44 | NODE_LITERAL@34..35 45 | TOKEN_INTEGER@34..35 "4" 46 | TOKEN_SEMICOLON@35..36 ";" 47 | 
TOKEN_WHITESPACE@36..37 " " 48 | NODE_ATTRPATH_VALUE@37..43 49 | NODE_ATTRPATH@37..38 50 | NODE_IDENT@37..38 51 | TOKEN_IDENT@37..38 "b" 52 | TOKEN_WHITESPACE@38..39 " " 53 | TOKEN_ASSIGN@39..40 "=" 54 | TOKEN_WHITESPACE@40..41 " " 55 | NODE_LITERAL@41..42 56 | TOKEN_INTEGER@41..42 "5" 57 | TOKEN_SEMICOLON@42..43 ";" 58 | TOKEN_WHITESPACE@43..44 " " 59 | TOKEN_R_BRACE@44..45 "}" 60 | TOKEN_SEMICOLON@45..46 ";" 61 | TOKEN_WHITESPACE@46..47 "\n" 62 | TOKEN_IN@47..49 "in" 63 | TOKEN_WHITESPACE@49..50 " " 64 | NODE_ATTR_SET@50..96 65 | TOKEN_L_BRACE@50..51 "{" 66 | TOKEN_WHITESPACE@51..54 "\n " 67 | NODE_ATTRPATH_VALUE@54..60 68 | NODE_ATTRPATH@54..55 69 | NODE_IDENT@54..55 70 | TOKEN_IDENT@54..55 "x" 71 | TOKEN_WHITESPACE@55..56 " " 72 | TOKEN_ASSIGN@56..57 "=" 73 | TOKEN_WHITESPACE@57..58 " " 74 | NODE_LITERAL@58..59 75 | TOKEN_INTEGER@58..59 "1" 76 | TOKEN_SEMICOLON@59..60 ";" 77 | TOKEN_WHITESPACE@60..63 "\n " 78 | NODE_INHERIT@63..73 79 | TOKEN_INHERIT@63..70 "inherit" 80 | TOKEN_WHITESPACE@70..71 " " 81 | NODE_IDENT@71..72 82 | TOKEN_IDENT@71..72 "y" 83 | TOKEN_SEMICOLON@72..73 ";" 84 | TOKEN_WHITESPACE@73..76 "\n " 85 | NODE_INHERIT@76..94 86 | TOKEN_INHERIT@76..83 "inherit" 87 | TOKEN_WHITESPACE@83..84 " " 88 | NODE_INHERIT_FROM@84..89 89 | TOKEN_L_PAREN@84..85 "(" 90 | NODE_IDENT@85..88 91 | TOKEN_IDENT@85..88 "set" 92 | TOKEN_R_PAREN@88..89 ")" 93 | TOKEN_WHITESPACE@89..90 " " 94 | NODE_IDENT@90..91 95 | TOKEN_IDENT@90..91 "z" 96 | TOKEN_WHITESPACE@91..92 " " 97 | NODE_IDENT@92..93 98 | TOKEN_IDENT@92..93 "a" 99 | TOKEN_SEMICOLON@93..94 ";" 100 | TOKEN_WHITESPACE@94..95 "\n" 101 | TOKEN_R_BRACE@95..96 "}" 102 | 103 | -------------------------------------------------------------------------------- /test_data/parser/success/inherit.nix: -------------------------------------------------------------------------------- 1 | let 2 | y = 2; 3 | set = { z = 3; a = 4; b = 5; }; 4 | in { 5 | x = 1; 6 | inherit y; 7 | inherit (set) z a; 8 | } 9 | 
-------------------------------------------------------------------------------- /test_data/parser/success/inherit_dynamic.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..95 2 | NODE_ATTR_SET@0..95 3 | TOKEN_L_BRACE@0..1 "{" 4 | NODE_ATTRPATH_VALUE@1..5 5 | NODE_ATTRPATH@1..2 6 | NODE_IDENT@1..2 7 | TOKEN_IDENT@1..2 "a" 8 | TOKEN_ASSIGN@2..3 "=" 9 | NODE_LITERAL@3..4 10 | TOKEN_INTEGER@3..4 "1" 11 | TOKEN_SEMICOLON@4..5 ";" 12 | NODE_INHERIT@5..17 13 | TOKEN_INHERIT@5..12 "inherit" 14 | TOKEN_WHITESPACE@12..13 " " 15 | NODE_IDENT@13..14 16 | TOKEN_IDENT@13..14 "b" 17 | TOKEN_WHITESPACE@14..15 " " 18 | NODE_IDENT@15..16 19 | TOKEN_IDENT@15..16 "c" 20 | TOKEN_SEMICOLON@16..17 ";" 21 | NODE_INHERIT@17..35 22 | TOKEN_INHERIT@17..24 "inherit" 23 | TOKEN_WHITESPACE@24..25 " " 24 | NODE_INHERIT_FROM@25..30 25 | TOKEN_L_PAREN@25..26 "(" 26 | NODE_IDENT@26..29 27 | TOKEN_IDENT@26..29 "set" 28 | TOKEN_R_PAREN@29..30 ")" 29 | TOKEN_WHITESPACE@30..31 " " 30 | NODE_IDENT@31..32 31 | TOKEN_IDENT@31..32 "d" 32 | TOKEN_WHITESPACE@32..33 " " 33 | NODE_IDENT@33..34 34 | TOKEN_IDENT@33..34 "e" 35 | TOKEN_SEMICOLON@34..35 ";" 36 | TOKEN_WHITESPACE@35..36 " " 37 | NODE_INHERIT@36..51 38 | TOKEN_INHERIT@36..43 "inherit" 39 | TOKEN_WHITESPACE@43..44 " " 40 | NODE_DYNAMIC@44..50 41 | TOKEN_INTERPOL_START@44..46 "${" 42 | NODE_STRING@46..49 43 | TOKEN_STRING_START@46..47 "\"" 44 | TOKEN_STRING_CONTENT@47..48 "f" 45 | TOKEN_STRING_END@48..49 "\"" 46 | TOKEN_INTERPOL_END@49..50 "}" 47 | TOKEN_SEMICOLON@50..51 ";" 48 | TOKEN_WHITESPACE@51..52 " " 49 | NODE_INHERIT@52..66 50 | TOKEN_INHERIT@52..59 "inherit" 51 | TOKEN_WHITESPACE@59..60 " " 52 | NODE_STRING@60..65 53 | TOKEN_STRING_START@60..61 "\"" 54 | TOKEN_STRING_CONTENT@61..64 "foo" 55 | TOKEN_STRING_END@64..65 "\"" 56 | TOKEN_SEMICOLON@65..66 ";" 57 | TOKEN_WHITESPACE@66..67 " " 58 | NODE_INHERIT@67..85 59 | TOKEN_INHERIT@67..74 "inherit" 60 | TOKEN_WHITESPACE@74..75 " " 61 | 
NODE_INHERIT_FROM@75..78 62 | TOKEN_L_PAREN@75..76 "(" 63 | NODE_IDENT@76..77 64 | TOKEN_IDENT@76..77 "a" 65 | TOKEN_R_PAREN@77..78 ")" 66 | TOKEN_WHITESPACE@78..79 " " 67 | NODE_STRING@79..84 68 | TOKEN_STRING_START@79..80 "\"" 69 | TOKEN_STRING_CONTENT@80..83 "bar" 70 | TOKEN_STRING_END@83..84 "\"" 71 | TOKEN_SEMICOLON@84..85 ";" 72 | TOKEN_WHITESPACE@85..86 " " 73 | NODE_INHERIT@86..94 74 | TOKEN_INHERIT@86..93 "inherit" 75 | TOKEN_SEMICOLON@93..94 ";" 76 | TOKEN_R_BRACE@94..95 "}" 77 | 78 | -------------------------------------------------------------------------------- /test_data/parser/success/inherit_dynamic.nix: -------------------------------------------------------------------------------- 1 | {a=1;inherit b c;inherit (set) d e; inherit ${"f"}; inherit "foo"; inherit (a) "bar"; inherit;} 2 | -------------------------------------------------------------------------------- /test_data/parser/success/interpolation.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..270 2 | NODE_LET_IN@0..270 3 | TOKEN_LET@0..3 "let" 4 | TOKEN_WHITESPACE@3..6 "\n " 5 | NODE_ATTRPATH_VALUE@6..22 6 | NODE_ATTRPATH@6..11 7 | NODE_IDENT@6..11 8 | TOKEN_IDENT@6..11 "world" 9 | TOKEN_WHITESPACE@11..12 " " 10 | TOKEN_ASSIGN@12..13 "=" 11 | TOKEN_WHITESPACE@13..14 " " 12 | NODE_STRING@14..21 13 | TOKEN_STRING_START@14..15 "\"" 14 | TOKEN_STRING_CONTENT@15..20 "World" 15 | TOKEN_STRING_END@20..21 "\"" 16 | TOKEN_SEMICOLON@21..22 ";" 17 | TOKEN_WHITESPACE@22..23 "\n" 18 | TOKEN_IN@23..25 "in" 19 | TOKEN_WHITESPACE@25..26 " " 20 | NODE_ATTR_SET@26..270 21 | TOKEN_L_BRACE@26..27 "{" 22 | TOKEN_WHITESPACE@27..30 "\n " 23 | NODE_ATTRPATH_VALUE@30..57 24 | NODE_ATTRPATH@30..36 25 | NODE_IDENT@30..36 26 | TOKEN_IDENT@30..36 "string" 27 | TOKEN_WHITESPACE@36..37 " " 28 | TOKEN_ASSIGN@37..38 "=" 29 | TOKEN_WHITESPACE@38..39 " " 30 | NODE_STRING@39..56 31 | TOKEN_STRING_START@39..40 "\"" 32 | TOKEN_STRING_CONTENT@40..46 "Hello " 33 | 
NODE_INTERPOL@46..54 34 | TOKEN_INTERPOL_START@46..48 "${" 35 | NODE_IDENT@48..53 36 | TOKEN_IDENT@48..53 "world" 37 | TOKEN_INTERPOL_END@53..54 "}" 38 | TOKEN_STRING_CONTENT@54..55 "!" 39 | TOKEN_STRING_END@55..56 "\"" 40 | TOKEN_SEMICOLON@56..57 ";" 41 | TOKEN_WHITESPACE@57..60 "\n " 42 | NODE_ATTRPATH_VALUE@60..268 43 | NODE_ATTRPATH@60..69 44 | NODE_IDENT@60..69 45 | TOKEN_IDENT@60..69 "multiline" 46 | TOKEN_WHITESPACE@69..70 " " 47 | TOKEN_ASSIGN@70..71 "=" 48 | TOKEN_WHITESPACE@71..72 " " 49 | NODE_STRING@72..267 50 | TOKEN_STRING_START@72..74 "''" 51 | TOKEN_STRING_CONTENT@74..101 "\n The set's x valu ..." 52 | NODE_INTERPOL@101..161 53 | TOKEN_INTERPOL_START@101..103 "${" 54 | TOKEN_WHITESPACE@103..110 "\n " 55 | NODE_SELECT@110..155 56 | NODE_ATTR_SET@110..153 57 | TOKEN_L_BRACE@110..111 "{" 58 | TOKEN_WHITESPACE@111..120 "\n " 59 | NODE_ATTRPATH_VALUE@120..128 60 | NODE_ATTRPATH@120..121 61 | NODE_IDENT@120..121 62 | TOKEN_IDENT@120..121 "x" 63 | TOKEN_WHITESPACE@121..122 " " 64 | TOKEN_ASSIGN@122..123 "=" 65 | TOKEN_WHITESPACE@123..124 " " 66 | NODE_STRING@124..127 67 | TOKEN_STRING_START@124..125 "\"" 68 | TOKEN_STRING_CONTENT@125..126 "1" 69 | TOKEN_STRING_END@126..127 "\"" 70 | TOKEN_SEMICOLON@127..128 ";" 71 | TOKEN_WHITESPACE@128..137 "\n " 72 | NODE_ATTRPATH_VALUE@137..145 73 | NODE_ATTRPATH@137..138 74 | NODE_IDENT@137..138 75 | TOKEN_IDENT@137..138 "y" 76 | TOKEN_WHITESPACE@138..139 " " 77 | TOKEN_ASSIGN@139..140 "=" 78 | TOKEN_WHITESPACE@140..141 " " 79 | NODE_STRING@141..144 80 | TOKEN_STRING_START@141..142 "\"" 81 | TOKEN_STRING_CONTENT@142..143 "2" 82 | TOKEN_STRING_END@143..144 "\"" 83 | TOKEN_SEMICOLON@144..145 ";" 84 | TOKEN_WHITESPACE@145..152 "\n " 85 | TOKEN_R_BRACE@152..153 "}" 86 | TOKEN_DOT@153..154 "." 87 | NODE_ATTRPATH@154..155 88 | NODE_IDENT@154..155 89 | TOKEN_IDENT@154..155 "x" 90 | TOKEN_WHITESPACE@155..160 "\n " 91 | TOKEN_INTERPOL_END@160..161 "}" 92 | TOKEN_STRING_CONTENT@161..253 "\n\n This line shall ..." 
93 | NODE_INTERPOL@253..262 94 | TOKEN_INTERPOL_START@253..255 "${" 95 | TOKEN_WHITESPACE@255..256 " " 96 | NODE_STRING@256..260 97 | TOKEN_STRING_START@256..257 "\"" 98 | TOKEN_STRING_CONTENT@257..259 "hi" 99 | TOKEN_STRING_END@259..260 "\"" 100 | TOKEN_WHITESPACE@260..261 " " 101 | TOKEN_INTERPOL_END@261..262 "}" 102 | TOKEN_STRING_CONTENT@262..265 "\n " 103 | TOKEN_STRING_END@265..267 "''" 104 | TOKEN_SEMICOLON@267..268 ";" 105 | TOKEN_WHITESPACE@268..269 "\n" 106 | TOKEN_R_BRACE@269..270 "}" 107 | 108 | -------------------------------------------------------------------------------- /test_data/parser/success/interpolation.nix: -------------------------------------------------------------------------------- 1 | let 2 | world = "World"; 3 | in { 4 | string = "Hello ${world}!"; 5 | multiline = '' 6 | The set's x value is: ${ 7 | { 8 | x = "1"; 9 | y = "2"; 10 | }.x 11 | } 12 | 13 | This line shall have no indention 14 | This line shall be indented by 2 15 | ''\n 16 | ${ "hi" } 17 | ''; 18 | } 19 | -------------------------------------------------------------------------------- /test_data/parser/success/lambda_is_not_uri.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..6 2 | NODE_LAMBDA@0..6 3 | NODE_IDENT_PARAM@0..1 4 | NODE_IDENT@0..1 5 | TOKEN_IDENT@0..1 "_" 6 | TOKEN_COLON@1..2 ":" 7 | NODE_IDENT@2..6 8 | TOKEN_IDENT@2..6 "null" 9 | 10 | -------------------------------------------------------------------------------- /test_data/parser/success/lambda_is_not_uri.nix: -------------------------------------------------------------------------------- 1 | _:null -------------------------------------------------------------------------------- /test_data/parser/success/lambda_list.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..7 2 | NODE_LAMBDA@0..7 3 | NODE_IDENT_PARAM@0..1 4 | NODE_IDENT@0..1 5 | TOKEN_IDENT@0..1 "m" 6 | TOKEN_COLON@1..2 ":" 7 | 
NODE_LIST@2..7 8 | TOKEN_L_BRACK@2..3 "[" 9 | TOKEN_WHITESPACE@3..4 " " 10 | NODE_IDENT@4..5 11 | TOKEN_IDENT@4..5 "m" 12 | TOKEN_WHITESPACE@5..6 " " 13 | TOKEN_R_BRACK@6..7 "]" 14 | 15 | -------------------------------------------------------------------------------- /test_data/parser/success/lambda_list.nix: -------------------------------------------------------------------------------- 1 | m:[ m ] 2 | -------------------------------------------------------------------------------- /test_data/parser/success/lambda_nested.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..11 2 | NODE_LAMBDA@0..11 3 | NODE_IDENT_PARAM@0..1 4 | NODE_IDENT@0..1 5 | TOKEN_IDENT@0..1 "a" 6 | TOKEN_COLON@1..2 ":" 7 | TOKEN_WHITESPACE@2..3 " " 8 | NODE_LAMBDA@3..11 9 | NODE_IDENT_PARAM@3..4 10 | NODE_IDENT@3..4 11 | TOKEN_IDENT@3..4 "b" 12 | TOKEN_COLON@4..5 ":" 13 | TOKEN_WHITESPACE@5..6 " " 14 | NODE_BIN_OP@6..11 15 | NODE_IDENT@6..7 16 | TOKEN_IDENT@6..7 "a" 17 | TOKEN_WHITESPACE@7..8 " " 18 | TOKEN_ADD@8..9 "+" 19 | TOKEN_WHITESPACE@9..10 " " 20 | NODE_IDENT@10..11 21 | TOKEN_IDENT@10..11 "b" 22 | 23 | -------------------------------------------------------------------------------- /test_data/parser/success/lambda_nested.nix: -------------------------------------------------------------------------------- 1 | a: b: a + b 2 | -------------------------------------------------------------------------------- /test_data/parser/success/let.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..16 2 | NODE_LET_IN@0..16 3 | TOKEN_LET@0..3 "let" 4 | TOKEN_WHITESPACE@3..4 " " 5 | NODE_ATTRPATH_VALUE@4..11 6 | NODE_ATTRPATH@4..5 7 | NODE_IDENT@4..5 8 | TOKEN_IDENT@4..5 "a" 9 | TOKEN_WHITESPACE@5..6 " " 10 | TOKEN_ASSIGN@6..7 "=" 11 | TOKEN_WHITESPACE@7..8 " " 12 | NODE_LITERAL@8..10 13 | TOKEN_INTEGER@8..10 "42" 14 | TOKEN_SEMICOLON@10..11 ";" 15 | TOKEN_WHITESPACE@11..12 " " 16 | 
TOKEN_IN@12..14 "in" 17 | TOKEN_WHITESPACE@14..15 " " 18 | NODE_IDENT@15..16 19 | TOKEN_IDENT@15..16 "a" 20 | 21 | -------------------------------------------------------------------------------- /test_data/parser/success/let.nix: -------------------------------------------------------------------------------- 1 | let a = 42; in a 2 | -------------------------------------------------------------------------------- /test_data/parser/success/let_legacy.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..17 2 | NODE_LEGACY_LET@0..17 3 | TOKEN_LET@0..3 "let" 4 | TOKEN_L_BRACE@3..4 "{" 5 | NODE_ATTRPATH_VALUE@4..9 6 | NODE_ATTRPATH@4..5 7 | NODE_IDENT@4..5 8 | TOKEN_IDENT@4..5 "a" 9 | TOKEN_ASSIGN@5..6 "=" 10 | NODE_LITERAL@6..8 11 | TOKEN_INTEGER@6..8 "42" 12 | TOKEN_SEMICOLON@8..9 ";" 13 | NODE_ATTRPATH_VALUE@9..16 14 | NODE_ATTRPATH@9..13 15 | NODE_IDENT@9..13 16 | TOKEN_IDENT@9..13 "body" 17 | TOKEN_ASSIGN@13..14 "=" 18 | NODE_IDENT@14..15 19 | TOKEN_IDENT@14..15 "a" 20 | TOKEN_SEMICOLON@15..16 ";" 21 | TOKEN_R_BRACE@16..17 "}" 22 | 23 | -------------------------------------------------------------------------------- /test_data/parser/success/let_legacy.nix: -------------------------------------------------------------------------------- 1 | let{a=42;body=a;} 2 | -------------------------------------------------------------------------------- /test_data/parser/success/list.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..13 2 | NODE_LIST@0..13 3 | TOKEN_L_BRACK@0..1 "[" 4 | NODE_IDENT@1..2 5 | TOKEN_IDENT@1..2 "a" 6 | TOKEN_WHITESPACE@2..3 " " 7 | NODE_LITERAL@3..4 8 | TOKEN_INTEGER@3..4 "2" 9 | TOKEN_WHITESPACE@4..5 " " 10 | NODE_LITERAL@5..6 11 | TOKEN_INTEGER@5..6 "3" 12 | TOKEN_WHITESPACE@6..7 " " 13 | NODE_STRING@7..12 14 | TOKEN_STRING_START@7..8 "\"" 15 | TOKEN_STRING_CONTENT@8..11 "lol" 16 | TOKEN_STRING_END@11..12 "\"" 17 | TOKEN_R_BRACK@12..13 "]" 
18 | 19 | -------------------------------------------------------------------------------- /test_data/parser/success/list.nix: -------------------------------------------------------------------------------- 1 | [a 2 3 "lol"] 2 | -------------------------------------------------------------------------------- /test_data/parser/success/list_concat.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..15 2 | NODE_BIN_OP@0..15 3 | NODE_LIST@0..3 4 | TOKEN_L_BRACK@0..1 "[" 5 | NODE_LITERAL@1..2 6 | TOKEN_INTEGER@1..2 "1" 7 | TOKEN_R_BRACK@2..3 "]" 8 | TOKEN_CONCAT@3..5 "++" 9 | NODE_BIN_OP@5..15 10 | NODE_LIST@5..10 11 | TOKEN_L_BRACK@5..6 "[" 12 | NODE_IDENT@6..9 13 | TOKEN_IDENT@6..9 "two" 14 | TOKEN_R_BRACK@9..10 "]" 15 | TOKEN_CONCAT@10..12 "++" 16 | NODE_LIST@12..15 17 | TOKEN_L_BRACK@12..13 "[" 18 | NODE_LITERAL@13..14 19 | TOKEN_INTEGER@13..14 "3" 20 | TOKEN_R_BRACK@14..15 "]" 21 | 22 | -------------------------------------------------------------------------------- /test_data/parser/success/list_concat.nix: -------------------------------------------------------------------------------- 1 | [1]++[two]++[3] 2 | -------------------------------------------------------------------------------- /test_data/parser/success/math.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..23 2 | NODE_BIN_OP@0..23 3 | NODE_BIN_OP@0..9 4 | NODE_LITERAL@0..1 5 | TOKEN_INTEGER@0..1 "1" 6 | TOKEN_WHITESPACE@1..2 " " 7 | TOKEN_ADD@2..3 "+" 8 | TOKEN_WHITESPACE@3..4 " " 9 | NODE_BIN_OP@4..9 10 | NODE_LITERAL@4..5 11 | TOKEN_INTEGER@4..5 "2" 12 | TOKEN_WHITESPACE@5..6 " " 13 | TOKEN_MUL@6..7 "*" 14 | TOKEN_WHITESPACE@7..8 " " 15 | NODE_LITERAL@8..9 16 | TOKEN_INTEGER@8..9 "3" 17 | TOKEN_WHITESPACE@9..10 " " 18 | TOKEN_ADD@10..11 "+" 19 | TOKEN_WHITESPACE@11..12 " " 20 | NODE_BIN_OP@12..23 21 | NODE_LITERAL@12..13 22 | TOKEN_INTEGER@12..13 "4" 23 | TOKEN_WHITESPACE@13..14 " " 24 | 
TOKEN_DIV@14..15 "/" 25 | TOKEN_WHITESPACE@15..16 " " 26 | NODE_PAREN@16..23 27 | TOKEN_L_PAREN@16..17 "(" 28 | NODE_BIN_OP@17..22 29 | NODE_LITERAL@17..18 30 | TOKEN_INTEGER@17..18 "5" 31 | TOKEN_WHITESPACE@18..19 " " 32 | TOKEN_SUB@19..20 "-" 33 | TOKEN_WHITESPACE@20..21 " " 34 | NODE_LITERAL@21..22 35 | TOKEN_INTEGER@21..22 "6" 36 | TOKEN_R_PAREN@22..23 ")" 37 | 38 | -------------------------------------------------------------------------------- /test_data/parser/success/math.nix: -------------------------------------------------------------------------------- 1 | 1 + 2 * 3 + 4 / (5 - 6) 2 | -------------------------------------------------------------------------------- /test_data/parser/success/math2.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..13 2 | NODE_BIN_OP@0..13 3 | NODE_BIN_OP@0..5 4 | NODE_LITERAL@0..1 5 | TOKEN_INTEGER@0..1 "1" 6 | TOKEN_WHITESPACE@1..2 " " 7 | TOKEN_ADD@2..3 "+" 8 | TOKEN_WHITESPACE@3..4 " " 9 | NODE_LITERAL@4..5 10 | TOKEN_INTEGER@4..5 "2" 11 | TOKEN_WHITESPACE@5..6 " " 12 | TOKEN_ADD@6..7 "+" 13 | TOKEN_WHITESPACE@7..8 " " 14 | NODE_BIN_OP@8..13 15 | NODE_LITERAL@8..9 16 | TOKEN_INTEGER@8..9 "3" 17 | TOKEN_WHITESPACE@9..10 " " 18 | TOKEN_MUL@10..11 "*" 19 | TOKEN_WHITESPACE@11..12 " " 20 | NODE_LITERAL@12..13 21 | TOKEN_INTEGER@12..13 "4" 22 | 23 | -------------------------------------------------------------------------------- /test_data/parser/success/math2.nix: -------------------------------------------------------------------------------- 1 | 1 + 2 + 3 * 4 2 | -------------------------------------------------------------------------------- /test_data/parser/success/math_no_ws.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..8 2 | NODE_BIN_OP@0..8 3 | NODE_LITERAL@0..1 4 | TOKEN_INTEGER@0..1 "5" 5 | TOKEN_MUL@1..2 "*" 6 | NODE_UNARY_OP@2..8 7 | TOKEN_SUB@2..3 "-" 8 | NODE_PAREN@3..8 9 | TOKEN_L_PAREN@3..4 "(" 
10 | NODE_BIN_OP@4..7 11 | NODE_LITERAL@4..5 12 | TOKEN_INTEGER@4..5 "3" 13 | TOKEN_SUB@5..6 "-" 14 | NODE_LITERAL@6..7 15 | TOKEN_INTEGER@6..7 "2" 16 | TOKEN_R_PAREN@7..8 ")" 17 | 18 | -------------------------------------------------------------------------------- /test_data/parser/success/math_no_ws.nix: -------------------------------------------------------------------------------- 1 | 5*-(3-2) 2 | -------------------------------------------------------------------------------- /test_data/parser/success/merge.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..14 2 | NODE_BIN_OP@0..14 3 | NODE_ATTR_SET@0..6 4 | TOKEN_L_BRACE@0..1 "{" 5 | NODE_ATTRPATH_VALUE@1..5 6 | NODE_ATTRPATH@1..2 7 | NODE_IDENT@1..2 8 | TOKEN_IDENT@1..2 "a" 9 | TOKEN_ASSIGN@2..3 "=" 10 | NODE_LITERAL@3..4 11 | TOKEN_INTEGER@3..4 "1" 12 | TOKEN_SEMICOLON@4..5 ";" 13 | TOKEN_R_BRACE@5..6 "}" 14 | TOKEN_UPDATE@6..8 "//" 15 | NODE_ATTR_SET@8..14 16 | TOKEN_L_BRACE@8..9 "{" 17 | NODE_ATTRPATH_VALUE@9..13 18 | NODE_ATTRPATH@9..10 19 | NODE_IDENT@9..10 20 | TOKEN_IDENT@9..10 "b" 21 | TOKEN_ASSIGN@10..11 "=" 22 | NODE_LITERAL@11..12 23 | TOKEN_INTEGER@11..12 "2" 24 | TOKEN_SEMICOLON@12..13 ";" 25 | TOKEN_R_BRACE@13..14 "}" 26 | 27 | -------------------------------------------------------------------------------- /test_data/parser/success/merge.nix: -------------------------------------------------------------------------------- 1 | {a=1;}//{b=2;} 2 | -------------------------------------------------------------------------------- /test_data/parser/success/multiple.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..15 2 | NODE_HAS_ATTR@0..15 3 | NODE_HAS_ATTR@0..6 4 | NODE_ATTR_SET@0..2 5 | TOKEN_L_BRACE@0..1 "{" 6 | TOKEN_R_BRACE@1..2 "}" 7 | TOKEN_WHITESPACE@2..3 " " 8 | TOKEN_QUESTION@3..4 "?" 
9 | TOKEN_WHITESPACE@4..5 " " 10 | NODE_ATTRPATH@5..6 11 | NODE_IDENT@5..6 12 | TOKEN_IDENT@5..6 "a" 13 | TOKEN_WHITESPACE@6..7 " " 14 | TOKEN_QUESTION@7..8 "?" 15 | TOKEN_WHITESPACE@8..9 " " 16 | NODE_ATTRPATH@9..15 17 | NODE_DYNAMIC@9..15 18 | TOKEN_INTERPOL_START@9..11 "${" 19 | NODE_STRING@11..14 20 | TOKEN_STRING_START@11..12 "\"" 21 | TOKEN_STRING_CONTENT@12..13 "a" 22 | TOKEN_STRING_END@13..14 "\"" 23 | TOKEN_INTERPOL_END@14..15 "}" 24 | 25 | -------------------------------------------------------------------------------- /test_data/parser/success/multiple.nix: -------------------------------------------------------------------------------- 1 | {} ? a ? ${"a"} 2 | -------------------------------------------------------------------------------- /test_data/parser/success/operators_right_assoc.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..71 2 | NODE_LIST@0..71 3 | TOKEN_L_BRACK@0..1 "[" 4 | TOKEN_WHITESPACE@1..4 "\n " 5 | NODE_PAREN@4..17 6 | TOKEN_L_PAREN@4..5 "(" 7 | NODE_BIN_OP@5..16 8 | NODE_IDENT@5..6 9 | TOKEN_IDENT@5..6 "a" 10 | TOKEN_WHITESPACE@6..7 " " 11 | TOKEN_IMPLICATION@7..9 "->" 12 | TOKEN_WHITESPACE@9..10 " " 13 | NODE_BIN_OP@10..16 14 | NODE_IDENT@10..11 15 | TOKEN_IDENT@10..11 "b" 16 | TOKEN_WHITESPACE@11..12 " " 17 | TOKEN_IMPLICATION@12..14 "->" 18 | TOKEN_WHITESPACE@14..15 " " 19 | NODE_IDENT@15..16 20 | TOKEN_IDENT@15..16 "c" 21 | TOKEN_R_PAREN@16..17 ")" 22 | TOKEN_WHITESPACE@17..20 "\n " 23 | NODE_PAREN@20..33 24 | TOKEN_L_PAREN@20..21 "(" 25 | NODE_BIN_OP@21..32 26 | NODE_IDENT@21..22 27 | TOKEN_IDENT@21..22 "a" 28 | TOKEN_WHITESPACE@22..23 " " 29 | TOKEN_CONCAT@23..25 "++" 30 | TOKEN_WHITESPACE@25..26 " " 31 | NODE_BIN_OP@26..32 32 | NODE_IDENT@26..27 33 | TOKEN_IDENT@26..27 "b" 34 | TOKEN_WHITESPACE@27..28 " " 35 | TOKEN_CONCAT@28..30 "++" 36 | TOKEN_WHITESPACE@30..31 " " 37 | NODE_IDENT@31..32 38 | TOKEN_IDENT@31..32 "c" 39 | TOKEN_R_PAREN@32..33 ")" 40 | 
TOKEN_WHITESPACE@33..36 "\n " 41 | NODE_PAREN@36..49 42 | TOKEN_L_PAREN@36..37 "(" 43 | NODE_BIN_OP@37..48 44 | NODE_IDENT@37..38 45 | TOKEN_IDENT@37..38 "a" 46 | TOKEN_WHITESPACE@38..39 " " 47 | TOKEN_UPDATE@39..41 "//" 48 | TOKEN_WHITESPACE@41..42 " " 49 | NODE_BIN_OP@42..48 50 | NODE_IDENT@42..43 51 | TOKEN_IDENT@42..43 "b" 52 | TOKEN_WHITESPACE@43..44 " " 53 | TOKEN_UPDATE@44..46 "//" 54 | TOKEN_WHITESPACE@46..47 " " 55 | NODE_IDENT@47..48 56 | TOKEN_IDENT@47..48 "c" 57 | TOKEN_R_PAREN@48..49 ")" 58 | TOKEN_WHITESPACE@49..52 "\n " 59 | NODE_PAREN@52..69 60 | TOKEN_L_PAREN@52..53 "(" 61 | NODE_BIN_OP@53..68 62 | NODE_BIN_OP@53..63 63 | NODE_BIN_OP@53..58 64 | NODE_IDENT@53..54 65 | TOKEN_IDENT@53..54 "a" 66 | TOKEN_WHITESPACE@54..55 " " 67 | TOKEN_ADD@55..56 "+" 68 | TOKEN_WHITESPACE@56..57 " " 69 | NODE_IDENT@57..58 70 | TOKEN_IDENT@57..58 "b" 71 | TOKEN_WHITESPACE@58..59 " " 72 | TOKEN_UPDATE@59..61 "//" 73 | TOKEN_WHITESPACE@61..62 " " 74 | NODE_IDENT@62..63 75 | TOKEN_IDENT@62..63 "c" 76 | TOKEN_WHITESPACE@63..64 " " 77 | TOKEN_IMPLICATION@64..66 "->" 78 | TOKEN_WHITESPACE@66..67 " " 79 | NODE_IDENT@67..68 80 | TOKEN_IDENT@67..68 "d" 81 | TOKEN_R_PAREN@68..69 ")" 82 | TOKEN_WHITESPACE@69..70 "\n" 83 | TOKEN_R_BRACK@70..71 "]" 84 | 85 | -------------------------------------------------------------------------------- /test_data/parser/success/operators_right_assoc.nix: -------------------------------------------------------------------------------- 1 | [ 2 | (a -> b -> c) 3 | (a ++ b ++ c) 4 | (a // b // c) 5 | (a + b // c -> d) 6 | ] 7 | -------------------------------------------------------------------------------- /test_data/parser/success/or-as-ident.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..136 2 | TOKEN_COMMENT@0..117 "# https://github.com/ ..." 
3 | TOKEN_WHITESPACE@117..118 "\n" 4 | NODE_APPLY@118..136 5 | NODE_APPLY@118..130 6 | NODE_IDENT@118..121 7 | TOKEN_IDENT@118..121 "foo" 8 | TOKEN_WHITESPACE@121..122 " " 9 | NODE_APPLY@122..130 10 | NODE_IDENT@122..127 11 | TOKEN_IDENT@122..127 "foldl" 12 | TOKEN_WHITESPACE@127..128 " " 13 | NODE_IDENT@128..130 14 | TOKEN_IDENT@128..130 "or" 15 | TOKEN_WHITESPACE@130..131 " " 16 | NODE_IDENT@131..136 17 | TOKEN_IDENT@131..136 "false" 18 | 19 | -------------------------------------------------------------------------------- /test_data/parser/success/or-as-ident.nix: -------------------------------------------------------------------------------- 1 | # https://github.com/NixOS/nixpkgs/blob/38860c9e91cb00f4d8cd19c7b4e36c45680c89b5/nixos/modules/security/pam.nix#L1180 2 | foo foldl or false 3 | -------------------------------------------------------------------------------- /test_data/parser/success/or_in_attr.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..37 2 | NODE_ATTR_SET@0..37 3 | TOKEN_L_BRACE@0..1 "{" 4 | TOKEN_WHITESPACE@1..4 "\n " 5 | NODE_ATTRPATH_VALUE@4..16 6 | NODE_ATTRPATH@4..9 7 | NODE_IDENT@4..5 8 | TOKEN_IDENT@4..5 "a" 9 | TOKEN_DOT@5..6 "." 10 | NODE_IDENT@6..7 11 | TOKEN_IDENT@6..7 "b" 12 | TOKEN_DOT@7..8 "." 13 | NODE_IDENT@8..9 14 | TOKEN_IDENT@8..9 "c" 15 | TOKEN_WHITESPACE@9..10 " " 16 | TOKEN_ASSIGN@10..11 "=" 17 | TOKEN_WHITESPACE@11..12 " " 18 | NODE_LITERAL@12..15 19 | TOKEN_INTEGER@12..15 "324" 20 | TOKEN_SEMICOLON@15..16 ";" 21 | TOKEN_WHITESPACE@16..19 "\n " 22 | NODE_ATTRPATH_VALUE@19..35 23 | NODE_ATTRPATH@19..28 24 | NODE_IDENT@19..20 25 | TOKEN_IDENT@19..20 "a" 26 | TOKEN_DOT@20..21 "." 27 | NODE_IDENT@21..23 28 | TOKEN_IDENT@21..23 "or" 29 | TOKEN_DOT@23..24 "." 30 | NODE_IDENT@24..25 31 | TOKEN_IDENT@24..25 "b" 32 | TOKEN_DOT@25..26 "." 
33 | NODE_IDENT@26..28 34 | TOKEN_IDENT@26..28 "or" 35 | TOKEN_WHITESPACE@28..29 " " 36 | TOKEN_ASSIGN@29..30 "=" 37 | TOKEN_WHITESPACE@30..31 " " 38 | NODE_LITERAL@31..34 39 | TOKEN_INTEGER@31..34 "234" 40 | TOKEN_SEMICOLON@34..35 ";" 41 | TOKEN_WHITESPACE@35..36 "\n" 42 | TOKEN_R_BRACE@36..37 "}" 43 | 44 | -------------------------------------------------------------------------------- /test_data/parser/success/or_in_attr.nix: -------------------------------------------------------------------------------- 1 | { 2 | a.b.c = 324; 3 | a.or.b.or = 234; 4 | } 5 | -------------------------------------------------------------------------------- /test_data/parser/success/path.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..107 2 | NODE_ATTR_SET@0..107 3 | TOKEN_L_BRACE@0..1 "{" 4 | TOKEN_WHITESPACE@1..4 "\n " 5 | NODE_ATTRPATH_VALUE@4..21 6 | NODE_ATTRPATH@4..7 7 | NODE_IDENT@4..7 8 | TOKEN_IDENT@4..7 "abs" 9 | TOKEN_WHITESPACE@7..8 " " 10 | TOKEN_ASSIGN@8..9 "=" 11 | TOKEN_WHITESPACE@9..10 " " 12 | NODE_PATH@10..20 13 | TOKEN_PATH@10..20 "/nix/store" 14 | TOKEN_SEMICOLON@20..21 ";" 15 | TOKEN_WHITESPACE@21..24 "\n " 16 | NODE_ATTRPATH_VALUE@24..50 17 | NODE_ATTRPATH@24..28 18 | NODE_IDENT@24..28 19 | TOKEN_IDENT@24..28 "home" 20 | TOKEN_WHITESPACE@28..29 " " 21 | TOKEN_ASSIGN@29..30 "=" 22 | TOKEN_WHITESPACE@30..31 " " 23 | NODE_PATH@31..49 24 | TOKEN_PATH@31..49 "~/.nix-profile/bin" 25 | TOKEN_SEMICOLON@49..50 ";" 26 | TOKEN_WHITESPACE@50..53 "\n " 27 | NODE_ATTRPATH_VALUE@53..79 28 | NODE_ATTRPATH@53..56 29 | NODE_IDENT@53..56 30 | TOKEN_IDENT@53..56 "rel" 31 | TOKEN_WHITESPACE@56..57 " " 32 | TOKEN_ASSIGN@57..58 "=" 33 | TOKEN_WHITESPACE@58..59 " " 34 | NODE_PATH@59..78 35 | TOKEN_PATH@59..78 "./configuration.nix" 36 | TOKEN_SEMICOLON@78..79 ";" 37 | TOKEN_WHITESPACE@79..82 "\n " 38 | NODE_ATTRPATH_VALUE@82..105 39 | NODE_ATTRPATH@82..87 40 | NODE_IDENT@82..87 41 | TOKEN_IDENT@82..87 "store" 42 | 
TOKEN_WHITESPACE@87..88 " " 43 | TOKEN_ASSIGN@88..89 "=" 44 | TOKEN_WHITESPACE@89..90 " " 45 | NODE_PATH@90..104 46 | TOKEN_PATH@90..104 "" 47 | TOKEN_SEMICOLON@104..105 ";" 48 | TOKEN_WHITESPACE@105..106 "\n" 49 | TOKEN_R_BRACE@106..107 "}" 50 | 51 | -------------------------------------------------------------------------------- /test_data/parser/success/path.nix: -------------------------------------------------------------------------------- 1 | { 2 | abs = /nix/store; 3 | home = ~/.nix-profile/bin; 4 | rel = ./configuration.nix; 5 | store = ; 6 | } 7 | -------------------------------------------------------------------------------- /test_data/parser/success/path_interp.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..91 2 | NODE_LET_IN@0..91 3 | TOKEN_LET@0..3 "let" 4 | TOKEN_WHITESPACE@3..6 "\n " 5 | NODE_ATTRPATH_VALUE@6..29 6 | NODE_ATTRPATH@6..7 7 | NODE_IDENT@6..7 8 | TOKEN_IDENT@6..7 "a" 9 | TOKEN_WHITESPACE@7..8 " " 10 | TOKEN_ASSIGN@8..9 "=" 11 | TOKEN_WHITESPACE@9..10 " " 12 | NODE_LAMBDA@10..28 13 | NODE_IDENT_PARAM@10..11 14 | NODE_IDENT@10..11 15 | TOKEN_IDENT@10..11 "f" 16 | TOKEN_COLON@11..12 ":" 17 | TOKEN_WHITESPACE@12..13 " " 18 | NODE_PATH@13..28 19 | TOKEN_PATH@13..18 "./foo" 20 | NODE_INTERPOL@18..24 21 | TOKEN_INTERPOL_START@18..20 "${" 22 | NODE_IDENT@20..23 23 | TOKEN_IDENT@20..23 "bar" 24 | TOKEN_INTERPOL_END@23..24 "}" 25 | TOKEN_PATH@24..28 "/baz" 26 | TOKEN_SEMICOLON@28..29 ";" 27 | TOKEN_WHITESPACE@29..32 "\n " 28 | NODE_ATTRPATH_VALUE@32..86 29 | NODE_ATTRPATH@32..33 30 | NODE_IDENT@32..33 31 | TOKEN_IDENT@32..33 "b" 32 | TOKEN_WHITESPACE@33..34 " " 33 | TOKEN_ASSIGN@34..35 "=" 34 | TOKEN_WHITESPACE@35..36 " " 35 | NODE_APPLY@36..85 36 | NODE_APPLY@36..66 37 | NODE_APPLY@36..53 38 | NODE_APPLY@36..43 39 | NODE_IDENT@36..37 40 | TOKEN_IDENT@36..37 "a" 41 | TOKEN_WHITESPACE@37..38 " " 42 | NODE_PATH@38..43 43 | TOKEN_PATH@38..43 "./bar" 44 | TOKEN_WHITESPACE@43..44 " " 45 | 
NODE_PATH@44..53 46 | TOKEN_PATH@44..49 "./baz" 47 | NODE_INTERPOL@49..53 48 | TOKEN_INTERPOL_START@49..51 "${" 49 | NODE_IDENT@51..52 50 | TOKEN_IDENT@51..52 "x" 51 | TOKEN_INTERPOL_END@52..53 "}" 52 | TOKEN_WHITESPACE@53..54 " " 53 | NODE_PATH@54..66 54 | TOKEN_PATH@54..61 "./snens" 55 | NODE_INTERPOL@61..65 56 | TOKEN_INTERPOL_START@61..63 "${" 57 | NODE_IDENT@63..64 58 | TOKEN_IDENT@63..64 "x" 59 | TOKEN_INTERPOL_END@64..65 "}" 60 | TOKEN_PATH@65..66 "y" 61 | TOKEN_WHITESPACE@66..67 " " 62 | NODE_PATH@67..85 63 | TOKEN_PATH@67..72 "./qux" 64 | NODE_INTERPOL@72..76 65 | TOKEN_INTERPOL_START@72..74 "${" 66 | NODE_IDENT@74..75 67 | TOKEN_IDENT@74..75 "x" 68 | TOKEN_INTERPOL_END@75..76 "}" 69 | TOKEN_PATH@76..77 "." 70 | NODE_INTERPOL@77..81 71 | TOKEN_INTERPOL_START@77..79 "${" 72 | NODE_IDENT@79..80 73 | TOKEN_IDENT@79..80 "y" 74 | TOKEN_INTERPOL_END@80..81 "}" 75 | TOKEN_PATH@81..85 ".z/w" 76 | TOKEN_SEMICOLON@85..86 ";" 77 | TOKEN_WHITESPACE@86..87 "\n" 78 | TOKEN_IN@87..89 "in" 79 | TOKEN_WHITESPACE@89..90 " " 80 | NODE_IDENT@90..91 81 | TOKEN_IDENT@90..91 "b" 82 | 83 | -------------------------------------------------------------------------------- /test_data/parser/success/path_interp.nix: -------------------------------------------------------------------------------- 1 | let 2 | a = f: ./foo${bar}/baz; 3 | b = a ./bar ./baz${x} ./snens${x}y ./qux${x}.${y}.z/w; 4 | in b 5 | -------------------------------------------------------------------------------- /test_data/parser/success/path_interp_no_prefix.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..6 2 | NODE_PATH@0..6 3 | TOKEN_PATH@0..2 "a/" 4 | NODE_INTERPOL@2..6 5 | TOKEN_INTERPOL_START@2..4 "${" 6 | NODE_IDENT@4..5 7 | TOKEN_IDENT@4..5 "b" 8 | TOKEN_INTERPOL_END@5..6 "}" 9 | 10 | -------------------------------------------------------------------------------- /test_data/parser/success/path_interp_no_prefix.nix: 
-------------------------------------------------------------------------------- 1 | a/${b} 2 | -------------------------------------------------------------------------------- /test_data/parser/success/path_no_newline.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..11 2 | NODE_APPLY@0..10 3 | NODE_IDENT@0..6 4 | TOKEN_IDENT@0..6 "import" 5 | TOKEN_WHITESPACE@6..7 " " 6 | NODE_PATH@7..10 7 | TOKEN_PATH@7..10 "./." 8 | TOKEN_WHITESPACE@10..11 "\n" 9 | 10 | -------------------------------------------------------------------------------- /test_data/parser/success/path_no_newline.nix: -------------------------------------------------------------------------------- 1 | import ./. 2 | 3 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_bind_left.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..20 2 | NODE_LAMBDA@0..20 3 | NODE_PATTERN@0..13 4 | NODE_PAT_BIND@0..7 5 | NODE_IDENT@0..5 6 | TOKEN_IDENT@0..5 "outer" 7 | TOKEN_WHITESPACE@5..6 " " 8 | TOKEN_AT@6..7 "@" 9 | TOKEN_WHITESPACE@7..8 " " 10 | TOKEN_L_BRACE@8..9 "{" 11 | TOKEN_WHITESPACE@9..10 " " 12 | NODE_PAT_ENTRY@10..11 13 | NODE_IDENT@10..11 14 | TOKEN_IDENT@10..11 "a" 15 | TOKEN_WHITESPACE@11..12 " " 16 | TOKEN_R_BRACE@12..13 "}" 17 | TOKEN_COLON@13..14 ":" 18 | TOKEN_WHITESPACE@14..15 " " 19 | NODE_IDENT@15..20 20 | TOKEN_IDENT@15..20 "outer" 21 | 22 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_bind_left.nix: -------------------------------------------------------------------------------- 1 | outer @ { a }: outer 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_bind_right.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..13 2 | 
NODE_LAMBDA@0..13 3 | NODE_PATTERN@0..10 4 | TOKEN_L_BRACE@0..1 "{" 5 | TOKEN_R_BRACE@1..2 "}" 6 | TOKEN_WHITESPACE@2..3 " " 7 | NODE_PAT_BIND@3..10 8 | TOKEN_AT@3..4 "@" 9 | TOKEN_WHITESPACE@4..5 " " 10 | NODE_IDENT@5..10 11 | TOKEN_IDENT@5..10 "outer" 12 | TOKEN_COLON@10..11 ":" 13 | TOKEN_WHITESPACE@11..12 " " 14 | NODE_LITERAL@12..13 15 | TOKEN_INTEGER@12..13 "1" 16 | 17 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_bind_right.nix: -------------------------------------------------------------------------------- 1 | {} @ outer: 1 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_default.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..23 2 | NODE_LAMBDA@0..23 3 | NODE_PATTERN@0..20 4 | TOKEN_L_BRACE@0..1 "{" 5 | TOKEN_WHITESPACE@1..2 " " 6 | NODE_PAT_ENTRY@2..3 7 | NODE_IDENT@2..3 8 | TOKEN_IDENT@2..3 "a" 9 | TOKEN_COMMA@3..4 "," 10 | TOKEN_WHITESPACE@4..5 " " 11 | NODE_PAT_ENTRY@5..18 12 | NODE_IDENT@5..6 13 | TOKEN_IDENT@5..6 "b" 14 | TOKEN_WHITESPACE@6..7 " " 15 | TOKEN_QUESTION@7..8 "?" 16 | TOKEN_WHITESPACE@8..9 " " 17 | NODE_STRING@9..18 18 | TOKEN_STRING_START@9..10 "\"" 19 | TOKEN_STRING_CONTENT@10..17 "default" 20 | TOKEN_STRING_END@17..18 "\"" 21 | TOKEN_WHITESPACE@18..19 " " 22 | TOKEN_R_BRACE@19..20 "}" 23 | TOKEN_COLON@20..21 ":" 24 | TOKEN_WHITESPACE@21..22 " " 25 | NODE_IDENT@22..23 26 | TOKEN_IDENT@22..23 "a" 27 | 28 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_default.nix: -------------------------------------------------------------------------------- 1 | { a, b ? 
"default" }: a 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_default_attrset.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..8 2 | NODE_LAMBDA@0..8 3 | NODE_PATTERN@0..6 4 | TOKEN_L_BRACE@0..1 "{" 5 | NODE_PAT_ENTRY@1..5 6 | NODE_IDENT@1..2 7 | TOKEN_IDENT@1..2 "a" 8 | TOKEN_QUESTION@2..3 "?" 9 | NODE_ATTR_SET@3..5 10 | TOKEN_L_BRACE@3..4 "{" 11 | TOKEN_R_BRACE@4..5 "}" 12 | TOKEN_R_BRACE@5..6 "}" 13 | TOKEN_COLON@6..7 ":" 14 | NODE_IDENT@7..8 15 | TOKEN_IDENT@7..8 "a" 16 | 17 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_default_attrset.nix: -------------------------------------------------------------------------------- 1 | {a?{}}:a 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_default_ellipsis.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..28 2 | NODE_LAMBDA@0..28 3 | NODE_PATTERN@0..25 4 | TOKEN_L_BRACE@0..1 "{" 5 | TOKEN_WHITESPACE@1..2 " " 6 | NODE_PAT_ENTRY@2..3 7 | NODE_IDENT@2..3 8 | TOKEN_IDENT@2..3 "a" 9 | TOKEN_COMMA@3..4 "," 10 | TOKEN_WHITESPACE@4..5 " " 11 | NODE_PAT_ENTRY@5..18 12 | NODE_IDENT@5..6 13 | TOKEN_IDENT@5..6 "b" 14 | TOKEN_WHITESPACE@6..7 " " 15 | TOKEN_QUESTION@7..8 "?" 16 | TOKEN_WHITESPACE@8..9 " " 17 | NODE_STRING@9..18 18 | TOKEN_STRING_START@9..10 "\"" 19 | TOKEN_STRING_CONTENT@10..17 "default" 20 | TOKEN_STRING_END@17..18 "\"" 21 | TOKEN_COMMA@18..19 "," 22 | TOKEN_WHITESPACE@19..20 " " 23 | TOKEN_ELLIPSIS@20..23 "..." 
24 | TOKEN_WHITESPACE@23..24 " " 25 | TOKEN_R_BRACE@24..25 "}" 26 | TOKEN_COLON@25..26 ":" 27 | TOKEN_WHITESPACE@26..27 " " 28 | NODE_IDENT@27..28 29 | TOKEN_IDENT@27..28 "a" 30 | 31 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_default_ellipsis.nix: -------------------------------------------------------------------------------- 1 | { a, b ? "default", ... }: a 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_ellipsis.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..10 2 | NODE_LAMBDA@0..10 3 | NODE_PATTERN@0..7 4 | TOKEN_L_BRACE@0..1 "{" 5 | TOKEN_WHITESPACE@1..2 " " 6 | TOKEN_ELLIPSIS@2..5 "..." 7 | TOKEN_WHITESPACE@5..6 " " 8 | TOKEN_R_BRACE@6..7 "}" 9 | TOKEN_COLON@7..8 ":" 10 | TOKEN_WHITESPACE@8..9 " " 11 | NODE_LITERAL@9..10 12 | TOKEN_INTEGER@9..10 "1" 13 | 14 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_ellipsis.nix: -------------------------------------------------------------------------------- 1 | { ... 
}: 1 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_trailing_comma.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..6 2 | NODE_LAMBDA@0..6 3 | NODE_PATTERN@0..4 4 | TOKEN_L_BRACE@0..1 "{" 5 | NODE_PAT_ENTRY@1..2 6 | NODE_IDENT@1..2 7 | TOKEN_IDENT@1..2 "a" 8 | TOKEN_COMMA@2..3 "," 9 | TOKEN_R_BRACE@3..4 "}" 10 | TOKEN_COLON@4..5 ":" 11 | NODE_IDENT@5..6 12 | TOKEN_IDENT@5..6 "a" 13 | 14 | -------------------------------------------------------------------------------- /test_data/parser/success/pattern_trailing_comma.nix: -------------------------------------------------------------------------------- 1 | {a,}:a 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_left.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..22 2 | NODE_BIN_OP@0..22 3 | NODE_SELECT@0..17 4 | NODE_IDENT@0..8 5 | TOKEN_IDENT@0..8 "builtins" 6 | TOKEN_DOT@8..9 "." 
7 | NODE_ATTRPATH@9..17 8 | NODE_IDENT@9..17 9 | TOKEN_IDENT@9..17 "toString" 10 | TOKEN_WHITESPACE@17..18 " " 11 | TOKEN_PIPE_LEFT@18..20 "<|" 12 | TOKEN_WHITESPACE@20..21 " " 13 | NODE_LITERAL@21..22 14 | TOKEN_INTEGER@21..22 "1" 15 | 16 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_left.nix: -------------------------------------------------------------------------------- 1 | builtins.toString <| 1 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_left_assoc.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..29 2 | NODE_BIN_OP@0..29 3 | NODE_IDENT@0..3 4 | TOKEN_IDENT@0..3 "foo" 5 | TOKEN_WHITESPACE@3..4 " " 6 | TOKEN_PIPE_LEFT@4..6 "<|" 7 | TOKEN_WHITESPACE@6..7 " " 8 | NODE_BIN_OP@7..29 9 | NODE_SELECT@7..24 10 | NODE_IDENT@7..15 11 | TOKEN_IDENT@7..15 "builtins" 12 | TOKEN_DOT@15..16 "." 13 | NODE_ATTRPATH@16..24 14 | NODE_IDENT@16..24 15 | TOKEN_IDENT@16..24 "toString" 16 | TOKEN_WHITESPACE@24..25 " " 17 | TOKEN_PIPE_LEFT@25..27 "<|" 18 | TOKEN_WHITESPACE@27..28 " " 19 | NODE_LITERAL@28..29 20 | TOKEN_INTEGER@28..29 "1" 21 | 22 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_left_assoc.nix: -------------------------------------------------------------------------------- 1 | foo <| builtins.toString <| 1 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_left_math.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..26 2 | NODE_BIN_OP@0..26 3 | NODE_SELECT@0..17 4 | NODE_IDENT@0..8 5 | TOKEN_IDENT@0..8 "builtins" 6 | TOKEN_DOT@8..9 "." 
7 | NODE_ATTRPATH@9..17 8 | NODE_IDENT@9..17 9 | TOKEN_IDENT@9..17 "toString" 10 | TOKEN_WHITESPACE@17..18 " " 11 | TOKEN_PIPE_LEFT@18..20 "<|" 12 | TOKEN_WHITESPACE@20..21 " " 13 | NODE_BIN_OP@21..26 14 | NODE_LITERAL@21..22 15 | TOKEN_INTEGER@21..22 "1" 16 | TOKEN_WHITESPACE@22..23 " " 17 | TOKEN_ADD@23..24 "+" 18 | TOKEN_WHITESPACE@24..25 " " 19 | NODE_LITERAL@25..26 20 | TOKEN_INTEGER@25..26 "1" 21 | 22 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_left_math.nix: -------------------------------------------------------------------------------- 1 | builtins.toString <| 1 + 1 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_mixed.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..29 2 | NODE_BIN_OP@0..29 3 | NODE_SELECT@0..17 4 | NODE_IDENT@0..8 5 | TOKEN_IDENT@0..8 "builtins" 6 | TOKEN_DOT@8..9 "." 7 | NODE_ATTRPATH@9..17 8 | NODE_IDENT@9..17 9 | TOKEN_IDENT@9..17 "toString" 10 | TOKEN_WHITESPACE@17..18 " " 11 | TOKEN_PIPE_LEFT@18..20 "<|" 12 | TOKEN_WHITESPACE@20..21 " " 13 | NODE_BIN_OP@21..29 14 | NODE_LITERAL@21..22 15 | TOKEN_INTEGER@21..22 "1" 16 | TOKEN_WHITESPACE@22..23 " " 17 | TOKEN_PIPE_RIGHT@23..25 "|>" 18 | TOKEN_WHITESPACE@25..26 " " 19 | NODE_IDENT@26..29 20 | TOKEN_IDENT@26..29 "foo" 21 | 22 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_mixed.nix: -------------------------------------------------------------------------------- 1 | builtins.toString <| 1 |> foo 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_mixed_math.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..33 2 | NODE_BIN_OP@0..33 3 | NODE_SELECT@0..17 4 | NODE_IDENT@0..8 5 | TOKEN_IDENT@0..8 "builtins" 6 | 
TOKEN_DOT@8..9 "." 7 | NODE_ATTRPATH@9..17 8 | NODE_IDENT@9..17 9 | TOKEN_IDENT@9..17 "toString" 10 | TOKEN_WHITESPACE@17..18 " " 11 | TOKEN_PIPE_LEFT@18..20 "<|" 12 | TOKEN_WHITESPACE@20..21 " " 13 | NODE_BIN_OP@21..33 14 | NODE_BIN_OP@21..26 15 | NODE_LITERAL@21..22 16 | TOKEN_INTEGER@21..22 "1" 17 | TOKEN_WHITESPACE@22..23 " " 18 | TOKEN_ADD@23..24 "+" 19 | TOKEN_WHITESPACE@24..25 " " 20 | NODE_LITERAL@25..26 21 | TOKEN_INTEGER@25..26 "1" 22 | TOKEN_WHITESPACE@26..27 " " 23 | TOKEN_PIPE_RIGHT@27..29 "|>" 24 | TOKEN_WHITESPACE@29..30 " " 25 | NODE_IDENT@30..33 26 | TOKEN_IDENT@30..33 "foo" 27 | 28 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_mixed_math.nix: -------------------------------------------------------------------------------- 1 | builtins.toString <| 1 + 1 |> foo 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_right.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..22 2 | NODE_BIN_OP@0..22 3 | NODE_LITERAL@0..1 4 | TOKEN_INTEGER@0..1 "1" 5 | TOKEN_WHITESPACE@1..2 " " 6 | TOKEN_PIPE_RIGHT@2..4 "|>" 7 | TOKEN_WHITESPACE@4..5 " " 8 | NODE_SELECT@5..22 9 | NODE_IDENT@5..13 10 | TOKEN_IDENT@5..13 "builtins" 11 | TOKEN_DOT@13..14 "." 
12 | NODE_ATTRPATH@14..22 13 | NODE_IDENT@14..22 14 | TOKEN_IDENT@14..22 "toString" 15 | 16 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_right.nix: -------------------------------------------------------------------------------- 1 | 1 |> builtins.toString 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_right_assoc.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..29 2 | NODE_BIN_OP@0..29 3 | NODE_BIN_OP@0..22 4 | NODE_LITERAL@0..1 5 | TOKEN_INTEGER@0..1 "1" 6 | TOKEN_WHITESPACE@1..2 " " 7 | TOKEN_PIPE_RIGHT@2..4 "|>" 8 | TOKEN_WHITESPACE@4..5 " " 9 | NODE_SELECT@5..22 10 | NODE_IDENT@5..13 11 | TOKEN_IDENT@5..13 "builtins" 12 | TOKEN_DOT@13..14 "." 13 | NODE_ATTRPATH@14..22 14 | NODE_IDENT@14..22 15 | TOKEN_IDENT@14..22 "toString" 16 | TOKEN_WHITESPACE@22..23 " " 17 | TOKEN_PIPE_RIGHT@23..25 "|>" 18 | TOKEN_WHITESPACE@25..26 " " 19 | NODE_IDENT@26..29 20 | TOKEN_IDENT@26..29 "foo" 21 | 22 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_right_assoc.nix: -------------------------------------------------------------------------------- 1 | 1 |> builtins.toString |> foo 2 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_right_math.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..26 2 | NODE_BIN_OP@0..26 3 | NODE_BIN_OP@0..5 4 | NODE_LITERAL@0..1 5 | TOKEN_INTEGER@0..1 "1" 6 | TOKEN_WHITESPACE@1..2 " " 7 | TOKEN_ADD@2..3 "+" 8 | TOKEN_WHITESPACE@3..4 " " 9 | NODE_LITERAL@4..5 10 | TOKEN_INTEGER@4..5 "1" 11 | TOKEN_WHITESPACE@5..6 " " 12 | TOKEN_PIPE_RIGHT@6..8 "|>" 13 | TOKEN_WHITESPACE@8..9 " " 14 | NODE_SELECT@9..26 15 | NODE_IDENT@9..17 16 | TOKEN_IDENT@9..17 "builtins" 17 | TOKEN_DOT@17..18 
"." 18 | NODE_ATTRPATH@18..26 19 | NODE_IDENT@18..26 20 | TOKEN_IDENT@18..26 "toString" 21 | 22 | -------------------------------------------------------------------------------- /test_data/parser/success/pipe_right_math.nix: -------------------------------------------------------------------------------- 1 | 1 + 1 |> builtins.toString 2 | -------------------------------------------------------------------------------- /test_data/parser/success/select_default.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..14 2 | NODE_BIN_OP@0..14 3 | NODE_SELECT@0..10 4 | NODE_IDENT@0..1 5 | TOKEN_IDENT@0..1 "a" 6 | TOKEN_DOT@1..2 "." 7 | NODE_ATTRPATH@2..5 8 | NODE_IDENT@2..3 9 | TOKEN_IDENT@2..3 "b" 10 | TOKEN_DOT@3..4 "." 11 | NODE_IDENT@4..5 12 | TOKEN_IDENT@4..5 "c" 13 | TOKEN_WHITESPACE@5..6 " " 14 | TOKEN_OR@6..8 "or" 15 | TOKEN_WHITESPACE@8..9 " " 16 | NODE_LITERAL@9..10 17 | TOKEN_INTEGER@9..10 "1" 18 | TOKEN_WHITESPACE@10..11 " " 19 | TOKEN_ADD@11..12 "+" 20 | TOKEN_WHITESPACE@12..13 " " 21 | NODE_LITERAL@13..14 22 | TOKEN_INTEGER@13..14 "1" 23 | 24 | -------------------------------------------------------------------------------- /test_data/parser/success/select_default.nix: -------------------------------------------------------------------------------- 1 | a.b.c or 1 + 1 2 | -------------------------------------------------------------------------------- /test_data/parser/success/select_ident.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..5 2 | NODE_SELECT@0..5 3 | NODE_IDENT@0..1 4 | TOKEN_IDENT@0..1 "a" 5 | TOKEN_DOT@1..2 "." 6 | NODE_ATTRPATH@2..5 7 | NODE_IDENT@2..3 8 | TOKEN_IDENT@2..3 "b" 9 | TOKEN_DOT@3..4 "." 
10 | NODE_IDENT@4..5 11 | TOKEN_IDENT@4..5 "c" 12 | 13 | -------------------------------------------------------------------------------- /test_data/parser/success/select_ident.nix: -------------------------------------------------------------------------------- 1 | a.b.c 2 | -------------------------------------------------------------------------------- /test_data/parser/success/select_string_dynamic.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..33 2 | NODE_SELECT@0..33 3 | NODE_IDENT@0..4 4 | TOKEN_IDENT@0..4 "test" 5 | TOKEN_DOT@4..5 "." 6 | NODE_ATTRPATH@5..33 7 | NODE_STRING@5..20 8 | TOKEN_STRING_START@5..6 "\"" 9 | TOKEN_STRING_CONTENT@6..19 "invalid ident" 10 | TOKEN_STRING_END@19..20 "\"" 11 | TOKEN_DOT@20..21 "." 12 | NODE_STRING@21..28 13 | TOKEN_STRING_START@21..22 "\"" 14 | NODE_INTERPOL@22..27 15 | TOKEN_INTERPOL_START@22..24 "${" 16 | NODE_IDENT@24..26 17 | TOKEN_IDENT@24..26 "hi" 18 | TOKEN_INTERPOL_END@26..27 "}" 19 | TOKEN_STRING_END@27..28 "\"" 20 | TOKEN_DOT@28..29 "." 
21 | NODE_DYNAMIC@29..33 22 | TOKEN_INTERPOL_START@29..31 "${" 23 | NODE_IDENT@31..32 24 | TOKEN_IDENT@31..32 "a" 25 | TOKEN_INTERPOL_END@32..33 "}" 26 | 27 | -------------------------------------------------------------------------------- /test_data/parser/success/select_string_dynamic.nix: -------------------------------------------------------------------------------- 1 | test."invalid ident"."${hi}".${a} 2 | -------------------------------------------------------------------------------- /test_data/parser/success/string.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..137 2 | NODE_ATTR_SET@0..137 3 | TOKEN_L_BRACE@0..1 "{" 4 | TOKEN_WHITESPACE@1..4 "\n " 5 | NODE_ATTRPATH_VALUE@4..21 6 | NODE_ATTRPATH@4..10 7 | NODE_IDENT@4..10 8 | TOKEN_IDENT@4..10 "simple" 9 | TOKEN_WHITESPACE@10..11 " " 10 | TOKEN_ASSIGN@11..12 "=" 11 | TOKEN_WHITESPACE@12..13 " " 12 | NODE_STRING@13..20 13 | TOKEN_STRING_START@13..14 "\"" 14 | TOKEN_STRING_CONTENT@14..19 "hello" 15 | TOKEN_STRING_END@19..20 "\"" 16 | TOKEN_SEMICOLON@20..21 ";" 17 | TOKEN_WHITESPACE@21..24 "\n " 18 | NODE_ATTRPATH_VALUE@24..63 19 | NODE_ATTRPATH@24..32 20 | NODE_IDENT@24..32 21 | TOKEN_IDENT@24..32 "indented" 22 | TOKEN_WHITESPACE@32..33 " " 23 | TOKEN_ASSIGN@33..34 "=" 24 | TOKEN_WHITESPACE@34..35 " " 25 | NODE_STRING@35..62 26 | TOKEN_STRING_START@35..37 "''" 27 | TOKEN_STRING_CONTENT@37..60 "\n hello\n world\n " 28 | TOKEN_STRING_END@60..62 "''" 29 | TOKEN_SEMICOLON@62..63 ";" 30 | TOKEN_WHITESPACE@63..66 "\n " 31 | NODE_ATTRPATH_VALUE@66..135 32 | NODE_ATTRPATH@66..101 33 | NODE_IDENT@66..101 34 | TOKEN_IDENT@66..101 "uri_is_also_a_string_ ..." 35 | TOKEN_WHITESPACE@101..102 " " 36 | TOKEN_ASSIGN@102..103 "=" 37 | TOKEN_WHITESPACE@103..104 " " 38 | NODE_LITERAL@104..134 39 | TOKEN_URI@104..134 "http://example.org/fo ..." 
40 | TOKEN_SEMICOLON@134..135 ";" 41 | TOKEN_WHITESPACE@135..136 "\n" 42 | TOKEN_R_BRACE@136..137 "}" 43 | 44 | -------------------------------------------------------------------------------- /test_data/parser/success/string.nix: -------------------------------------------------------------------------------- 1 | { 2 | simple = "hello"; 3 | indented = '' 4 | hello 5 | world 6 | ''; 7 | uri_is_also_a_string_how_convenient = http://example.org/foo.tar.bz2; 8 | } 9 | -------------------------------------------------------------------------------- /test_data/parser/success/string_complex_url.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..99 2 | NODE_ATTR_SET@0..99 3 | TOKEN_L_BRACE@0..1 "{" 4 | TOKEN_WHITESPACE@1..4 "\n " 5 | NODE_ATTRPATH_VALUE@4..97 6 | NODE_ATTRPATH@4..24 7 | NODE_IDENT@4..24 8 | TOKEN_IDENT@4..24 "more_complicated_url" 9 | TOKEN_WHITESPACE@24..25 " " 10 | TOKEN_ASSIGN@25..26 "=" 11 | TOKEN_WHITESPACE@26..31 "\n " 12 | NODE_LITERAL@31..96 13 | TOKEN_URI@31..96 "http://c758482.r82.cf ..." 
14 | TOKEN_SEMICOLON@96..97 ";" 15 | TOKEN_WHITESPACE@97..98 "\n" 16 | TOKEN_R_BRACE@98..99 "}" 17 | 18 | -------------------------------------------------------------------------------- /test_data/parser/success/string_complex_url.nix: -------------------------------------------------------------------------------- 1 | { 2 | more_complicated_url = 3 | http://c758482.r82.cf2.rackcdn.com/Sublime%20Text%202.0.2.tar.bz2; 4 | } 5 | -------------------------------------------------------------------------------- /test_data/parser/success/string_interp_ident.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..21 2 | TOKEN_WHITESPACE@0..1 " " 3 | NODE_STRING@1..20 4 | TOKEN_STRING_START@1..2 "\"" 5 | NODE_INTERPOL@2..10 6 | TOKEN_INTERPOL_START@2..4 "${" 7 | NODE_IDENT@4..9 8 | TOKEN_IDENT@4..9 "hello" 9 | TOKEN_INTERPOL_END@9..10 "}" 10 | TOKEN_STRING_CONTENT@10..11 " " 11 | NODE_INTERPOL@11..19 12 | TOKEN_INTERPOL_START@11..13 "${" 13 | NODE_IDENT@13..18 14 | TOKEN_IDENT@13..18 "world" 15 | TOKEN_INTERPOL_END@18..19 "}" 16 | TOKEN_STRING_END@19..20 "\"" 17 | TOKEN_WHITESPACE@20..21 " " 18 | 19 | -------------------------------------------------------------------------------- /test_data/parser/success/string_interp_ident.nix: -------------------------------------------------------------------------------- 1 | "${hello} ${world}" 2 | -------------------------------------------------------------------------------- /test_data/parser/success/string_interp_nested.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..17 2 | TOKEN_WHITESPACE@0..1 " " 3 | NODE_STRING@1..16 4 | TOKEN_STRING_START@1..3 "''" 5 | NODE_INTERPOL@3..14 6 | TOKEN_INTERPOL_START@3..5 "${" 7 | NODE_STRING@5..13 8 | TOKEN_STRING_START@5..6 "\"" 9 | NODE_INTERPOL@6..12 10 | TOKEN_INTERPOL_START@6..8 "${" 11 | NODE_IDENT@8..11 12 | TOKEN_IDENT@8..11 "var" 13 | TOKEN_INTERPOL_END@11..12 "}" 14 | 
TOKEN_STRING_END@12..13 "\"" 15 | TOKEN_INTERPOL_END@13..14 "}" 16 | TOKEN_STRING_END@14..16 "''" 17 | TOKEN_WHITESPACE@16..17 " " 18 | 19 | -------------------------------------------------------------------------------- /test_data/parser/success/string_interp_nested.nix: -------------------------------------------------------------------------------- 1 | ''${"${var}"}'' 2 | -------------------------------------------------------------------------------- /test_data/parser/success/string_interp_select.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..43 2 | TOKEN_WHITESPACE@0..1 " " 3 | NODE_STRING@1..42 4 | TOKEN_STRING_START@1..2 "\"" 5 | TOKEN_STRING_CONTENT@2..9 "Hello, " 6 | NODE_INTERPOL@9..40 7 | TOKEN_INTERPOL_START@9..11 "${" 8 | TOKEN_WHITESPACE@11..12 " " 9 | NODE_SELECT@12..38 10 | NODE_ATTR_SET@12..32 11 | TOKEN_L_BRACE@12..13 "{" 12 | TOKEN_WHITESPACE@13..14 " " 13 | NODE_ATTRPATH_VALUE@14..30 14 | NODE_ATTRPATH@14..19 15 | NODE_IDENT@14..19 16 | TOKEN_IDENT@14..19 "world" 17 | TOKEN_WHITESPACE@19..20 " " 18 | TOKEN_ASSIGN@20..21 "=" 19 | TOKEN_WHITESPACE@21..22 " " 20 | NODE_STRING@22..29 21 | TOKEN_STRING_START@22..23 "\"" 22 | TOKEN_STRING_CONTENT@23..28 "World" 23 | TOKEN_STRING_END@28..29 "\"" 24 | TOKEN_SEMICOLON@29..30 ";" 25 | TOKEN_WHITESPACE@30..31 " " 26 | TOKEN_R_BRACE@31..32 "}" 27 | TOKEN_DOT@32..33 "." 28 | NODE_ATTRPATH@33..38 29 | NODE_IDENT@33..38 30 | TOKEN_IDENT@33..38 "world" 31 | TOKEN_WHITESPACE@38..39 " " 32 | TOKEN_INTERPOL_END@39..40 "}" 33 | TOKEN_STRING_CONTENT@40..41 "!" 34 | TOKEN_STRING_END@41..42 "\"" 35 | TOKEN_WHITESPACE@42..43 " " 36 | 37 | -------------------------------------------------------------------------------- /test_data/parser/success/string_interp_select.nix: -------------------------------------------------------------------------------- 1 | "Hello, ${ { world = "World"; }.world }!" 
2 | -------------------------------------------------------------------------------- /test_data/parser/success/trivia.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..229 2 | NODE_ATTR_SET@0..197 3 | TOKEN_L_BRACE@0..1 "{" 4 | TOKEN_WHITESPACE@1..4 "\n " 5 | NODE_ATTRPATH_VALUE@4..67 6 | NODE_ATTRPATH@4..7 7 | NODE_IDENT@4..7 8 | TOKEN_IDENT@4..7 "add" 9 | TOKEN_WHITESPACE@7..8 " " 10 | TOKEN_ASSIGN@8..9 "=" 11 | TOKEN_WHITESPACE@9..10 " " 12 | TOKEN_COMMENT@10..31 "/* the first value */" 13 | TOKEN_WHITESPACE@31..32 " " 14 | NODE_LAMBDA@32..66 15 | NODE_IDENT_PARAM@32..33 16 | NODE_IDENT@32..33 17 | TOKEN_IDENT@32..33 "x" 18 | TOKEN_COLON@33..34 ":" 19 | TOKEN_WHITESPACE@34..35 " " 20 | TOKEN_COMMENT@35..57 "/* the second value */" 21 | TOKEN_WHITESPACE@57..58 " " 22 | NODE_LAMBDA@58..66 23 | NODE_IDENT_PARAM@58..59 24 | NODE_IDENT@58..59 25 | TOKEN_IDENT@58..59 "y" 26 | TOKEN_COLON@59..60 ":" 27 | TOKEN_WHITESPACE@60..61 " " 28 | NODE_BIN_OP@61..66 29 | NODE_IDENT@61..62 30 | TOKEN_IDENT@61..62 "x" 31 | TOKEN_WHITESPACE@62..63 " " 32 | TOKEN_ADD@63..64 "+" 33 | TOKEN_WHITESPACE@64..65 " " 34 | NODE_IDENT@65..66 35 | TOKEN_IDENT@65..66 "y" 36 | TOKEN_SEMICOLON@66..67 ";" 37 | TOKEN_WHITESPACE@67..70 "\n " 38 | NODE_ATTRPATH_VALUE@70..106 39 | NODE_ATTRPATH@70..76 40 | NODE_IDENT@70..76 41 | TOKEN_IDENT@70..76 "string" 42 | TOKEN_WHITESPACE@76..77 " " 43 | TOKEN_ASSIGN@77..78 "=" 44 | TOKEN_WHITESPACE@78..79 " " 45 | TOKEN_COMMENT@79..100 "/* meaning of life */" 46 | TOKEN_WHITESPACE@100..101 " " 47 | NODE_STRING@101..105 48 | TOKEN_STRING_START@101..102 "\"" 49 | TOKEN_STRING_CONTENT@102..104 "42" 50 | TOKEN_STRING_END@104..105 "\"" 51 | TOKEN_SEMICOLON@105..106 ";" 52 | TOKEN_WHITESPACE@106..111 "\n " 53 | NODE_ATTRPATH_VALUE@111..126 54 | NODE_ATTRPATH@111..113 55 | NODE_IDENT@111..113 56 | TOKEN_IDENT@111..113 "hi" 57 | TOKEN_WHITESPACE@113..114 " " 58 | TOKEN_ASSIGN@114..115 "=" 59 | 
TOKEN_WHITESPACE@115..116 " " 60 | NODE_LITERAL@116..125 61 | TOKEN_FLOAT@116..125 "3.0000000" 62 | TOKEN_SEMICOLON@125..126 ";" 63 | TOKEN_WHITESPACE@126..128 "\n " 64 | TOKEN_COMMENT@128..161 "# Store my password i ..." 65 | TOKEN_WHITESPACE@161..168 "\n " 66 | NODE_ATTRPATH_VALUE@168..195 67 | NODE_ATTRPATH@168..176 68 | NODE_IDENT@168..176 69 | TOKEN_IDENT@168..176 "password" 70 | TOKEN_WHITESPACE@176..178 " " 71 | TOKEN_ASSIGN@178..179 "=" 72 | TOKEN_WHITESPACE@179..185 " " 73 | NODE_STRING@185..194 74 | TOKEN_STRING_START@185..186 "\"" 75 | TOKEN_STRING_CONTENT@186..193 "hunter2" 76 | TOKEN_STRING_END@193..194 "\"" 77 | TOKEN_SEMICOLON@194..195 ";" 78 | TOKEN_WHITESPACE@195..196 "\n" 79 | TOKEN_R_BRACE@196..197 "}" 80 | TOKEN_WHITESPACE@197..201 "\n " 81 | TOKEN_COMMENT@201..211 "# trailing" 82 | TOKEN_WHITESPACE@211..219 "\n " 83 | TOKEN_COMMENT@219..229 "# comments" 84 | 85 | -------------------------------------------------------------------------------- /test_data/parser/success/trivia.nix: -------------------------------------------------------------------------------- 1 | { 2 | add = /* the first value */ x: /* the second value */ y: x + y; 3 | string = /* meaning of life */ "42"; 4 | hi = 3.0000000; 5 | # Store my password in plain text 6 | password = "hunter2"; 7 | } 8 | # trailing 9 | # comments 10 | -------------------------------------------------------------------------------- /test_data/parser/success/with-import-let-in.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..150 2 | NODE_WITH@0..150 3 | TOKEN_WITH@0..4 "with" 4 | TOKEN_WHITESPACE@4..5 " " 5 | NODE_APPLY@5..28 6 | NODE_IDENT@5..11 7 | TOKEN_IDENT@5..11 "import" 8 | TOKEN_WHITESPACE@11..12 " " 9 | NODE_PATH@12..28 10 | TOKEN_PATH@12..28 "./simple-set.nix" 11 | TOKEN_SEMICOLON@28..29 ";" 12 | TOKEN_WHITESPACE@29..31 "\n\n" 13 | NODE_LET_IN@31..150 14 | TOKEN_LET@31..34 "let" 15 | TOKEN_WHITESPACE@34..37 "\n " 16 | 
NODE_ATTRPATH_VALUE@37..47 17 | NODE_ATTRPATH@37..38 18 | NODE_IDENT@37..38 19 | TOKEN_IDENT@37..38 "a" 20 | TOKEN_WHITESPACE@38..39 " " 21 | TOKEN_ASSIGN@39..40 "=" 22 | TOKEN_WHITESPACE@40..41 " " 23 | NODE_BIN_OP@41..46 24 | NODE_LITERAL@41..42 25 | TOKEN_INTEGER@41..42 "4" 26 | TOKEN_WHITESPACE@42..43 " " 27 | TOKEN_ADD@43..44 "+" 28 | TOKEN_WHITESPACE@44..45 " " 29 | NODE_LITERAL@45..46 30 | TOKEN_INTEGER@45..46 "2" 31 | TOKEN_SEMICOLON@46..47 ";" 32 | TOKEN_WHITESPACE@47..48 "\n" 33 | TOKEN_IN@48..50 "in" 34 | TOKEN_WHITESPACE@50..51 " " 35 | NODE_ATTR_SET@51..150 36 | TOKEN_L_BRACE@51..52 "{" 37 | TOKEN_WHITESPACE@52..55 "\n " 38 | NODE_ATTRPATH_VALUE@55..65 39 | NODE_ATTRPATH@55..56 40 | NODE_IDENT@55..56 41 | TOKEN_IDENT@55..56 "b" 42 | TOKEN_WHITESPACE@56..57 " " 43 | TOKEN_ASSIGN@57..58 "=" 44 | TOKEN_WHITESPACE@58..59 " " 45 | NODE_BIN_OP@59..64 46 | NODE_IDENT@59..60 47 | TOKEN_IDENT@59..60 "a" 48 | TOKEN_WHITESPACE@60..61 " " 49 | TOKEN_ADD@61..62 "+" 50 | TOKEN_WHITESPACE@62..63 " " 51 | NODE_LITERAL@63..64 52 | TOKEN_INTEGER@63..64 "2" 53 | TOKEN_SEMICOLON@64..65 ";" 54 | TOKEN_WHITESPACE@65..69 "\n\n " 55 | NODE_ATTRPATH_VALUE@69..148 56 | NODE_ATTRPATH@69..75 57 | NODE_IDENT@69..75 58 | TOKEN_IDENT@69..75 "legacy" 59 | TOKEN_WHITESPACE@75..76 " " 60 | TOKEN_ASSIGN@76..77 "=" 61 | TOKEN_WHITESPACE@77..78 " " 62 | NODE_LEGACY_LET@78..147 63 | TOKEN_LET@78..81 "let" 64 | TOKEN_WHITESPACE@81..82 " " 65 | TOKEN_L_BRACE@82..83 "{" 66 | TOKEN_WHITESPACE@83..88 "\n " 67 | NODE_ATTRPATH_VALUE@88..113 68 | NODE_ATTRPATH@88..105 69 | NODE_IDENT@88..105 70 | TOKEN_IDENT@88..105 "this_syntax_sucks" 71 | TOKEN_WHITESPACE@105..106 " " 72 | TOKEN_ASSIGN@106..107 "=" 73 | TOKEN_WHITESPACE@107..108 " " 74 | NODE_IDENT@108..112 75 | TOKEN_IDENT@108..112 "true" 76 | TOKEN_SEMICOLON@112..113 ";" 77 | TOKEN_WHITESPACE@113..118 "\n " 78 | NODE_ATTRPATH_VALUE@118..143 79 | NODE_ATTRPATH@118..122 80 | NODE_IDENT@118..122 81 | TOKEN_IDENT@118..122 "body" 82 | 
TOKEN_WHITESPACE@122..123 " " 83 | TOKEN_ASSIGN@123..124 "=" 84 | TOKEN_WHITESPACE@124..125 " " 85 | NODE_IDENT@125..142 86 | TOKEN_IDENT@125..142 "this_syntax_sucks" 87 | TOKEN_SEMICOLON@142..143 ";" 88 | TOKEN_WHITESPACE@143..146 "\n " 89 | TOKEN_R_BRACE@146..147 "}" 90 | TOKEN_SEMICOLON@147..148 ";" 91 | TOKEN_WHITESPACE@148..149 "\n" 92 | TOKEN_R_BRACE@149..150 "}" 93 | 94 | -------------------------------------------------------------------------------- /test_data/parser/success/with-import-let-in.nix: -------------------------------------------------------------------------------- 1 | with import ./simple-set.nix; 2 | 3 | let 4 | a = 4 + 2; 5 | in { 6 | b = a + 2; 7 | 8 | legacy = let { 9 | this_syntax_sucks = true; 10 | body = this_syntax_sucks; 11 | }; 12 | } 13 | -------------------------------------------------------------------------------- /test_data/parser/success/with.expect: -------------------------------------------------------------------------------- 1 | NODE_ROOT@0..20 2 | NODE_WITH@0..20 3 | TOKEN_WITH@0..4 "with" 4 | TOKEN_WHITESPACE@4..5 " " 5 | NODE_IDENT@5..14 6 | TOKEN_IDENT@5..14 "namespace" 7 | TOKEN_SEMICOLON@14..15 ";" 8 | TOKEN_WHITESPACE@15..16 " " 9 | NODE_IDENT@16..20 10 | TOKEN_IDENT@16..20 "expr" 11 | 12 | -------------------------------------------------------------------------------- /test_data/parser/success/with.nix: -------------------------------------------------------------------------------- 1 | with namespace; expr 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/error/path_interp_trailing_slash.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "./" 2 | TOKEN_INTERPOL_START, "${" 3 | TOKEN_IDENT, "foo" 4 | TOKEN_INTERPOL_END, "}" 5 | TOKEN_ERROR, "/" 6 | -------------------------------------------------------------------------------- /test_data/tokenizer/error/path_interp_trailing_slash.nix: 
-------------------------------------------------------------------------------- 1 | ./${foo}/ 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/error/path_tilde.expect: -------------------------------------------------------------------------------- 1 | TOKEN_ERROR, "~" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/error/path_tilde.nix: -------------------------------------------------------------------------------- 1 | ~ 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/error/path_tilde_slash.expect: -------------------------------------------------------------------------------- 1 | TOKEN_ERROR, "~/" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/error/path_tilde_slash.nix: -------------------------------------------------------------------------------- 1 | ~/ 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/error/path_trailing_slash.expect: -------------------------------------------------------------------------------- 1 | TOKEN_ERROR, "/a/" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/error/path_trailing_slash.nix: -------------------------------------------------------------------------------- 1 | /a/ 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/comment_interspersed.expect: -------------------------------------------------------------------------------- 1 | TOKEN_L_BRACE, "{" 2 | TOKEN_WHITESPACE, " " 3 | TOKEN_IDENT, "a" 4 | TOKEN_WHITESPACE, " " 5 | TOKEN_ASSIGN, "=" 6 | TOKEN_WHITESPACE, " " 7 | TOKEN_COMMENT, "/* multiline * comment */" 8 | TOKEN_WHITESPACE, " " 9 | TOKEN_INTEGER, "123" 10 | TOKEN_SEMICOLON, ";" 11 | TOKEN_COMMENT, "# single 
line" 12 | TOKEN_WHITESPACE, " 13 | " 14 | TOKEN_R_BRACE, "}" 15 | TOKEN_WHITESPACE, " " 16 | TOKEN_COMMENT, "# single line at the end" 17 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/comment_interspersed.nix: -------------------------------------------------------------------------------- 1 | { a = /* multiline * comment */ 123;# single line 2 | } # single line at the end 3 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/comment_multiline.expect: -------------------------------------------------------------------------------- 1 | TOKEN_COMMENT, "/**/" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/comment_multiline.nix: -------------------------------------------------------------------------------- 1 | /**/ 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/comment_multiline2.expect: -------------------------------------------------------------------------------- 1 | TOKEN_COMMENT, "/***/" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/comment_multiline2.nix: -------------------------------------------------------------------------------- 1 | /***/ 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float.expect: -------------------------------------------------------------------------------- 1 | TOKEN_FLOAT, "1.234" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float.nix: -------------------------------------------------------------------------------- 1 | 1.234 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_no_leading_part.expect: 
-------------------------------------------------------------------------------- 1 | TOKEN_FLOAT, ".5" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_no_leading_part.nix: -------------------------------------------------------------------------------- 1 | .5 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_no_trailing_part.expect: -------------------------------------------------------------------------------- 1 | TOKEN_FLOAT, "1." 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_no_trailing_part.nix: -------------------------------------------------------------------------------- 1 | 1. 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_scientific_lower.expect: -------------------------------------------------------------------------------- 1 | TOKEN_FLOAT, "1.1e4" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_scientific_lower.nix: -------------------------------------------------------------------------------- 1 | 1.1e4 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_scientific_neg.expect: -------------------------------------------------------------------------------- 1 | TOKEN_FLOAT, "123.4e-2" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_scientific_neg.nix: -------------------------------------------------------------------------------- 1 | 123.4e-2 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_scientific_pos.expect: 
-------------------------------------------------------------------------------- 1 | TOKEN_FLOAT, "123.4e+2" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_scientific_pos.nix: -------------------------------------------------------------------------------- 1 | 123.4e+2 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_scientific_upper.expect: -------------------------------------------------------------------------------- 1 | TOKEN_FLOAT, "1.1E4" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/float_scientific_upper.nix: -------------------------------------------------------------------------------- 1 | 1.1E4 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/int.expect: -------------------------------------------------------------------------------- 1 | TOKEN_INTEGER, "42" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/int.nix: -------------------------------------------------------------------------------- 1 | 42 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/lambda.expect: -------------------------------------------------------------------------------- 1 | TOKEN_IDENT, "a" 2 | TOKEN_COLON, ":" 3 | TOKEN_WHITESPACE, " " 4 | TOKEN_IDENT, "b" 5 | TOKEN_COLON, ":" 6 | TOKEN_WHITESPACE, " " 7 | TOKEN_IDENT, "a" 8 | TOKEN_WHITESPACE, " " 9 | TOKEN_ADD, "+" 10 | TOKEN_WHITESPACE, " " 11 | TOKEN_IDENT, "b" 12 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/lambda.nix: -------------------------------------------------------------------------------- 1 | a: b: a + b 2 | 
-------------------------------------------------------------------------------- /test_data/tokenizer/success/lambda_arg_underscore.expect: -------------------------------------------------------------------------------- 1 | TOKEN_IDENT, "_" 2 | TOKEN_COLON, ":" 3 | TOKEN_IDENT, "null" 4 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/lambda_arg_underscore.nix: -------------------------------------------------------------------------------- 1 | _:null 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/let.expect: -------------------------------------------------------------------------------- 1 | TOKEN_LET, "let" 2 | TOKEN_WHITESPACE, " " 3 | TOKEN_IDENT, "a" 4 | TOKEN_WHITESPACE, " " 5 | TOKEN_ASSIGN, "=" 6 | TOKEN_WHITESPACE, " " 7 | TOKEN_INTEGER, "3" 8 | TOKEN_SEMICOLON, ";" 9 | TOKEN_WHITESPACE, " " 10 | TOKEN_IN, "in" 11 | TOKEN_WHITESPACE, " " 12 | TOKEN_IDENT, "a" 13 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/let.nix: -------------------------------------------------------------------------------- 1 | let a = 3; in a 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/list.expect: -------------------------------------------------------------------------------- 1 | TOKEN_L_BRACK, "[" 2 | TOKEN_IDENT, "a" 3 | TOKEN_WHITESPACE, " " 4 | TOKEN_INTEGER, "2" 5 | TOKEN_WHITESPACE, " " 6 | TOKEN_INTEGER, "3" 7 | TOKEN_WHITESPACE, " " 8 | TOKEN_STRING_START, """ 9 | TOKEN_STRING_CONTENT, "lol" 10 | TOKEN_STRING_END, """ 11 | TOKEN_R_BRACK, "]" 12 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/list.nix: -------------------------------------------------------------------------------- 1 | [a 2 3 "lol"] 2 | 
-------------------------------------------------------------------------------- /test_data/tokenizer/success/list_concat.expect: -------------------------------------------------------------------------------- 1 | TOKEN_L_BRACK, "[" 2 | TOKEN_INTEGER, "1" 3 | TOKEN_R_BRACK, "]" 4 | TOKEN_WHITESPACE, " " 5 | TOKEN_CONCAT, "++" 6 | TOKEN_WHITESPACE, " " 7 | TOKEN_L_BRACK, "[" 8 | TOKEN_INTEGER, "2" 9 | TOKEN_R_BRACK, "]" 10 | TOKEN_WHITESPACE, " " 11 | TOKEN_CONCAT, "++" 12 | TOKEN_WHITESPACE, " " 13 | TOKEN_L_BRACK, "[" 14 | TOKEN_INTEGER, "3" 15 | TOKEN_R_BRACK, "]" 16 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/list_concat.nix: -------------------------------------------------------------------------------- 1 | [1] ++ [2] ++ [3] 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/math.expect: -------------------------------------------------------------------------------- 1 | TOKEN_INTEGER, "1" 2 | TOKEN_WHITESPACE, " " 3 | TOKEN_ADD, "+" 4 | TOKEN_WHITESPACE, " " 5 | TOKEN_INTEGER, "2" 6 | TOKEN_WHITESPACE, " " 7 | TOKEN_MUL, "*" 8 | TOKEN_WHITESPACE, " " 9 | TOKEN_INTEGER, "3" 10 | TOKEN_WHITESPACE, " " 11 | TOKEN_ADD, "+" 12 | TOKEN_WHITESPACE, " " 13 | TOKEN_INTEGER, "4" 14 | TOKEN_WHITESPACE, " " 15 | TOKEN_DIV, "/" 16 | TOKEN_WHITESPACE, " " 17 | TOKEN_L_PAREN, "(" 18 | TOKEN_INTEGER, "5" 19 | TOKEN_WHITESPACE, " " 20 | TOKEN_SUB, "-" 21 | TOKEN_WHITESPACE, " " 22 | TOKEN_INTEGER, "6" 23 | TOKEN_R_PAREN, ")" 24 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/math.nix: -------------------------------------------------------------------------------- 1 | 1 + 2 * 3 + 4 / (5 - 6) 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/math_div_isnt_path.expect: 
-------------------------------------------------------------------------------- 1 | TOKEN_IDENT, "a" 2 | TOKEN_DIV, "/" 3 | TOKEN_WHITESPACE, " " 4 | TOKEN_INTEGER, "3" 5 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/math_div_isnt_path.nix: -------------------------------------------------------------------------------- 1 | a/ 3 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/math_no_ws.expect: -------------------------------------------------------------------------------- 1 | TOKEN_INTEGER, "5" 2 | TOKEN_MUL, "*" 3 | TOKEN_SUB, "-" 4 | TOKEN_L_PAREN, "(" 5 | TOKEN_INTEGER, "3" 6 | TOKEN_SUB, "-" 7 | TOKEN_INTEGER, "2" 8 | TOKEN_R_PAREN, ")" 9 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/math_no_ws.nix: -------------------------------------------------------------------------------- 1 | 5*-(3-2) 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/op_geq_isnt_path.expect: -------------------------------------------------------------------------------- 1 | TOKEN_IDENT, "x" 2 | TOKEN_MORE_OR_EQ, ">=" 3 | TOKEN_IDENT, "y" 4 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/op_geq_isnt_path.nix: -------------------------------------------------------------------------------- 1 | x>=y 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/op_update.expect: -------------------------------------------------------------------------------- 1 | TOKEN_IDENT, "a" 2 | TOKEN_UPDATE, "//" 3 | TOKEN_IDENT, "b" 4 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/op_update.nix: 
-------------------------------------------------------------------------------- 1 | a//b 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_absolute.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "/hello/world" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_absolute.nix: -------------------------------------------------------------------------------- 1 | /hello/world 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_home.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "~/hello/world" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_home.nix: -------------------------------------------------------------------------------- 1 | ~/hello/world 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_interp.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "./" 2 | TOKEN_INTERPOL_START, "${" 3 | TOKEN_IDENT, "foo" 4 | TOKEN_INTERPOL_END, "}" 5 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_interp.nix: -------------------------------------------------------------------------------- 1 | ./${foo} 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_interp_apply.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "./" 2 | TOKEN_INTERPOL_START, "${" 3 | TOKEN_IDENT, "foo" 4 | TOKEN_INTERPOL_END, "}" 5 | TOKEN_WHITESPACE, " " 6 | TOKEN_IDENT, "bar" 7 | 
-------------------------------------------------------------------------------- /test_data/tokenizer/success/path_interp_apply.nix: -------------------------------------------------------------------------------- 1 | ./${foo} bar 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_interp_multiple.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "./" 2 | TOKEN_INTERPOL_START, "${" 3 | TOKEN_IDENT, "foo" 4 | TOKEN_INTERPOL_END, "}" 5 | TOKEN_INTERPOL_START, "${" 6 | TOKEN_IDENT, "bar" 7 | TOKEN_INTERPOL_END, "}" 8 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_interp_multiple.nix: -------------------------------------------------------------------------------- 1 | ./${foo}${bar} 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_interp_multiple2.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "./" 2 | TOKEN_INTERPOL_START, "${" 3 | TOKEN_IDENT, "foo" 4 | TOKEN_INTERPOL_END, "}" 5 | TOKEN_PATH, "a" 6 | TOKEN_INTERPOL_START, "${" 7 | TOKEN_IDENT, "bar" 8 | TOKEN_INTERPOL_END, "}" 9 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_interp_multiple2.nix: -------------------------------------------------------------------------------- 1 | ./${foo}a${bar} 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_interp_then_plain.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "./" 2 | TOKEN_INTERPOL_START, "${" 3 | TOKEN_IDENT, "foo" 4 | TOKEN_INTERPOL_END, "}" 5 | TOKEN_PATH, ".jpg" 6 | -------------------------------------------------------------------------------- 
/test_data/tokenizer/success/path_interp_then_plain.nix: -------------------------------------------------------------------------------- 1 | ./${foo}.jpg 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_isnt_math.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "a+3/5+b" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_isnt_math.nix: -------------------------------------------------------------------------------- 1 | a+3/5+b 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_no_newline.expect: -------------------------------------------------------------------------------- 1 | TOKEN_IDENT, "import" 2 | TOKEN_WHITESPACE, " " 3 | TOKEN_PATH, "./." 4 | TOKEN_WHITESPACE, " 5 | " 6 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_no_newline.nix: -------------------------------------------------------------------------------- 1 | import ./. 
2 | 3 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_relative.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "hello/world" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_relative.nix: -------------------------------------------------------------------------------- 1 | hello/world 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_relative_prefix.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "./hello/world" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_relative_prefix.nix: -------------------------------------------------------------------------------- 1 | ./hello/world 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_store.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_store.nix: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_underscore.expect: -------------------------------------------------------------------------------- 1 | TOKEN_PATH, "hello_/world" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/path_underscore.nix: -------------------------------------------------------------------------------- 1 | hello_/world 2 | -------------------------------------------------------------------------------- 
/test_data/tokenizer/success/patterns.expect: -------------------------------------------------------------------------------- 1 | TOKEN_L_BRACE, "{" 2 | TOKEN_WHITESPACE, " " 3 | TOKEN_IDENT, "a" 4 | TOKEN_COMMA, "," 5 | TOKEN_WHITESPACE, " " 6 | TOKEN_IDENT, "b" 7 | TOKEN_WHITESPACE, " " 8 | TOKEN_QUESTION, "?" 9 | TOKEN_WHITESPACE, " " 10 | TOKEN_STRING_START, """ 11 | TOKEN_STRING_CONTENT, "default" 12 | TOKEN_STRING_END, """ 13 | TOKEN_COMMA, "," 14 | TOKEN_WHITESPACE, " " 15 | TOKEN_ELLIPSIS, "..." 16 | TOKEN_WHITESPACE, " " 17 | TOKEN_R_BRACE, "}" 18 | TOKEN_WHITESPACE, " " 19 | TOKEN_AT, "@" 20 | TOKEN_WHITESPACE, " " 21 | TOKEN_IDENT, "outer" 22 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/patterns.nix: -------------------------------------------------------------------------------- 1 | { a, b ? "default", ... } @ outer 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/select_dynamic.expect: -------------------------------------------------------------------------------- 1 | TOKEN_IDENT, "a" 2 | TOKEN_DOT, "." 3 | TOKEN_INTERPOL_START, "${" 4 | TOKEN_IDENT, "b" 5 | TOKEN_INTERPOL_END, "}" 6 | TOKEN_DOT, "." 
7 | TOKEN_IDENT, "c" 8 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/select_dynamic.nix: -------------------------------------------------------------------------------- 1 | a.${b}.c 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string.expect: -------------------------------------------------------------------------------- 1 | TOKEN_STRING_START, """ 2 | TOKEN_STRING_CONTENT, "Hello world" 3 | TOKEN_STRING_END, """ 4 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string.nix: -------------------------------------------------------------------------------- 1 | "Hello world" -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_dollar_escape.expect: -------------------------------------------------------------------------------- 1 | TOKEN_STRING_START, """ 2 | TOKEN_STRING_CONTENT, "$${test}" 3 | TOKEN_STRING_END, """ 4 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_dollar_escape.nix: -------------------------------------------------------------------------------- 1 | "$${test}" -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_escape.expect: -------------------------------------------------------------------------------- 1 | TOKEN_STRING_START, """ 2 | TOKEN_STRING_CONTENT, "Hello \"World\"" 3 | TOKEN_STRING_END, """ 4 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_escape.nix: -------------------------------------------------------------------------------- 1 | "Hello \"World\"" 2 | -------------------------------------------------------------------------------- 
/test_data/tokenizer/success/string_interp.expect: -------------------------------------------------------------------------------- 1 | TOKEN_STRING_START, """ 2 | TOKEN_INTERPOL_START, "${" 3 | TOKEN_IDENT, "hello" 4 | TOKEN_INTERPOL_END, "}" 5 | TOKEN_STRING_CONTENT, " " 6 | TOKEN_INTERPOL_START, "${" 7 | TOKEN_IDENT, "world" 8 | TOKEN_INTERPOL_END, "}" 9 | TOKEN_STRING_END, """ 10 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_interp.nix: -------------------------------------------------------------------------------- 1 | "${hello} ${world}" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_interp_dollar_escape.expect: -------------------------------------------------------------------------------- 1 | TOKEN_STRING_START, """ 2 | TOKEN_STRING_CONTENT, "\$" 3 | TOKEN_INTERPOL_START, "${" 4 | TOKEN_IDENT, "test" 5 | TOKEN_INTERPOL_END, "}" 6 | TOKEN_STRING_END, """ 7 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_interp_dollar_escape.nix: -------------------------------------------------------------------------------- 1 | "\$${test}" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_interp_nested.expect: -------------------------------------------------------------------------------- 1 | TOKEN_STRING_START, "''" 2 | TOKEN_INTERPOL_START, "${" 3 | TOKEN_STRING_START, """ 4 | TOKEN_INTERPOL_START, "${" 5 | TOKEN_IDENT, "var" 6 | TOKEN_INTERPOL_END, "}" 7 | TOKEN_STRING_END, """ 8 | TOKEN_INTERPOL_END, "}" 9 | TOKEN_STRING_END, "''" 10 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_interp_nested.nix: -------------------------------------------------------------------------------- 1 | ''${"${var}"}'' 
-------------------------------------------------------------------------------- /test_data/tokenizer/success/string_interp_select.expect: -------------------------------------------------------------------------------- 1 | TOKEN_STRING_START, """ 2 | TOKEN_STRING_CONTENT, "Hello, " 3 | TOKEN_INTERPOL_START, "${" 4 | TOKEN_WHITESPACE, " " 5 | TOKEN_L_BRACE, "{" 6 | TOKEN_WHITESPACE, " " 7 | TOKEN_IDENT, "world" 8 | TOKEN_WHITESPACE, " " 9 | TOKEN_ASSIGN, "=" 10 | TOKEN_WHITESPACE, " " 11 | TOKEN_STRING_START, """ 12 | TOKEN_STRING_CONTENT, "World" 13 | TOKEN_STRING_END, """ 14 | TOKEN_SEMICOLON, ";" 15 | TOKEN_WHITESPACE, " " 16 | TOKEN_R_BRACE, "}" 17 | TOKEN_DOT, "." 18 | TOKEN_IDENT, "world" 19 | TOKEN_WHITESPACE, " " 20 | TOKEN_INTERPOL_END, "}" 21 | TOKEN_STRING_CONTENT, "!" 22 | TOKEN_STRING_END, """ 23 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_interp_select.nix: -------------------------------------------------------------------------------- 1 | "Hello, ${ { world = "World"; }.world }!" 
-------------------------------------------------------------------------------- /test_data/tokenizer/success/string_isnt_path.expect: -------------------------------------------------------------------------------- 1 | TOKEN_STRING_START, """ 2 | TOKEN_STRING_CONTENT, "./" 3 | TOKEN_INTERPOL_START, "${" 4 | TOKEN_IDENT, "foo" 5 | TOKEN_INTERPOL_END, "}" 6 | TOKEN_STRING_END, """ 7 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_isnt_path.nix: -------------------------------------------------------------------------------- 1 | "./${foo}" -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_multiline.expect: -------------------------------------------------------------------------------- 1 | TOKEN_STRING_START, "''" 2 | TOKEN_STRING_CONTENT, " 3 | This is a multiline string :D 4 | indented by two 5 | \'\'\'\'\ 6 | ''${ interpolation was escaped } 7 | ''\${ interpolation was escaped } 8 | two single quotes: ''' 9 | three single quotes: '''' 10 | " 11 | TOKEN_STRING_END, "''" 12 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_multiline.nix: -------------------------------------------------------------------------------- 1 | '' 2 | This is a multiline string :D 3 | indented by two 4 | \'\'\'\'\ 5 | ''${ interpolation was escaped } 6 | ''\${ interpolation was escaped } 7 | two single quotes: ''' 8 | three single quotes: '''' 9 | '' -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_multiline_dollar_escape.expect: -------------------------------------------------------------------------------- 1 | TOKEN_STRING_START, "''" 2 | TOKEN_STRING_CONTENT, "$${test}" 3 | TOKEN_STRING_END, "''" 4 | -------------------------------------------------------------------------------- 
/test_data/tokenizer/success/string_multiline_dollar_escape.nix: -------------------------------------------------------------------------------- 1 | ''$${test}'' 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_multiline_interp_escape.expect: -------------------------------------------------------------------------------- 1 | TOKEN_STRING_START, "''" 2 | TOKEN_STRING_CONTENT, "''$" 3 | TOKEN_INTERPOL_START, "${" 4 | TOKEN_IDENT, "test" 5 | TOKEN_INTERPOL_END, "}" 6 | TOKEN_STRING_END, "''" 7 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/string_multiline_interp_escape.nix: -------------------------------------------------------------------------------- 1 | ''''$${test}'' 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/uri.expect: -------------------------------------------------------------------------------- 1 | TOKEN_URI, "https://google.com/?q=Hello+World" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/uri.nix: -------------------------------------------------------------------------------- 1 | https://google.com/?q=Hello+World 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/uri_with_underscore.expect: -------------------------------------------------------------------------------- 1 | TOKEN_URI, "https://goo_gle.com/?q=Hello+World" 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/uri_with_underscore.nix: -------------------------------------------------------------------------------- 1 | https://goo_gle.com/?q=Hello+World 2 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/with.expect: 
-------------------------------------------------------------------------------- 1 | TOKEN_WITH, "with" 2 | TOKEN_WHITESPACE, " " 3 | TOKEN_IDENT, "namespace" 4 | TOKEN_SEMICOLON, ";" 5 | TOKEN_WHITESPACE, " " 6 | TOKEN_IDENT, "expr" 7 | -------------------------------------------------------------------------------- /test_data/tokenizer/success/with.nix: -------------------------------------------------------------------------------- 1 | with namespace; expr 2 | --------------------------------------------------------------------------------