├── .github └── workflows │ └── ci.yml ├── .gitignore ├── LICENSE ├── README.md ├── build.zig ├── build.zig.zon ├── deps.nix ├── editors ├── helix │ ├── README.md │ └── languages.toml ├── nvim │ └── README.md └── vscode │ ├── .gitignore │ ├── .vscode │ └── launch.json │ ├── .vscodeignore │ ├── CHANGELOG.md │ ├── LICENSE │ ├── README.md │ ├── language-configuration.json │ ├── package-lock.json │ ├── package.json │ ├── schema.language-configuration.json │ ├── src │ ├── extension.ts │ ├── formatter.ts │ └── util.ts │ ├── syntaxes │ ├── schema.tmLanguage.json │ └── ziggy.tmLanguage.json │ └── tsconfig.json ├── flake.lock ├── flake.nix ├── src ├── cli │ ├── check.zig │ ├── convert.zig │ ├── convert │ │ └── json.zig │ ├── fmt.zig │ ├── load_schema.zig │ ├── logging.zig │ ├── lsp.zig │ ├── lsp │ │ ├── Document.zig │ │ ├── Schema.zig │ │ └── logic.zig │ └── query.zig ├── main.zig ├── root.zig ├── schema │ ├── Ast.zig │ ├── Diagnostic.zig │ ├── Schema.zig │ ├── Tokenizer.zig │ └── check_type.zig └── ziggy │ ├── Ast.zig │ ├── Diagnostic.zig │ ├── Parser.zig │ ├── Query.zig │ ├── RecoverAst.zig │ ├── ResilientParser.zig │ ├── Tokenizer.zig │ ├── dynamic.zig │ └── serializer.zig ├── tests ├── schema │ └── errors │ │ └── .keep ├── type_driven.zig └── ziggy │ ├── ast │ └── errors │ │ ├── .keep │ │ ├── missing_bottom_curly.ziggy │ │ ├── missing_bottom_curly_snap.txt │ │ ├── missing_comma.ziggy │ │ ├── missing_comma_snap.txt │ │ ├── struct.ziggy │ │ └── struct_snap.txt │ └── type-driven │ └── errors │ ├── .keep │ ├── duplicate_field.zig │ ├── duplicate_field.ziggy │ ├── duplicate_field_snap.txt │ ├── missing_bottom_curly.zig │ ├── missing_bottom_curly.ziggy │ ├── missing_bottom_curly_snap.txt │ ├── missing_comma.zig │ ├── missing_comma.ziggy │ ├── missing_comma_snap.txt │ ├── missing_field.zig │ ├── missing_field.ziggy │ ├── missing_field_snap.txt │ ├── struct.zig │ ├── struct.ziggy │ ├── struct_snap.txt │ ├── unknown_field.zig │ ├── unknown_field.ziggy │ └── 
unknown_field_snap.txt ├── tree-sitter-ziggy-schema ├── Cargo.toml ├── LICENSE ├── binding.gyp ├── bindings │ ├── node │ │ ├── binding.cc │ │ └── index.js │ └── rust │ │ ├── build.rs │ │ └── lib.rs ├── grammar.js ├── package-lock.json ├── package.json ├── queries │ ├── contexts.scm │ ├── highlights.scm │ ├── indents.scm │ └── rainbows.scm └── src │ ├── grammar.json │ ├── node-types.json │ ├── parser.c │ ├── root.zig │ └── tree_sitter │ └── parser.h └── tree-sitter-ziggy ├── Cargo.toml ├── LICENSE ├── binding.gyp ├── bindings ├── node │ ├── binding.cc │ └── index.js └── rust │ ├── build.rs │ └── lib.rs ├── grammar.js ├── package-lock.json ├── package.json ├── queries ├── contexts.scm ├── highlights.scm ├── indents.scm └── rainbows.scm └── src ├── grammar.json ├── node-types.json ├── parser.c ├── root.zig └── tree_sitter └── parser.h /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: push 3 | jobs: 4 | deploy: 5 | runs-on: ubuntu-latest 6 | steps: 7 | - uses: actions/checkout@v4 8 | with: 9 | fetch-depth: 0 # Change if you need git info 10 | 11 | - name: Setup Zig 12 | uses: mlugg/setup-zig@v1 13 | with: 14 | version: 0.14.0 15 | 16 | - name: Build 17 | run: zig build test 18 | 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | .zig-cache/ 3 | zig-out/ 4 | release/ 5 | result 6 | Cargo.lock 7 | target/ 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Loris Cro 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including 
without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Ziggy 2 | A data serialization language for expressing clear API messages, config files, etc. 3 | 4 | ## Status 5 | Alpha, using Ziggy now means participating in its development. 
6 | 7 | ## At a glance 8 | 9 | ```zig 10 | .id = @uuid("..."), 11 | .time = 1710085168, 12 | .payload = Command { 13 | .do = @action("clear_chat"), 14 | .sender = "kristoff-it", 15 | .roles = ["admin", "mod"], 16 | .extra = { 17 | "agent": "Mozilla/5.0", 18 | "os": "Linux/x64", 19 | }, 20 | } 21 | ``` 22 | ## Value Types 23 | Ziggy values can be of the following types: 24 | 25 | - Bytes `"🧑‍🚀"`, `"\x1B[?1000h gang"`, `\\multiline` 26 | - Numbers `123_000`, `1.23`, `0xff_ff_ff`, `0o7_5_5`, `0b01_01_01` 27 | - Null `null` 28 | - Bool `true`, `false` 29 | - Custom Literals `@date("2020-12-01")`, `@v("1.0.0")`, `@foo("bar")` 30 | - Array `[1, 2, 3]` 31 | - Struct `{ .fixed = "schema" }`, `Named { .for = "unions of structs" }` 32 | - Map `{ "custom": "keys" }` 33 | 34 | 35 | ## Documentation 36 | 37 | See the official website: https://ziggy-lang.io 38 | 39 | ## Development 40 | 41 | In order to build with nix using the correct dependencies please keep 42 | updated the [deps.nix](./deps.nix) file every time the [build.zig.zon](build.zig.zon) 43 | is changed. In order to do so use 44 | 45 | ```bash 46 | nix run .#update-deps 47 | ``` 48 | -------------------------------------------------------------------------------- /build.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const Build = std.Build; 3 | 4 | /// The full Ziggy parsing functionality is available at build time. 
5 | pub usingnamespace @import("src/root.zig"); 6 | 7 | pub fn build(b: *Build) !void { 8 | const target = b.standardTargetOptions(.{}); 9 | const optimize = b.standardOptimizeOption(.{}); 10 | 11 | const ziggy = b.addModule("ziggy", .{ 12 | .root_source_file = b.path("src/root.zig"), 13 | .target = target, 14 | .optimize = optimize, 15 | .strip = false, 16 | }); 17 | 18 | const cli = b.addExecutable(.{ 19 | .name = "ziggy", 20 | .root_source_file = b.path("src/main.zig"), 21 | .target = target, 22 | .optimize = optimize, 23 | }); 24 | 25 | const folders = b.dependency("known_folders", .{}).module("known-folders"); 26 | const lsp = b.dependency("lsp_kit", .{}).module("lsp"); 27 | 28 | cli.root_module.addImport("ziggy", ziggy); 29 | cli.root_module.addImport("known-folders", folders); 30 | cli.root_module.addImport("lsp", lsp); 31 | 32 | const run_exe = b.addRunArtifact(cli); 33 | if (b.args) |args| run_exe.addArgs(args); 34 | const run_exe_step = b.step("run", "Run the Ziggy tool"); 35 | run_exe_step.dependOn(&run_exe.step); 36 | 37 | b.installArtifact(cli); 38 | 39 | const ziggy_check = b.addExecutable(.{ 40 | .name = "ziggy_check", 41 | .root_source_file = b.path("src/main.zig"), 42 | .target = target, 43 | .optimize = optimize, 44 | }); 45 | 46 | ziggy_check.root_module.addImport("ziggy", ziggy); 47 | ziggy_check.root_module.addImport("known-folders", folders); 48 | ziggy_check.root_module.addImport("lsp", lsp); 49 | const check = b.step("check", "Check if the project compiles"); 50 | check.dependOn(&ziggy_check.step); 51 | 52 | try setupTests(b, target, optimize, ziggy, cli); 53 | try setupReleaseStep(b, ziggy, folders, lsp); 54 | } 55 | 56 | pub fn setupReleaseStep( 57 | b: *Build, 58 | ziggy: *Build.Module, 59 | folders: *Build.Module, 60 | lsp: *Build.Module, 61 | ) !void { 62 | const release_step = b.step("release", "Create releases for the Ziggy CLI tool"); 63 | 64 | const targets: []const std.Target.Query = &.{ 65 | .{ .cpu_arch = .aarch64, .os_tag = 
.macos }, 66 | .{ .cpu_arch = .aarch64, .os_tag = .linux }, 67 | .{ .cpu_arch = .x86_64, .os_tag = .macos }, 68 | .{ .cpu_arch = .x86_64, .os_tag = .linux, .abi = .musl }, 69 | .{ .cpu_arch = .x86_64, .os_tag = .windows }, 70 | .{ .cpu_arch = .aarch64, .os_tag = .windows }, 71 | }; 72 | 73 | for (targets) |t| { 74 | const release_target = b.resolveTargetQuery(t); 75 | 76 | const release_exe = b.addExecutable(.{ 77 | .name = "ziggy", 78 | .root_source_file = b.path("src/main.zig"), 79 | .target = release_target, 80 | .optimize = .ReleaseFast, 81 | }); 82 | 83 | release_exe.root_module.addImport("ziggy", ziggy); 84 | release_exe.root_module.addImport("known-folders", folders); 85 | release_exe.root_module.addImport("lsp", lsp); 86 | 87 | const target_output = b.addInstallArtifact(release_exe, .{ 88 | .dest_dir = .{ 89 | .override = .{ 90 | .custom = try t.zigTriple(b.allocator), 91 | }, 92 | }, 93 | }); 94 | 95 | release_step.dependOn(&target_output.step); 96 | } 97 | } 98 | 99 | pub fn setupTests( 100 | b: *Build, 101 | target: std.Build.ResolvedTarget, 102 | optimize: std.builtin.OptimizeMode, 103 | ziggy: *Build.Module, 104 | cli: *Build.Step.Compile, 105 | ) !void { 106 | const test_step = b.step("test", "Run unit & snapshot tests"); 107 | 108 | const unit_tests = b.addTest(.{ 109 | .root_source_file = b.path("src/root.zig"), 110 | .target = target, 111 | .optimize = optimize, 112 | .filters = b.option([]const []const u8, "test-filter", "test filter") orelse &.{}, 113 | }); 114 | 115 | const run_unit_tests = b.addRunArtifact(unit_tests); 116 | if (b.args) |args| run_unit_tests.addArgs(args); 117 | test_step.dependOn(&run_unit_tests.step); 118 | 119 | const diff = b.addSystemCommand(&.{ 120 | "git", 121 | "diff", 122 | "--cached", 123 | "--exit-code", 124 | }); 125 | diff.addDirectoryArg(b.path("tests/")); 126 | diff.setName("git diff tests/"); 127 | test_step.dependOn(&diff.step); 128 | 129 | // We need to stage all of tests/ in order for untracked files to show 
up in 130 | // the diff. It's also not a bad automatism since it avoids the problem of 131 | // forgetting to stage new snapshot files. 132 | const git_add = b.addSystemCommand(&.{ "git", "add" }); 133 | git_add.addDirectoryArg(b.path("tests/")); 134 | git_add.setName("git add tests/"); 135 | diff.step.dependOn(&git_add.step); 136 | 137 | b.build_root.handle.access("tests/ziggy", .{}) catch { 138 | const fail = b.addFail("snapshot test folder is missing, can't run tests (note: snapshot tests are not included in the ziggy manifest)"); 139 | git_add.step.dependOn(&fail.step); 140 | return; 141 | }; 142 | 143 | // errors - ast 144 | { 145 | const base_path = b.pathJoin(&.{ "tests", "ziggy", "ast", "errors" }); 146 | const tests_dir = try b.build_root.handle.openDir(base_path, .{ 147 | .iterate = true, 148 | }); 149 | 150 | var it = tests_dir.iterateAssumeFirstIteration(); 151 | while (try it.next()) |entry| { 152 | if (entry.kind == .directory) continue; 153 | if (entry.name[0] == '.') continue; 154 | const ext = std.fs.path.extension(entry.name); 155 | if (!std.mem.eql(u8, ext, ".ziggy")) continue; 156 | 157 | const run_cli = b.addRunArtifact(cli); 158 | run_cli.addArg("fmt"); 159 | run_cli.addArg(entry.name); 160 | run_cli.setCwd(b.path(base_path)); 161 | run_cli.expectExitCode(1); 162 | run_cli.has_side_effects = true; 163 | 164 | const out = run_cli.captureStdErr(); 165 | const snap_name = b.fmt("{s}_snap.txt", .{ 166 | std.fs.path.stem(entry.name), 167 | }); 168 | 169 | const update_snap = b.addUpdateSourceFiles(); 170 | update_snap.addCopyFileToSource(out, b.pathJoin(&.{ 171 | base_path, 172 | snap_name, 173 | })); 174 | 175 | git_add.step.dependOn(&update_snap.step); 176 | } 177 | } 178 | 179 | // errors - type driven 180 | { 181 | const base_path = b.pathJoin(&.{ "tests", "ziggy", "type-driven", "errors" }); 182 | const tests_dir = try b.build_root.handle.openDir(base_path, .{ 183 | .iterate = true, 184 | }); 185 | 186 | var it = 
tests_dir.iterateAssumeFirstIteration(); 187 | while (try it.next()) |entry| { 188 | if (entry.kind == .directory) continue; 189 | if (entry.name[0] == '.') continue; 190 | const ext = std.fs.path.extension(entry.name); 191 | if (!std.mem.eql(u8, ext, ".ziggy")) continue; 192 | 193 | const basename = std.fs.path.stem(entry.name); 194 | 195 | const test_program = b.addExecutable(.{ 196 | .name = b.fmt("{s}_test", .{basename}), 197 | .root_source_file = b.path("tests/type_driven.zig"), 198 | .target = target, 199 | .optimize = optimize, 200 | }); 201 | 202 | const type_module_name = b.fmt("{s}.zig", .{basename}); 203 | const type_module = b.createModule(.{ 204 | .root_source_file = b.path(b.pathJoin(&.{ 205 | base_path, 206 | type_module_name, 207 | })), 208 | }); 209 | test_program.root_module.addImport("test_type", type_module); 210 | test_program.root_module.addImport("ziggy", ziggy); 211 | 212 | const run_cli = b.addRunArtifact(test_program); 213 | run_cli.addFileArg(b.path(b.pathJoin(&.{ base_path, entry.name }))); 214 | run_cli.expectExitCode(1); 215 | 216 | const out = run_cli.captureStdErr(); 217 | const snap_name = b.fmt("{s}_snap.txt", .{ 218 | std.fs.path.stem(entry.name), 219 | }); 220 | 221 | const update_snap = b.addUpdateSourceFiles(); 222 | update_snap.addCopyFileToSource(out, b.pathJoin(&.{ 223 | base_path, 224 | snap_name, 225 | })); 226 | 227 | git_add.step.dependOn(&update_snap.step); 228 | } 229 | } 230 | } 231 | -------------------------------------------------------------------------------- /build.zig.zon: -------------------------------------------------------------------------------- 1 | .{ 2 | .name = .ziggy, 3 | .version = "0.1.0", 4 | .fingerprint = 0x7d935ea7bf3c3891, 5 | .minimum_zig_version = "0.14.0-dev.3451+d8d2aa9af", 6 | .dependencies = .{ 7 | .known_folders = .{ 8 | .url = "git+https://github.com/ziglibs/known-folders#aa24df42183ad415d10bc0a33e6238c437fc0f59", 9 | .hash = 
"known_folders-0.0.0-Fy-PJtLDAADGDOwYwMkVydMSTp_aN-nfjCZw6qPQ2ECL", 10 | }, 11 | .lsp_kit = .{ 12 | .url = "git+https://github.com/kristoff-it/zig-lsp-kit#87ff3d537a0c852442e180137d9557711963802c", 13 | .hash = "lsp_kit-0.1.0-hAAxO9S9AADv_5D0iplASFtNCFXAPk54M0u-3jj2MRFk", 14 | }, 15 | .yaml = .{ 16 | .url = "git+https://github.com/kubkon/zig-yaml#27f63d3d2d13ed228d8fc077635205e6c2a405c7", 17 | .hash = "zig_yaml-0.1.0-C1161hVrAgDsyB2EZnq-Vp-QuZ9xJm2y0dECRXGG3UaP", 18 | }, 19 | }, 20 | .paths = .{ 21 | "build.zig", 22 | "build.zig.zon", 23 | "src", 24 | "LICENSE", 25 | "README.md", 26 | }, 27 | } 28 | -------------------------------------------------------------------------------- /deps.nix: -------------------------------------------------------------------------------- 1 | # generated by zon2nix (https://github.com/nix-community/zon2nix) 2 | 3 | { linkFarm, fetchzip }: 4 | 5 | linkFarm "zig-packages" [ 6 | { 7 | name = "12204a4669fa6e8ebb1720e3581a24c1a7f538f2f4ee3ebc91a9e36285c89572d761"; 8 | path = fetchzip { 9 | url = "https://github.com/MFAshby/zig-lsp-kit/archive/1c07e3e3305f8dd6355735173321c344fc152d3e.tar.gz"; 10 | hash = "sha256-WBJ7hbc69W3mtzrMLwehcKccSbVe/8Dy9sX4IA4VbcY="; 11 | }; 12 | } 13 | { 14 | name = "1220841471bd4891cbb199d27cc5e7e0fb0a5b7c5388a70bd24fa3eb7285755c396c"; 15 | path = fetchzip { 16 | url = "https://github.com/kubkon/zig-yaml/archive/beddd5da24de91d430ca7028b00986f7745b13e9.tar.gz"; 17 | hash = "sha256-CJms2LjwoYNlbhapFYzvOImuaMH/zikllYeQ2/VlHi0="; 18 | }; 19 | } 20 | { 21 | name = "12209cde192558f8b3dc098ac2330fc2a14fdd211c5433afd33085af75caa9183147"; 22 | path = fetchzip { 23 | url = "https://github.com/ziglibs/known-folders/archive/0ad514dcfb7525e32ae349b9acc0a53976f3a9fa.tar.gz"; 24 | hash = "sha256-X+XkFj56MkYxxN9LUisjnkfCxUfnbkzBWHy9pwg5M+g="; 25 | }; 26 | } 27 | ] 28 | -------------------------------------------------------------------------------- /editors/helix/README.md: 
-------------------------------------------------------------------------------- 1 | # Ziggy support for Helix 2 | 3 | 1. In your Helix **runtime directory** (https://docs.helix-editor.com/install.html#configuring-helixs-runtime-files), copy the Tree Sitter queries from our parsers. 4 | From the root of this repository, run the following two commands after replacing `HELIX_RUNTIME_PATH`: 5 | - `cp -rT tree-sitter-ziggy/queries HELIX_RUNTIME_PATH/queries/ziggy` 6 | - `cp -rT tree-sitter-ziggy-schema/queries HELIX_RUNTIME_PATH/queries/ziggy_schema` 7 | 8 | NOTE: '-T' makes it so you can run the command multiple times without nesting new copies of `queries` more deeply than intended. Also macOS doesn't support it. 9 | 10 | 11 | 2. In your Helix **config directory** (usually `~/.config/helix/`) create `languages.toml` and copy in the relevant sections from the `languages.toml` file present in this directory. 12 | 13 | 3. Run `hx --grammar build` 14 | 15 | 16 | -------------------------------------------------------------------------------- /editors/helix/languages.toml: -------------------------------------------------------------------------------- 1 | [language-server.ziggy-lsp] 2 | command = "ziggy" 3 | args = ["lsp"] 4 | 5 | [[language]] 6 | name = "ziggy" 7 | scope = "text.ziggy" 8 | roots = [] 9 | injection-regex = "ziggy|zgy" 10 | file-types = ["ziggy", "zgy"] 11 | comment-token = "//" 12 | auto-format = true 13 | formatter = { command = "ziggy" , args = ["fmt", "--stdin"] } 14 | language-servers = [ "ziggy-lsp" ] 15 | 16 | [[grammar]] 17 | name = "ziggy" 18 | source = { git = "https://github.com/kristoff-it/ziggy", rev = "0e46579ed878bb28a78cf624c2e593eb39301648", subpath = "tree-sitter-ziggy" } 19 | 20 | [[language]] 21 | name = "ziggy_schema" 22 | scope = "text.ziggy_schema" 23 | roots = [] 24 | injection-regex = "ziggy-schema|zgy-schema" 25 | file-types = ["ziggy-schema", "zgy-schema"] 26 | comment-token = "///" 27 | indent = { tab-width = 4, unit = " " } 28 | 
formatter = { command = "ziggy" , args = ["fmt", "-", "--type", "schema"] } 29 | auto-format = true 30 | language-servers = [ "ziggy-schema-lsp" ] 31 | 32 | [[grammar]] 33 | name = "ziggy_schema" 34 | source = { git = "https://github.com/kristoff-it/ziggy", rev = "0e46579ed878bb28a78cf624c2e593eb39301648", subpath = "tree-sitter-ziggy-schema" } 35 | 36 | [language-server.ziggy-schema-lsp] 37 | command = "ziggy" 38 | args = ["lsp", "--schema"] 39 | -------------------------------------------------------------------------------- /editors/nvim/README.md: -------------------------------------------------------------------------------- 1 | # Ziggy support for Neovim 2 | 3 | ## Tree Sitter grammar and queries 4 | 5 | ### 1. Add the following lines to your `nvim-treesitter` config 6 | 7 | The following lines should be pasted inside of your config file. 8 | 9 | ```lua 10 | local parser_config = require("nvim-treesitter.parsers").get_parser_configs() 11 | parser_config.ziggy = { 12 | install_info = { 13 | url = "https://github.com/kristoff-it/ziggy", -- local path or git repo 14 | includes = {"tree-sitter-ziggy/src"}, 15 | files = {"tree-sitter-ziggy/src/parser.c"}, -- note that some parsers also require src/scanner.c or src/scanner.cc 16 | -- optional entries: 17 | branch = "main", -- default branch in case of git repo if different from master 18 | generate_requires_npm = false, -- if stand-alone parser without npm dependencies 19 | requires_generate_from_grammar = false, -- if folder contains pre-generated src/parser.c 20 | }, 21 | } 22 | 23 | parser_config.ziggy_schema = { 24 | install_info = { 25 | url = "https://github.com/kristoff-it/ziggy", -- local path or git repo 26 | files = {"tree-sitter-ziggy-schema/src/parser.c"}, -- note that some parsers also require src/scanner.c or src/scanner.cc 27 | -- optional entries: 28 | branch = "main", -- default branch in case of git repo if different from master 29 | generate_requires_npm = false, -- if stand-alone parser without 
npm dependencies 30 | requires_generate_from_grammar = false, -- if folder contains pre-generated src/parser.c 31 | }, 32 | filetype = "ziggy-schema", 33 | } 34 | 35 | vim.filetype.add({ 36 | extension = { 37 | ziggy = 'ziggy', 38 | ["ziggy-schema"] = "ziggy_schema", 39 | } 40 | }) 41 | ``` 42 | 43 | ### 2. Copy the queries into your runtime path 44 | 45 | NOTE: '-T' makes it so you can run the command multiple times without nesting new copies of `queries` more deeply than intended. Also macOS doesn't support it. 46 | 47 | - `cp -rT tree-sitter-ziggy/queries NVIM_RUNTIME_PATH/queries/ziggy` 48 | - `cp -rT tree-sitter-ziggy-schema/queries NVIM_RUNTIME_PATH/queries/ziggy_schema` 49 | 50 | ### 3. Open Neovim and compile the grammars 51 | 52 | - `:TSInstall ziggy` 53 | - `:TSInstall ziggy_schema` 54 | 55 | ## Autoformatting 56 | By using the `ziggy` CLI tool. 57 | 58 | In your conform.nvim config add two new formatter definitions and map them 59 | to their corresponding filetype: 60 | ```lua 61 | formatters = { 62 | ziggy = { 63 | inherit = false, 64 | command = "ziggy", 65 | stdin = true, 66 | args = { 'fmt', '--stdin' }, 67 | }, 68 | ziggy_schema = { 69 | inherit = false, 70 | command = "ziggy", 71 | stdin = true, 72 | args = { 'fmt', '--stdin-schema' }, 73 | }, 74 | }, 75 | 76 | formatters_by_ft = { 77 | ziggy = { 'ziggy' }, 78 | ziggy_schema = { 'ziggy_schema' }, 79 | }, 80 | ``` 81 | 82 | ## LSP 83 | Add the following to your Neovim config: 84 | 85 | ```lua 86 | vim.api.nvim_create_autocmd("FileType", { 87 | group = vim.api.nvim_create_augroup("ziggy", {}), 88 | pattern = "ziggy", 89 | callback = function() 90 | vim.lsp.start({ 91 | name = "Ziggy LSP", 92 | cmd = { "ziggy", "lsp" }, 93 | root_dir = vim.loop.cwd(), 94 | flags = { exit_timeout = 1000 }, 95 | }) 96 | end, 97 | }) 98 | vim.api.nvim_create_autocmd("FileType", { 99 | group = vim.api.nvim_create_augroup("ziggy_schema", {}), 100 | pattern = "ziggy_schema", 101 | callback = function() 102 | vim.lsp.start({ 
103 | name = "Ziggy LSP", 104 | cmd = { "ziggy", "lsp", "--schema" }, 105 | root_dir = vim.loop.cwd(), 106 | flags = { exit_timeout = 1000 }, 107 | }) 108 | end, 109 | }) 110 | ``` 111 | -------------------------------------------------------------------------------- /editors/vscode/.gitignore: -------------------------------------------------------------------------------- 1 | out/ 2 | node_modules 3 | .vscode-test/ 4 | *.vsix 5 | -------------------------------------------------------------------------------- /editors/vscode/.vscode/launch.json: -------------------------------------------------------------------------------- 1 | // A launch configuration that launches the extension inside a new window 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | { 6 | "version": "0.2.0", 7 | "configurations": [ 8 | { 9 | "name": "Extension", 10 | "type": "extensionHost", 11 | "request": "launch", 12 | "args": [ 13 | "--extensionDevelopmentPath=${workspaceFolder}" 14 | ] 15 | } 16 | ] 17 | } -------------------------------------------------------------------------------- /editors/vscode/.vscodeignore: -------------------------------------------------------------------------------- 1 | .vscode/** 2 | .vscode-test/** 3 | *.vsix 4 | src/** 5 | node_modules/** 6 | .gitignore 7 | -------------------------------------------------------------------------------- /editors/vscode/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | All notable changes to the "ziggy" extension will be documented in this file. 4 | 5 | Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how to structure this file. 
6 | 7 | ## [Unreleased] 8 | 9 | - Initial release -------------------------------------------------------------------------------- /editors/vscode/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Loris Cro 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /editors/vscode/README.md: -------------------------------------------------------------------------------- 1 | # Ziggy VSCode Extension 2 | 3 | This extension gives you syntax highlighting for Ziggy Document files and Ziggy Schema files, and sets up LSP support. 
4 | 5 | **NOTE: you will need to get the `ziggy` CLI tool and have it available in your PATH!** -------------------------------------------------------------------------------- /editors/vscode/language-configuration.json: -------------------------------------------------------------------------------- 1 | { 2 | "comments": { 3 | "lineComment": "//", 4 | }, 5 | "brackets": [ 6 | ["{", "}"], 7 | ["[", "]"], 8 | ["(", ")"] 9 | ], 10 | "autoClosingPairs": [ 11 | ["{", "}"], 12 | ["[", "]"], 13 | ["(", ")"], 14 | ["\"", "\""], 15 | ["'", "'"] 16 | ], 17 | "surroundingPairs": [ 18 | ["{", "}"], 19 | ["[", "]"], 20 | ["(", ")"], 21 | ["\"", "\""], 22 | ["'", "'"] 23 | ] 24 | } -------------------------------------------------------------------------------- /editors/vscode/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ziggy", 3 | "displayName": "ziggy", 4 | "description": "Ziggy document file format", 5 | "repository": "https://github.com/kristoff-it/ziggy", 6 | "publisher": "LorisCro", 7 | "version": "0.0.3", 8 | "engines": { 9 | "vscode": "^1.87.0" 10 | }, 11 | "categories": [ 12 | "Programming Languages" 13 | ], 14 | "contributes": { 15 | "languages": [ 16 | { 17 | "id": "ziggy", 18 | "aliases": [ 19 | "Ziggy", 20 | "ziggy" 21 | ], 22 | "extensions": [ 23 | ".ziggy", 24 | ".zgy" 25 | ], 26 | "configuration": "./language-configuration.json" 27 | }, 28 | { 29 | "id": "ziggy_schema", 30 | "aliases": [ 31 | "Ziggy Schema", 32 | "ziggy schema", 33 | "ziggy_schema", 34 | "ziggy-schema", 35 | "ziggy-schema", 36 | "zschema" 37 | ], 38 | "extensions": [ 39 | ".ziggy-schema", 40 | ".zgy-schema" 41 | ], 42 | "configuration": "./schema.language-configuration.json" 43 | } 44 | ], 45 | "grammars": [ 46 | { 47 | "language": "ziggy", 48 | "scopeName": "text.ziggy", 49 | "path": "./syntaxes/ziggy.tmLanguage.json" 50 | }, 51 | { 52 | "language": "ziggy_schema", 53 | "scopeName": "text.ziggy_schema", 54 | "path": 
"./syntaxes/schema.tmLanguage.json" 55 | } 56 | ] 57 | }, 58 | "main": "./out/extension", 59 | 60 | "devDependencies": { 61 | "@types/mocha": "^2.2.48", 62 | "@types/node": "^18.0.0", 63 | "@types/vscode": "^1.68.0", 64 | "@types/which": "^2.0.1", 65 | "@typescript-eslint/eslint-plugin": "^6.7.0", 66 | "@typescript-eslint/parser": "^6.7.0", 67 | "eslint": "^8.49.0", 68 | "vscode-test": "^1.4.0" 69 | }, 70 | 71 | "dependencies": { 72 | "@vscode/vsce": "^2.24.0", 73 | "camelcase": "^7.0.1", 74 | "esbuild": "^0.12.1", 75 | "lodash-es": "^4.17.21", 76 | "lodash.debounce": "^4.0.8", 77 | "mkdirp": "^2.1.3", 78 | "vscode-languageclient": "^9.0.1", 79 | "which": "^3.0.0" 80 | }, 81 | "scripts": { 82 | "vscode:prepublish": "npm run compile", 83 | "compile": "esbuild --bundle --sourcemap=external --minify --external:vscode src/extension.ts --outdir=out --platform=node --format=cjs", 84 | "watch": "esbuild --watch --bundle --sourcemap=external --external:vscode src/extension.ts --outdir=out --platform=node --format=cjs", 85 | "test": "npm run compile && node ./node_modules/vscode/bin/test", 86 | "lint": "eslint . 
--ext .ts" 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /editors/vscode/schema.language-configuration.json: -------------------------------------------------------------------------------- 1 | { 2 | "comments": { 3 | "lineComment": "//", 4 | }, 5 | "brackets": [ 6 | ["{", "}"], 7 | ["[", "]"], 8 | ["(", ")"] 9 | ], 10 | "autoClosingPairs": [ 11 | ["{", "}"], 12 | ["[", "]"], 13 | ["(", ")"], 14 | ["\"", "\""], 15 | ["'", "'"] 16 | ], 17 | "surroundingPairs": [ 18 | ["{", "}"], 19 | ["[", "]"], 20 | ["(", ")"], 21 | ["\"", "\""], 22 | ["'", "'"] 23 | ] 24 | } -------------------------------------------------------------------------------- /editors/vscode/src/extension.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'path'; 2 | import { workspace, ExtensionContext, window, languages } from 'vscode'; 3 | import { ZiggyFormatProvider, ZiggyRangeFormatProvider } from './formatter'; 4 | 5 | import { 6 | LanguageClient, 7 | LanguageClientOptions, 8 | ServerOptions 9 | } from 'vscode-languageclient/node'; 10 | 11 | let client: LanguageClient; 12 | 13 | const logChannel = window.createOutputChannel("ziggy"); 14 | 15 | export function activate(context: ExtensionContext) { 16 | context.subscriptions.push( 17 | languages.registerDocumentFormattingEditProvider( 18 | [{ scheme: "file", language: "ziggy"}], 19 | new ZiggyFormatProvider(logChannel), 20 | ), 21 | ); 22 | context.subscriptions.push( 23 | languages.registerDocumentRangeFormattingEditProvider( 24 | [{ scheme: "file", language: "ziggy"}], 25 | new ZiggyRangeFormatProvider(logChannel), 26 | ), 27 | ); 28 | 29 | 30 | // If the extension is launched in debug mode then the debug server options are used 31 | // Otherwise the run options are used 32 | const serverOptions: ServerOptions = { 33 | run: { command: "ziggy", args: ["lsp"] }, 34 | debug: { command: "ziggy", args: ["lsp"] }, 35 | }; 36 | 37 | // 
Options to control the language client 38 | const clientOptions: LanguageClientOptions = { 39 | // Register the server for plain text documents 40 | documentSelector: [ 41 | { scheme: "file", language: 'ziggy' }, 42 | { scheme: "file", language: 'ziggy_schema' }, 43 | ], 44 | synchronize: { 45 | // Notify the server about file changes to '.clientrc files contained in the workspace 46 | fileEvents: workspace.createFileSystemWatcher('**/.zgy') 47 | } 48 | }; 49 | 50 | // Create the language client and start the client. 51 | const client = new LanguageClient( 52 | "ziggy", 53 | "Ziggy Language Server", 54 | serverOptions, 55 | clientOptions 56 | ); 57 | 58 | client.start().catch(reason => { 59 | window.showWarningMessage(`Failed to run Ziggy Language Server: ${reason}`); 60 | }).then(() => { 61 | client.getFeature("textDocument/formatting").clear(); 62 | }); 63 | } 64 | 65 | export function deactivate(): Thenable | undefined { 66 | if (!client) { 67 | return undefined; 68 | } 69 | return client.stop(); 70 | } -------------------------------------------------------------------------------- /editors/vscode/src/formatter.ts: -------------------------------------------------------------------------------- 1 | import * as vscode from "vscode"; 2 | import { OutputChannel, TextEdit } from "vscode"; 3 | import { execCmd, getZiggyPath } from "./util"; 4 | 5 | export class ZiggyFormatProvider implements vscode.DocumentFormattingEditProvider { 6 | private _channel: OutputChannel; 7 | 8 | constructor(logChannel: OutputChannel) { 9 | this._channel = logChannel; 10 | } 11 | 12 | provideDocumentFormattingEdits( 13 | document: vscode.TextDocument, 14 | ): Thenable { 15 | const logger = this._channel; 16 | return ziggyFormat(document) 17 | .then(({ stdout }) => { 18 | logger.clear(); 19 | const lastLineId = document.lineCount - 1; 20 | const wholeDocument = new vscode.Range( 21 | 0, 22 | 0, 23 | lastLineId, 24 | document.lineAt(lastLineId).text.length, 25 | ); 26 | return [new 
TextEdit(wholeDocument, stdout),]; 27 | }) 28 | .catch((reason) => { 29 | const config = vscode.workspace.getConfiguration("zig"); 30 | 31 | logger.clear(); 32 | logger.appendLine(reason.toString().replace("", document.fileName)); 33 | if (config.get("revealOutputChannelOnFormattingError")) { 34 | logger.show(true); 35 | } 36 | return null; 37 | }); 38 | } 39 | } 40 | 41 | // Same as full document formatter for now 42 | export class ZiggyRangeFormatProvider implements vscode.DocumentRangeFormattingEditProvider { 43 | private _channel: OutputChannel; 44 | constructor(logChannel: OutputChannel) { 45 | this._channel = logChannel; 46 | } 47 | 48 | provideDocumentRangeFormattingEdits( 49 | document: vscode.TextDocument, 50 | ): Thenable { 51 | const logger = this._channel; 52 | return ziggyFormat(document) 53 | .then(({ stdout }) => { 54 | logger.clear(); 55 | const lastLineId = document.lineCount - 1; 56 | const wholeDocument = new vscode.Range( 57 | 0, 58 | 0, 59 | lastLineId, 60 | document.lineAt(lastLineId).text.length, 61 | ); 62 | return [new TextEdit(wholeDocument, stdout),]; 63 | }) 64 | .catch((reason) => { 65 | const config = vscode.workspace.getConfiguration("zig"); 66 | 67 | logger.clear(); 68 | logger.appendLine(reason.toString().replace("", document.fileName)); 69 | if (config.get("revealOutputChannelOnFormattingError")) { 70 | logger.show(true); 71 | } 72 | return null; 73 | }); 74 | } 75 | } 76 | 77 | function ziggyFormat(document: vscode.TextDocument) { 78 | const ziggyPath = getZiggyPath(); 79 | 80 | const options = { 81 | cmdArguments: ["fmt", "--stdin"], 82 | notFoundText: "Could not find ziggy. 
Please add ziggy to your PATH or specify a custom path to the ziggy binary in your settings.", 83 | }; 84 | const format = execCmd(ziggyPath, options); 85 | 86 | format.stdin.write(document.getText()); 87 | format.stdin.end(); 88 | 89 | return format; 90 | } -------------------------------------------------------------------------------- /editors/vscode/src/util.ts: -------------------------------------------------------------------------------- 1 | import * as cp from "child_process"; 2 | import * as fs from "fs"; 3 | import * as os from "os"; 4 | import * as path from "path"; 5 | import { window, workspace } from "vscode"; 6 | import which from "which"; 7 | 8 | export const isWindows = process.platform === "win32"; 9 | 10 | /** Options for execCmd */ 11 | export interface ExecCmdOptions { 12 | /** The project root folder for this file is used as the cwd of the process */ 13 | fileName?: string; 14 | /** Any arguments */ 15 | cmdArguments?: string[]; 16 | /** Shows a message if an error occurs (in particular the command not being */ 17 | /* found), instead of rejecting. If this happens, the promise never resolves */ 18 | showMessageOnError?: boolean; 19 | /** Called after the process successfully starts */ 20 | onStart?: () => void; 21 | /** Called when data is sent to stdout */ 22 | onStdout?: (data: string) => void; 23 | /** Called when data is sent to stderr */ 24 | onStderr?: (data: string) => void; 25 | /** Called after the command (successfully or unsuccessfully) exits */ 26 | onExit?: () => void; 27 | /** Text to add when command is not found (maybe helping how to install) */ 28 | notFoundText?: string; 29 | } 30 | 31 | /** Type returned from execCmd. Is a promise for when the command completes 32 | * and also a wrapper to access ChildProcess-like methods. 
33 | */ 34 | export interface ExecutingCmd 35 | extends Promise<{ stdout: string; stderr: string }> { 36 | /** The process's stdin */ 37 | stdin: NodeJS.WritableStream; 38 | /** End the process */ 39 | kill(); 40 | /** Is the process running */ 41 | isRunning: boolean; // tslint:disable-line 42 | } 43 | 44 | /** Executes a command. Shows an error message if the command isn't found */ 45 | export function execCmd 46 | (cmd: string, options: ExecCmdOptions = {}): ExecutingCmd { 47 | 48 | const { fileName, onStart, onStdout, onStderr, onExit } = options; 49 | let childProcess, firstResponse = true, wasKilledbyUs = false; 50 | 51 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 52 | const executingCmd: any = new Promise((resolve, reject) => { 53 | const cmdArguments = options ? options.cmdArguments : []; 54 | 55 | childProcess = 56 | cp.execFile(cmd, cmdArguments, { cwd: detectProjectRoot(fileName || workspace.rootPath + "/fakeFileName"), maxBuffer: 10 * 1024 * 1024 }, handleExit); 57 | 58 | 59 | childProcess.stdout.on("data", (data: Buffer) => { 60 | if (firstResponse && onStart) { 61 | onStart(); 62 | } 63 | firstResponse = false; 64 | if (onStdout) { 65 | onStdout(data.toString()); 66 | } 67 | }); 68 | 69 | childProcess.stderr.on("data", (data: Buffer) => { 70 | if (firstResponse && onStart) { 71 | onStart(); 72 | } 73 | firstResponse = false; 74 | if (onStderr) { 75 | onStderr(data.toString()); 76 | } 77 | }); 78 | 79 | function handleExit(err: Error, stdout: string, stderr: string) { 80 | executingCmd.isRunning = false; 81 | if (onExit) { 82 | onExit(); 83 | } 84 | if (!wasKilledbyUs) { 85 | if (err) { 86 | if (options.showMessageOnError) { 87 | const cmdName = cmd.split(" ", 1)[0]; 88 | const cmdWasNotFound = 89 | // Windows method apparently still works on non-English systems 90 | (isWindows && 91 | err.message.includes(`'${cmdName}' is not recognized`)) || 92 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 93 | (!isWindows && 
(err).code === 127); 94 | 95 | if (cmdWasNotFound) { 96 | const notFoundText = options ? options.notFoundText : ""; 97 | window.showErrorMessage( 98 | `${cmdName} is not available in your path. ` + notFoundText, 99 | ); 100 | } else { 101 | window.showErrorMessage(err.message); 102 | } 103 | } else { 104 | reject(err); 105 | } 106 | } else { 107 | resolve({ stdout: stdout, stderr: stderr }); 108 | } 109 | } 110 | } 111 | }); 112 | executingCmd.stdin = childProcess.stdin; 113 | executingCmd.kill = killProcess; 114 | executingCmd.isRunning = true; 115 | 116 | return executingCmd as ExecutingCmd; 117 | 118 | function killProcess() { 119 | wasKilledbyUs = true; 120 | if (isWindows) { 121 | cp.spawn("taskkill", ["/pid", childProcess.pid.toString(), "/f", "/t"]); 122 | } else { 123 | childProcess.kill("SIGINT"); 124 | } 125 | } 126 | } 127 | 128 | const buildFile = "build.zig"; 129 | 130 | export function findProj(dir: string, parent: string): string { 131 | if (dir === "" || dir === parent) { 132 | return ""; 133 | } 134 | if (fs.lstatSync(dir).isDirectory()) { 135 | const build = path.join(dir, buildFile); 136 | if (fs.existsSync(build)) { 137 | return dir; 138 | } 139 | } 140 | return findProj(path.dirname(dir), dir); 141 | } 142 | 143 | export function detectProjectRoot(fileName: string): string { 144 | const proj = findProj(path.dirname(fileName), ""); 145 | if (proj !== "") { 146 | return proj; 147 | } 148 | return undefined; 149 | } 150 | 151 | export function getExePath(exePath: string | null, exeName: string, optionName: string): string { 152 | // Allow passing the ${workspaceFolder} predefined variable 153 | // See https://code.visualstudio.com/docs/editor/variables-reference#_predefined-variables 154 | if (exePath && exePath.includes("${workspaceFolder}")) { 155 | // We choose the first workspaceFolder since it is ambiguous which one to use in this context 156 | if (workspace.workspaceFolders && workspace.workspaceFolders.length > 0) { 157 | // older versions 
of Node (which VSCode uses) may not have String.prototype.replaceAll 158 | exePath = exePath.replace(/\$\{workspaceFolder\}/gm, workspace.workspaceFolders[0].uri.fsPath); 159 | } 160 | } 161 | 162 | if (!exePath) { 163 | exePath = which.sync(exeName, { nothrow: true }); 164 | } else if (exePath.startsWith("~")) { 165 | exePath = path.join(os.homedir(), exePath.substring(1)); 166 | } else if (!path.isAbsolute(exePath)) { 167 | exePath = which.sync(exePath, { nothrow: true }); 168 | } 169 | 170 | let message; 171 | if (!exePath) { 172 | message = `Could not find ${exeName} in PATH`; 173 | } else if (!fs.existsSync(exePath)) { 174 | message = `\`${optionName}\` ${exePath} does not exist` 175 | } else { 176 | try { 177 | fs.accessSync(exePath, fs.constants.R_OK | fs.constants.X_OK); 178 | return exePath; 179 | } catch { 180 | message = `\`${optionName}\` ${exePath} is not an executable`; 181 | } 182 | } 183 | window.showErrorMessage(message); 184 | throw Error(message); 185 | } 186 | 187 | export function getZiggyPath(): string { 188 | const configuration = workspace.getConfiguration("ziggy"); 189 | const ziggyPath = configuration.get("path"); 190 | return getExePath(ziggyPath, "ziggy", "ziggy.path"); 191 | } 192 | 193 | -------------------------------------------------------------------------------- /editors/vscode/syntaxes/schema.tmLanguage.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://raw.githubusercontent.com/martinring/tmlanguage/master/tmlanguage.json", 3 | "name": "Ziggy Schema", 4 | "patterns": [ 5 | { 6 | "include": "#keywords" 7 | }, 8 | { 9 | "include": "#operators" 10 | }, 11 | { 12 | "include": "#builtins" 13 | }, 14 | { 15 | "include": "#tag_names" 16 | }, 17 | { 18 | "include": "#comments" 19 | }, 20 | { 21 | "include": "#variables" 22 | } 23 | ], 24 | "repository": { 25 | "variables": { 26 | "patterns": [ 27 | { 28 | "name": "meta.variable.ziggy_schema", 29 | "patterns": [ 30 | { 31 
| "match": "\\b[_a-zA-Z][_a-zA-Z0-9]*\\b", 32 | "name": "variable.ziggy_schema" 33 | }, 34 | { 35 | "begin": "@\"", 36 | "end": "\"", 37 | "name": "variable.string.ziggy_schema", 38 | "patterns": [ 39 | { 40 | "include": "#stringcontent" 41 | } 42 | ] 43 | } 44 | ] 45 | } 46 | ] 47 | }, 48 | "stringcontent": { 49 | "patterns": [ 50 | { 51 | "name": "constant.character.escape.ziggy_schema", 52 | "match": "\\\\([nrt'\"\\\\]|(x[0-9a-fA-F]{2})|(u\\{[0-9a-fA-F]+\\}))" 53 | }, 54 | { 55 | "name": "invalid.illegal.unrecognized-string-escape.ziggy_schema", 56 | "match": "\\\\." 57 | } 58 | ] 59 | }, 60 | "keywords": { 61 | "patterns": [ 62 | { 63 | "name": "keyword.control.ziggy_schema", 64 | "match": "\\b(root|struct|map|any)\\b" 65 | } 66 | ] 67 | }, 68 | "builtins": { 69 | "patterns": [ 70 | { 71 | "name": "support.type.ziggy_schema", 72 | "match": "\\b(bytes|int|bool|float)\\b" 73 | } 74 | ] 75 | }, 76 | "tag_names": { 77 | "comment": "Built-in functions", 78 | "name": "support.function.builtin.ziggy_schema", 79 | "match": "@[_a-zA-Z][_a-zA-Z0-9]*" 80 | }, 81 | "operators": { 82 | "patterns": [ 83 | { 84 | "name": "keyword.control.ziggy_schema", 85 | "match": "\\?" 
86 | } 87 | ] 88 | }, 89 | "comments": { 90 | "patterns": [ 91 | { 92 | "name": "comment.line.documentation.ziggy_schema", 93 | "begin": "//[!/](?=[^/])", 94 | "end": "$", 95 | "patterns": [ 96 | { 97 | "include": "#commentContents" 98 | } 99 | ] 100 | } 101 | ] 102 | }, 103 | "commentContents": { 104 | "patterns": [ 105 | { 106 | "match": "\\b(NOTE)\\b:?", 107 | "name": "keyword.todo.ziggy_schema" 108 | } 109 | ] 110 | } 111 | }, 112 | "scopeName": "text.ziggy_schema" 113 | } -------------------------------------------------------------------------------- /editors/vscode/syntaxes/ziggy.tmLanguage.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://raw.githubusercontent.com/martinring/tmlanguage/master/tmlanguage.json", 3 | "name": "Ziggy", 4 | "patterns": [ 5 | { 6 | "include": "#strings" 7 | }, 8 | { 9 | "include": "#tag_names" 10 | }, 11 | { 12 | "include": "#builtins" 13 | }, 14 | { 15 | "include": "#comments" 16 | }, 17 | { 18 | "include": "#variables" 19 | } 20 | ], 21 | "repository": { 22 | "tag_names": { 23 | "comment": "Built-in functions", 24 | "name": "support.function.builtin.ziggy", 25 | "match": "@[_a-zA-Z][_a-zA-Z0-9]*" 26 | }, 27 | "builtins": { 28 | "patterns": [ 29 | { 30 | "name": "keyword.constant.default.zig", 31 | "match": "\\b(null)\\b" 32 | }, 33 | { 34 | "name": "keyword.constant.bool.zig", 35 | "match": "\\b(true|false)\\b" 36 | } 37 | ] 38 | }, 39 | "comments": { 40 | "patterns": [ 41 | { 42 | "name": "comment.line.double-slash.ziggy", 43 | "begin": "//", 44 | "end": "$", 45 | "patterns": [ 46 | { 47 | "include": "#commentContents" 48 | } 49 | ] 50 | } 51 | ] 52 | }, 53 | "commentContents": { 54 | "patterns": [ 55 | { 56 | "match": "\\b(TODO|FIXME|XXX|NOTE)\\b:?", 57 | "name": "keyword.todo.ziggy" 58 | }, 59 | { 60 | "match": "\\b(ziggy-schema)\\b:?", 61 | "name": "keyword.ziggy" 62 | } 63 | ] 64 | }, 65 | "variables": { 66 | "patterns": [ 67 | { 68 | "name": 
"meta.variable.ziggy", 69 | "patterns": [ 70 | { 71 | "match": "\\b[_a-zA-Z][_a-zA-Z0-9]*\\b", 72 | "name": "variable.ziggy" 73 | }, 74 | { 75 | "begin": "@\"", 76 | "end": "\"", 77 | "name": "variable.string.ziggy", 78 | "patterns": [ 79 | { 80 | "include": "#stringcontent" 81 | } 82 | ] 83 | } 84 | ] 85 | } 86 | ] 87 | }, 88 | "strings": { 89 | "patterns": [ 90 | { 91 | "name": "string.quoted.double.ziggy", 92 | "begin": "\"", 93 | "end": "\"", 94 | "patterns": [ 95 | { 96 | "include": "#stringcontent" 97 | } 98 | ] 99 | }, 100 | { 101 | "name": "string.multiline.ziggy", 102 | "begin": "\\\\\\\\", 103 | "end": "$" 104 | }, 105 | { 106 | "name": "string.quoted.single.ziggy", 107 | "match": "'([^'\\\\]|\\\\(x\\h{2}|[0-2][0-7]{,2}|3[0-6][0-7]?|37[0-7]?|[4-7][0-7]?|.))'" 108 | } 109 | ] 110 | }, 111 | "stringcontent": { 112 | "patterns": [ 113 | { 114 | "name": "constant.character.escape.ziggy", 115 | "match": "\\\\([nrt'\"\\\\]|(x[0-9a-fA-F]{2})|(u\\{[0-9a-fA-F]+\\}))" 116 | }, 117 | { 118 | "name": "invalid.illegal.unrecognized-string-escape.ziggy", 119 | "match": "\\\\." 
120 | } 121 | ] 122 | }, 123 | "numbers": { 124 | "patterns": [ 125 | { 126 | "name": "constant.numeric.hexfloat.ziggy", 127 | "match": "\\b0x[0-9a-fA-F][0-9a-fA-F_]*(\\.[0-9a-fA-F][0-9a-fA-F_]*)?([pP][+-]?[0-9a-fA-F_]+)?\\b" 128 | }, 129 | { 130 | "name": "constant.numeric.float.ziggy", 131 | "match": "\\b[0-9][0-9_]*(\\.[0-9][0-9_]*)?([eE][+-]?[0-9_]+)?\\b" 132 | }, 133 | { 134 | "name": "constant.numeric.decimal.ziggy", 135 | "match": "\\b[0-9][0-9_]*\\b" 136 | }, 137 | { 138 | "name": "constant.numeric.hexadecimal.ziggy", 139 | "match": "\\b0x[a-fA-F0-9_]+\\b" 140 | }, 141 | { 142 | "name": "constant.numeric.octal.ziggy", 143 | "match": "\\b0o[0-7_]+\\b" 144 | }, 145 | { 146 | "name": "constant.numeric.binary.ziggy", 147 | "match": "\\b0b[01_]+\\b" 148 | }, 149 | { 150 | "name": "constant.numeric.invalid.ziggy", 151 | "match": "\\b[0-9](([eEpP][+-])|[0-9a-zA-Z_])*(\\.(([eEpP][+-])|[0-9a-zA-Z_])*)?([eEpP][+-])?[0-9a-zA-Z_]*\\b" 152 | } 153 | ] 154 | } 155 | }, 156 | "scopeName": "text.ziggy" 157 | } -------------------------------------------------------------------------------- /editors/vscode/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "target": "ESNext", 5 | "outDir": "out", 6 | "esModuleInterop": true, 7 | "lib": ["esnext"], 8 | "sourceMap": true, 9 | "rootDir": "src" 10 | }, 11 | "exclude": ["node_modules", ".vscode-test"] 12 | } -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-parts": { 4 | "inputs": { 5 | "nixpkgs-lib": "nixpkgs-lib" 6 | }, 7 | "locked": { 8 | "lastModified": 1719994518, 9 | "narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=", 10 | "owner": "hercules-ci", 11 | "repo": "flake-parts", 12 | "rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7", 13 | "type": "github" 
14 | }, 15 | "original": { 16 | "owner": "hercules-ci", 17 | "repo": "flake-parts", 18 | "type": "github" 19 | } 20 | }, 21 | "nixpkgs": { 22 | "locked": { 23 | "lastModified": 1721924956, 24 | "narHash": "sha256-Sb1jlyRO+N8jBXEX9Pg9Z1Qb8Bw9QyOgLDNMEpmjZ2M=", 25 | "owner": "NixOS", 26 | "repo": "nixpkgs", 27 | "rev": "5ad6a14c6bf098e98800b091668718c336effc95", 28 | "type": "github" 29 | }, 30 | "original": { 31 | "owner": "NixOS", 32 | "ref": "nixos-unstable", 33 | "repo": "nixpkgs", 34 | "type": "github" 35 | } 36 | }, 37 | "nixpkgs-lib": { 38 | "locked": { 39 | "lastModified": 1719876945, 40 | "narHash": "sha256-Fm2rDDs86sHy0/1jxTOKB1118Q0O3Uc7EC0iXvXKpbI=", 41 | "type": "tarball", 42 | "url": "https://github.com/NixOS/nixpkgs/archive/5daf0514482af3f97abaefc78a6606365c9108e2.tar.gz" 43 | }, 44 | "original": { 45 | "type": "tarball", 46 | "url": "https://github.com/NixOS/nixpkgs/archive/5daf0514482af3f97abaefc78a6606365c9108e2.tar.gz" 47 | } 48 | }, 49 | "root": { 50 | "inputs": { 51 | "flake-parts": "flake-parts", 52 | "nixpkgs": "nixpkgs" 53 | } 54 | } 55 | }, 56 | "root": "root", 57 | "version": 7 58 | } 59 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Description for the project"; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; 6 | flake-parts.url = "github:hercules-ci/flake-parts"; 7 | }; 8 | 9 | outputs = inputs@{ flake-parts, ... }: 10 | flake-parts.lib.mkFlake { inherit inputs; } { 11 | systems = 12 | [ "x86_64-linux" "aarch64-linux" "aarch64-darwin" "x86_64-darwin" ]; 13 | perSystem = { pkgs, lib, config, ... 
}: { 14 | packages = { 15 | ziggy = pkgs.stdenv.mkDerivation { 16 | name = "ziggy"; 17 | version = "0.0.0"; 18 | src = ./.; 19 | postPatch = '' 20 | ln -s ${pkgs.callPackage ./deps.nix { }} $ZIG_GLOBAL_CACHE_DIR/p 21 | ''; 22 | nativeBuildInputs = [ pkgs.zig.hook ]; 23 | }; 24 | default = config.packages.ziggy; 25 | update-deps = pkgs.writeShellApplication { 26 | name = "update-deps"; 27 | text = "${lib.getExe pkgs.zon2nix} > deps.nix"; 28 | }; 29 | }; 30 | devShells.default = pkgs.mkShell { 31 | buildInputs = [ config.packages.default.nativeBuildInputs ]; 32 | }; 33 | }; 34 | }; 35 | } 36 | -------------------------------------------------------------------------------- /src/cli/check.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const ziggy = @import("ziggy"); 3 | const Diagnostic = ziggy.Diagnostic; 4 | const Ast = ziggy.Ast; 5 | 6 | pub fn run(gpa: std.mem.Allocator, args: []const []const u8) !void { 7 | const cmd = Command.parse(args); 8 | 9 | const schema_file = std.fs.cwd().readFileAllocOptions( 10 | gpa, 11 | cmd.schema_path, 12 | ziggy.max_size, 13 | null, 14 | 1, 15 | 0, 16 | ) catch |err| { 17 | std.debug.print("Error reading '{s}': {s}\n", .{ 18 | cmd.schema_path, 19 | @errorName(err), 20 | }); 21 | std.process.exit(1); 22 | }; 23 | 24 | var schema_diag: ziggy.schema.Diagnostic = .{ .path = cmd.schema_path }; 25 | const schema_ast = ziggy.schema.Ast.init( 26 | gpa, 27 | schema_file, 28 | &schema_diag, 29 | ) catch fatalDiag(schema_diag); 30 | const schema = ziggy.schema.Schema.init( 31 | gpa, 32 | schema_ast.nodes.items, 33 | schema_file, 34 | &schema_diag, 35 | ) catch fatalDiag(schema_diag); 36 | 37 | // checkFile will reset the arena at the end of the call 38 | var arena_impl = std.heap.ArenaAllocator.init(gpa); 39 | for (cmd.doc_paths) |path| { 40 | checkFile(&arena_impl, std.fs.cwd(), path, schema) catch |err| switch (err) { 41 | error.IsDir, error.AccessDenied => { 42 | 
checkDir(gpa, &arena_impl, path, schema) catch |dir_err| { 43 | std.debug.print("Error walking dir '{s}': {s}\n", .{ 44 | path, 45 | @errorName(dir_err), 46 | }); 47 | }; 48 | }, 49 | else => { 50 | std.debug.print("Error while accessing '{s}': {s}\n", .{ 51 | path, @errorName(err), 52 | }); 53 | }, 54 | }; 55 | } 56 | } 57 | 58 | fn checkDir( 59 | gpa: std.mem.Allocator, 60 | arena_impl: *std.heap.ArenaAllocator, 61 | path: []const u8, 62 | schema: ziggy.schema.Schema, 63 | ) !void { 64 | var dir = try std.fs.cwd().openDir(path, .{ .iterate = true }); 65 | defer dir.close(); 66 | var walker = dir.walk(gpa) catch oom(); 67 | defer walker.deinit(); 68 | while (try walker.next()) |item| { 69 | switch (item.kind) { 70 | .file => { 71 | if (std.mem.endsWith(u8, item.basename, ".ziggy")) { 72 | try checkFile(arena_impl, item.dir, item.basename, schema); 73 | } 74 | }, 75 | else => {}, 76 | } 77 | } 78 | } 79 | 80 | fn checkFile( 81 | arena_impl: *std.heap.ArenaAllocator, 82 | base_dir: std.fs.Dir, 83 | sub_path: []const u8, 84 | schema: ziggy.schema.Schema, 85 | ) !void { 86 | defer _ = arena_impl.reset(.retain_capacity); 87 | const arena = arena_impl.allocator(); 88 | 89 | const doc_file = try base_dir.readFileAllocOptions( 90 | arena, 91 | sub_path, 92 | ziggy.max_size, 93 | null, 94 | 1, 95 | 0, 96 | ); 97 | var diag: ziggy.Diagnostic = .{ .path = sub_path }; 98 | const doc_ast = ziggy.Ast.init( 99 | arena, 100 | doc_file, 101 | true, 102 | true, 103 | false, 104 | &diag, 105 | ) catch fatalDiag(diag); 106 | 107 | doc_ast.check(arena, schema, &diag) catch fatalDiag(diag); 108 | std.debug.print("{}\n", .{diag}); 109 | } 110 | 111 | fn fatalDiag(diag: anytype) noreturn { 112 | std.debug.print("{}\n", .{diag}); 113 | std.process.exit(1); 114 | } 115 | 116 | fn oom() noreturn { 117 | std.debug.print("Out of memory\n", .{}); 118 | std.process.exit(1); 119 | } 120 | 121 | pub const Command = struct { 122 | schema_path: []const u8, 123 | doc_paths: []const []const u8, 124 | 
125 | fn parse(args: []const []const u8) Command { 126 | var idx: usize = 0; 127 | while (idx < args.len) : (idx += 1) { 128 | const arg = args[idx]; 129 | if (std.mem.eql(u8, arg, "--help") or 130 | std.mem.eql(u8, arg, "-h")) 131 | { 132 | fatalHelp(); 133 | } 134 | } 135 | 136 | if (args.len < 2) { 137 | std.debug.print("missing argument(s)\n\n", .{}); 138 | fatalHelp(); 139 | } 140 | 141 | const cmd: Command = .{ 142 | .schema_path = args[0], 143 | .doc_paths = args[1..], 144 | }; 145 | 146 | return cmd; 147 | } 148 | 149 | fn fatalHelp() noreturn { 150 | std.debug.print( 151 | \\Usage: ziggy check SCHEMA DOC [DOC...] [OPTIONS] 152 | \\ 153 | \\ Checks input paths against a Ziggy Schema. 154 | \\ If DOC is a directory, it will be searched 155 | \\ recursively for Ziggy files. 156 | \\ 157 | \\Options: 158 | \\ 159 | \\--help, -h Print this help and exit. 160 | , .{}); 161 | 162 | std.process.exit(1); 163 | } 164 | }; 165 | -------------------------------------------------------------------------------- /src/cli/fmt.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const ziggy = @import("ziggy"); 3 | const loadSchema = @import("load_schema.zig").loadSchema; 4 | const Diagnostic = ziggy.Diagnostic; 5 | const Ast = ziggy.Ast; 6 | 7 | const FileType = enum { ziggy, ziggy_schema }; 8 | 9 | pub fn run(gpa: std.mem.Allocator, args: []const []const u8) !void { 10 | const cmd = Command.parse(args); 11 | const schema = loadSchema(gpa, cmd.schema); 12 | var any_error = false; 13 | switch (cmd.mode) { 14 | .stdin => { 15 | var buf = std.ArrayList(u8).init(gpa); 16 | try std.io.getStdIn().reader().readAllArrayList(&buf, ziggy.max_size); 17 | const in_bytes = try buf.toOwnedSliceSentinel(0); 18 | 19 | const out_bytes = try fmtZiggy(gpa, null, in_bytes, schema); 20 | try std.io.getStdOut().writeAll(out_bytes); 21 | }, 22 | .stdin_schema => { 23 | var buf = std.ArrayList(u8).init(gpa); 24 | try 
std.io.getStdIn().reader().readAllArrayList(&buf, ziggy.max_size); 25 | const in_bytes = try buf.toOwnedSliceSentinel(0); 26 | 27 | const out_bytes = try fmtSchema(gpa, null, in_bytes); 28 | try std.io.getStdOut().writeAll(out_bytes); 29 | }, 30 | .paths => |paths| { 31 | // checkFile will reset the arena at the end of each call 32 | var arena_impl = std.heap.ArenaAllocator.init(gpa); 33 | for (paths) |path| { 34 | formatFile( 35 | &arena_impl, 36 | cmd.check, 37 | std.fs.cwd(), 38 | path, 39 | path, 40 | schema, 41 | &any_error, 42 | ) catch |err| switch (err) { 43 | error.IsDir, error.AccessDenied => { 44 | formatDir( 45 | gpa, 46 | &arena_impl, 47 | cmd.check, 48 | path, 49 | schema, 50 | &any_error, 51 | ) catch |dir_err| { 52 | std.debug.print("Error walking dir '{s}': {s}\n", .{ 53 | path, 54 | @errorName(dir_err), 55 | }); 56 | std.process.exit(1); 57 | }; 58 | }, 59 | else => { 60 | std.debug.print("Error while accessing '{s}': {s}\n", .{ 61 | path, @errorName(err), 62 | }); 63 | std.process.exit(1); 64 | }, 65 | }; 66 | } 67 | }, 68 | } 69 | 70 | if (any_error) { 71 | std.process.exit(1); 72 | } 73 | } 74 | 75 | fn formatDir( 76 | gpa: std.mem.Allocator, 77 | arena_impl: *std.heap.ArenaAllocator, 78 | check: bool, 79 | path: []const u8, 80 | schema: ziggy.schema.Schema, 81 | any_error: *bool, 82 | ) !void { 83 | var dir = try std.fs.cwd().openDir(path, .{ .iterate = true }); 84 | defer dir.close(); 85 | var walker = dir.walk(gpa) catch oom(); 86 | defer walker.deinit(); 87 | while (try walker.next()) |item| { 88 | switch (item.kind) { 89 | .file => { 90 | try formatFile( 91 | arena_impl, 92 | check, 93 | item.dir, 94 | item.basename, 95 | item.path, 96 | schema, 97 | any_error, 98 | ); 99 | }, 100 | else => {}, 101 | } 102 | } 103 | } 104 | 105 | fn formatFile( 106 | arena_impl: *std.heap.ArenaAllocator, 107 | check: bool, 108 | base_dir: std.fs.Dir, 109 | sub_path: []const u8, 110 | full_path: []const u8, 111 | schema: ziggy.schema.Schema, 112 | 
any_error: *bool, 113 | ) !void { 114 | defer _ = arena_impl.reset(.retain_capacity); 115 | const arena = arena_impl.allocator(); 116 | 117 | const file = try base_dir.openFile(sub_path, .{}); 118 | defer file.close(); 119 | 120 | const stat = try file.stat(); 121 | if (stat.kind == .directory) 122 | return error.IsDir; 123 | 124 | const file_type: FileType = blk: { 125 | const ext = std.fs.path.extension(sub_path); 126 | if (std.mem.eql(u8, ext, ".ziggy") or 127 | std.mem.eql(u8, ext, ".zgy")) 128 | { 129 | break :blk .ziggy; 130 | } 131 | 132 | if (std.mem.eql(u8, ext, ".ziggy-schema") or 133 | std.mem.eql(u8, ext, ".zgy-schema")) 134 | { 135 | break :blk .ziggy_schema; 136 | } 137 | return; 138 | }; 139 | 140 | var buf = std.ArrayList(u8).init(arena); 141 | defer buf.deinit(); 142 | 143 | try file.reader().readAllArrayList(&buf, ziggy.max_size); 144 | 145 | const in_bytes = try buf.toOwnedSliceSentinel(0); 146 | 147 | const out_bytes = switch (file_type) { 148 | .ziggy => try fmtZiggy( 149 | arena, 150 | full_path, 151 | in_bytes, 152 | schema, 153 | ), 154 | .ziggy_schema => try fmtSchema( 155 | arena, 156 | full_path, 157 | in_bytes, 158 | ), 159 | }; 160 | 161 | if (std.mem.eql(u8, out_bytes, in_bytes)) return; 162 | 163 | const stdout = std.io.getStdOut().writer(); 164 | if (check) { 165 | any_error.* = true; 166 | try stdout.print("{s}\n", .{full_path}); 167 | return; 168 | } 169 | 170 | var af = try base_dir.atomicFile(sub_path, .{ .mode = stat.mode }); 171 | defer af.deinit(); 172 | 173 | try af.file.writeAll(out_bytes); 174 | try af.finish(); 175 | try stdout.print("{s}\n", .{full_path}); 176 | } 177 | 178 | pub fn fmtZiggy( 179 | gpa: std.mem.Allocator, 180 | path: ?[]const u8, 181 | code: [:0]const u8, 182 | schema: ziggy.schema.Schema, 183 | ) ![]const u8 { 184 | var diag: Diagnostic = .{ .path = path }; 185 | const doc = Ast.init(gpa, code, true, false, false, &diag) catch { 186 | if (diag.errors.items.len != 0) { 187 | std.debug.print("{}\n", 
.{diag.fmt(code)}); 188 | } 189 | std.process.exit(1); 190 | }; 191 | 192 | doc.check(gpa, schema, &diag) catch { 193 | if (diag.errors.items.len != 0) { 194 | std.debug.print("{}\n", .{diag.fmt(code)}); 195 | } 196 | std.process.exit(1); 197 | }; 198 | 199 | return std.fmt.allocPrint(gpa, "{}\n", .{doc}); 200 | } 201 | 202 | fn fmtSchema( 203 | gpa: std.mem.Allocator, 204 | path: ?[]const u8, 205 | code: [:0]const u8, 206 | ) ![]const u8 { 207 | var diag: ziggy.schema.Diagnostic = .{ .path = path }; 208 | const ast = ziggy.schema.Ast.init(gpa, code, &diag) catch { 209 | std.debug.print("{}\n", .{diag}); 210 | std.process.exit(1); 211 | }; 212 | 213 | return std.fmt.allocPrint(gpa, "{}", .{ast}); 214 | } 215 | 216 | fn oom() noreturn { 217 | std.debug.print("Out of memory\n", .{}); 218 | std.process.exit(1); 219 | } 220 | 221 | pub const Command = struct { 222 | check: bool, 223 | schema: ?[]const u8, 224 | mode: Mode, 225 | 226 | const Mode = union(enum) { 227 | stdin, 228 | stdin_schema, 229 | paths: []const []const u8, 230 | }; 231 | 232 | fn parse(args: []const []const u8) Command { 233 | var check: bool = false; 234 | var schema: ?[]const u8 = null; 235 | var mode: ?Mode = null; 236 | 237 | var idx: usize = 0; 238 | while (idx < args.len) : (idx += 1) { 239 | const arg = args[idx]; 240 | if (std.mem.eql(u8, arg, "--help") or 241 | std.mem.eql(u8, arg, "-h")) 242 | { 243 | fatalHelp(); 244 | } 245 | 246 | if (std.mem.eql(u8, arg, "--check")) { 247 | if (check) { 248 | std.debug.print("error: duplicate '--check' flag\n\n", .{}); 249 | std.process.exit(1); 250 | } 251 | 252 | check = true; 253 | continue; 254 | } 255 | 256 | if (std.mem.eql(u8, arg, "--schema")) { 257 | if (schema != null) { 258 | std.debug.print("error: duplicate '--schema' option\n\n", .{}); 259 | std.process.exit(1); 260 | } 261 | 262 | idx += 1; 263 | if (idx == args.len) { 264 | std.debug.print("error: missing '--schema' option value\n\n", .{}); 265 | std.process.exit(1); 266 | } 267 | 268 | 
schema = args[idx]; 269 | continue; 270 | } 271 | 272 | if (std.mem.startsWith(u8, arg, "-")) { 273 | if (std.mem.eql(u8, arg, "--stdin") or 274 | std.mem.eql(u8, arg, "-")) 275 | { 276 | if (mode != null) { 277 | std.debug.print("unexpected flag: '{s}'\n", .{arg}); 278 | std.process.exit(1); 279 | } 280 | 281 | mode = .stdin; 282 | } else if (std.mem.eql(u8, arg, "--stdin-schema")) { 283 | if (mode != null) { 284 | std.debug.print("unexpected flag: '{s}'\n", .{arg}); 285 | std.process.exit(1); 286 | } 287 | 288 | mode = .stdin_schema; 289 | } else { 290 | std.debug.print("unexpected flag: '{s}'\n", .{arg}); 291 | std.process.exit(1); 292 | } 293 | } else { 294 | const paths_start = idx; 295 | while (idx < args.len) : (idx += 1) { 296 | if (std.mem.startsWith(u8, args[idx], "-")) { 297 | break; 298 | } 299 | } 300 | idx -= 1; 301 | 302 | if (mode != null) { 303 | std.debug.print( 304 | "unexpected path argument(s): '{s}'...\n", 305 | .{args[paths_start]}, 306 | ); 307 | std.process.exit(1); 308 | } 309 | 310 | const paths = args[paths_start .. idx + 1]; 311 | mode = .{ .paths = paths }; 312 | } 313 | } 314 | 315 | const m = mode orelse { 316 | std.debug.print("missing argument(s)\n\n", .{}); 317 | fatalHelp(); 318 | }; 319 | 320 | return .{ 321 | .check = check, 322 | .schema = schema, 323 | .mode = m, 324 | }; 325 | } 326 | 327 | fn fatalHelp() noreturn { 328 | std.debug.print( 329 | \\Usage: ziggy fmt PATH [PATH...] [OPTIONS] 330 | \\ 331 | \\ Formats input paths inplace. If PATH is a directory, it will 332 | \\ be searched recursively for Ziggy and Ziggy Schema files. 333 | \\ 334 | \\ Detected extensions: 335 | \\ Ziggy .ziggy, .zgy 336 | \\ Ziggy Schema .ziggy-schema, .zgy-schema 337 | \\ 338 | \\Options: 339 | \\ 340 | \\--stdin Format bytes from stdin and ouptut to stdout. 341 | \\ Mutually exclusive with other input aguments. 342 | \\ 343 | \\--stdin-schema Same as --stdin but for Ziggy Schema files. 
344 | \\ 345 | \\--schema PATH Path to a Ziggy schema file used when formatting 346 | \\ Ziggy files. 347 | \\ 348 | \\--check List non-conforming files and exit with an 349 | \\ error if the list is not empty. 350 | \\ 351 | \\--help, -h Prints this help and extits. 352 | , .{}); 353 | 354 | std.process.exit(1); 355 | } 356 | }; 357 | -------------------------------------------------------------------------------- /src/cli/load_schema.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const ziggy = @import("ziggy"); 3 | 4 | pub fn loadSchema(gpa: std.mem.Allocator, path: ?[]const u8) ziggy.schema.Schema { 5 | const p = path orelse return defaultSchema(); 6 | 7 | var diag: ziggy.schema.Diagnostic = .{ .path = p }; 8 | 9 | const schema_file = std.fs.cwd().readFileAllocOptions( 10 | gpa, 11 | p, 12 | ziggy.max_size, 13 | null, 14 | 1, 15 | 0, 16 | ) catch |err| { 17 | std.debug.print("error while reading the --schema file: {s}\n\n", .{ 18 | @errorName(err), 19 | }); 20 | std.process.exit(1); 21 | }; 22 | 23 | const schema_ast = ziggy.schema.Ast.init( 24 | gpa, 25 | schema_file, 26 | &diag, 27 | ) catch |err| { 28 | std.debug.print("error while parsing the --schema file: {s}\n\n", .{ 29 | @errorName(err), 30 | }); 31 | std.debug.print("{}\n", .{diag}); 32 | std.process.exit(1); 33 | }; 34 | 35 | const schema = ziggy.schema.Schema.init( 36 | gpa, 37 | schema_ast.nodes.items, 38 | schema_file, 39 | &diag, 40 | ) catch |err| { 41 | std.debug.print("error while parsing the --schema file: {s}\n\n", .{ 42 | @errorName(err), 43 | }); 44 | std.debug.print("{}\n", .{diag}); 45 | std.process.exit(1); 46 | }; 47 | 48 | return schema; 49 | } 50 | 51 | pub fn defaultSchema() ziggy.schema.Schema { 52 | return .{ 53 | .root = .{ .node = 1 }, 54 | .code = "unknown", 55 | .nodes = &.{ 56 | .{ 57 | .tag = .root, 58 | .loc = .{ 59 | .start = 0, 60 | .end = "unknown".len, 61 | }, 62 | .parent_id = 0, 63 | }, 64 | .{ 
65 | .tag = .any, 66 | .loc = .{ 67 | .start = 0, 68 | .end = "unknown".len, 69 | }, 70 | .parent_id = 0, 71 | }, 72 | }, 73 | }; 74 | } 75 | -------------------------------------------------------------------------------- /src/cli/logging.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const builtin = @import("builtin"); 3 | const folders = @import("known-folders"); 4 | 5 | pub var log_file: ?std.fs.File = switch (builtin.target.os.tag) { 6 | .linux, .macos => std.io.getStdErr(), 7 | else => null, 8 | }; 9 | 10 | pub fn logFn( 11 | comptime level: std.log.Level, 12 | comptime scope: @Type(.enum_literal), 13 | comptime format: []const u8, 14 | args: anytype, 15 | ) void { 16 | // if (scope != .ws and scope != .network) return; 17 | 18 | const l = log_file orelse return; 19 | const scope_prefix = "(" ++ @tagName(scope) ++ "): "; 20 | const prefix = "[" ++ @tagName(level) ++ "] " ++ scope_prefix; 21 | std.debug.lockStdErr(); 22 | defer std.debug.unlockStdErr(); 23 | 24 | const writer = l.writer(); 25 | writer.print(prefix ++ format ++ "\n", args) catch return; 26 | } 27 | 28 | pub fn setup(gpa: std.mem.Allocator) void { 29 | std.debug.lockStdErr(); 30 | defer std.debug.unlockStdErr(); 31 | 32 | setupInternal(gpa) catch { 33 | log_file = null; 34 | }; 35 | } 36 | 37 | fn setupInternal(gpa: std.mem.Allocator) !void { 38 | const cache_base = try folders.open(gpa, .cache, .{}) orelse return error.Failure; 39 | try cache_base.makePath("ziggy"); 40 | 41 | const log_name = "ziggy.log"; 42 | const log_path = try std.fmt.allocPrint(gpa, "ziggy/{s}", .{log_name}); 43 | defer gpa.free(log_path); 44 | 45 | const file = try cache_base.createFile(log_path, .{ .truncate = false }); 46 | const end = try file.getEndPos(); 47 | try file.seekTo(end); 48 | 49 | log_file = file; 50 | } 51 | -------------------------------------------------------------------------------- /src/cli/lsp.zig: 
-------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const assert = std.debug.assert; 3 | const ziggy = @import("ziggy"); 4 | const Document = @import("lsp/Document.zig"); 5 | const Schema = @import("lsp/Schema.zig"); 6 | const lsp = @import("lsp"); 7 | const types = lsp.types; 8 | const offsets = lsp.offsets; 9 | const ResultType = lsp.server.ResultType; 10 | const Message = lsp.server.Message; 11 | 12 | const log = std.log.scoped(.ziggy_lsp); 13 | 14 | const ZiggyLsp = lsp.server.Server(Handler); 15 | 16 | pub fn run(gpa: std.mem.Allocator, args: []const []const u8) !void { 17 | _ = args; 18 | 19 | log.debug("Ziggy LSP started!", .{}); 20 | 21 | var transport = lsp.Transport.init( 22 | std.io.getStdIn().reader(), 23 | std.io.getStdOut().writer(), 24 | ); 25 | transport.message_tracing = false; 26 | 27 | var server: ZiggyLsp = undefined; 28 | var handler: Handler = .{ 29 | .gpa = gpa, 30 | .server = &server, 31 | }; 32 | server = try ZiggyLsp.init(gpa, &transport, &handler); 33 | 34 | try server.loop(); 35 | } 36 | 37 | pub const Handler = struct { 38 | gpa: std.mem.Allocator, 39 | server: *ZiggyLsp, 40 | files: std.StringHashMapUnmanaged(Handler.File) = .{}, 41 | 42 | usingnamespace @import("lsp/logic.zig"); 43 | 44 | pub fn initialize( 45 | self: Handler, 46 | _: std.mem.Allocator, 47 | request: types.InitializeParams, 48 | offset_encoding: offsets.Encoding, 49 | ) !lsp.types.InitializeResult { 50 | _ = self; 51 | 52 | if (request.clientInfo) |clientInfo| { 53 | log.info("client is '{s}-{s}'", .{ clientInfo.name, clientInfo.version orelse "" }); 54 | } 55 | 56 | return .{ 57 | .serverInfo = .{ 58 | .name = "Ziggy LSP", 59 | .version = "0.0.1", 60 | }, 61 | .capabilities = .{ 62 | .positionEncoding = switch (offset_encoding) { 63 | .@"utf-8" => .@"utf-8", 64 | .@"utf-16" => .@"utf-16", 65 | .@"utf-32" => .@"utf-32", 66 | }, 67 | .textDocumentSync = .{ 68 | .TextDocumentSyncOptions = .{ 69 | 
.openClose = true, 70 | .change = .Full, 71 | .save = .{ .bool = true }, 72 | }, 73 | }, 74 | .completionProvider = .{ 75 | .triggerCharacters = &[_][]const u8{ ".", ":", "@", "\"" }, 76 | }, 77 | .hoverProvider = .{ .bool = true }, 78 | .definitionProvider = .{ .bool = true }, 79 | .referencesProvider = .{ .bool = true }, 80 | .documentFormattingProvider = .{ .bool = true }, 81 | .semanticTokensProvider = .{ 82 | .SemanticTokensOptions = .{ 83 | .full = .{ .bool = true }, 84 | .legend = .{ 85 | .tokenTypes = std.meta.fieldNames(types.SemanticTokenTypes), 86 | .tokenModifiers = std.meta.fieldNames(types.SemanticTokenModifiers), 87 | }, 88 | }, 89 | }, 90 | .inlayHintProvider = .{ .bool = true }, 91 | }, 92 | }; 93 | } 94 | 95 | pub fn initialized( 96 | self: Handler, 97 | _: std.mem.Allocator, 98 | notification: types.InitializedParams, 99 | ) !void { 100 | _ = self; 101 | _ = notification; 102 | } 103 | 104 | pub fn shutdown( 105 | _: Handler, 106 | _: std.mem.Allocator, 107 | notification: void, 108 | ) !?void { 109 | _ = notification; 110 | } 111 | 112 | pub fn documentSymbol( 113 | _: Handler, 114 | _: std.mem.Allocator, 115 | _: types.DocumentSymbolParams, 116 | ) !ResultType("textDocument/documentSymbol") { 117 | return null; 118 | } 119 | 120 | pub fn exit( 121 | _: Handler, 122 | _: std.mem.Allocator, 123 | notification: void, 124 | ) !void { 125 | _ = notification; 126 | } 127 | 128 | pub fn openDocument( 129 | self: *Handler, 130 | arena: std.mem.Allocator, 131 | notification: types.DidOpenTextDocumentParams, 132 | ) !void { 133 | const new_text = try self.gpa.dupeZ(u8, notification.textDocument.text); // We informed the client that we only do full document syncs 134 | errdefer self.gpa.free(new_text); 135 | 136 | const language_id = notification.textDocument.languageId; 137 | const language = std.meta.stringToEnum(Handler.Language, language_id) orelse { 138 | log.debug("unrecognized language id: '{s}'", .{language_id}); 139 | return; 140 | }; 141 | try 
self.loadFile( 142 | arena, 143 | new_text, 144 | notification.textDocument.uri, 145 | language, 146 | ); 147 | } 148 | 149 | pub fn changeDocument( 150 | self: *Handler, 151 | arena: std.mem.Allocator, 152 | notification: types.DidChangeTextDocumentParams, 153 | ) !void { 154 | if (notification.contentChanges.len == 0) return; 155 | 156 | const new_text = try self.gpa.dupeZ(u8, notification.contentChanges[notification.contentChanges.len - 1].literal_1.text); // We informed the client that we only do full document syncs 157 | errdefer self.gpa.free(new_text); 158 | 159 | // TODO: this is a hack while we wait for actual incremental reloads 160 | const file = self.files.get(notification.textDocument.uri) orelse return; 161 | 162 | log.debug("LOAD FILE URI: {s}, file tag = {s}", .{ 163 | notification.textDocument.uri, 164 | @tagName(file), 165 | }); 166 | try self.loadFile( 167 | arena, 168 | new_text, 169 | notification.textDocument.uri, 170 | file, 171 | ); 172 | } 173 | 174 | pub fn saveDocument( 175 | _: Handler, 176 | arena: std.mem.Allocator, 177 | notification: types.DidSaveTextDocumentParams, 178 | ) !void { 179 | _ = arena; 180 | _ = notification; 181 | } 182 | 183 | pub fn closeDocument( 184 | self: *Handler, 185 | _: std.mem.Allocator, 186 | notification: types.DidCloseTextDocumentParams, 187 | ) error{}!void { 188 | var kv = self.files.fetchRemove(notification.textDocument.uri) orelse return; 189 | self.gpa.free(kv.key); 190 | kv.value.deinit(); 191 | } 192 | 193 | pub fn completion( 194 | self: Handler, 195 | arena: std.mem.Allocator, 196 | request: types.CompletionParams, 197 | ) !ResultType("textDocument/completion") { 198 | const file = self.files.get(request.textDocument.uri) orelse return .{ 199 | .CompletionList = types.CompletionList{ 200 | .isIncomplete = false, 201 | .items = &.{}, 202 | }, 203 | }; 204 | const offset = file.offsetFromPosition( 205 | request.position.line, 206 | request.position.character, 207 | ); 208 | 209 | 
log.debug("completion at offset {}", .{offset}); 210 | 211 | switch (file) { 212 | .supermd, .ziggy => |z| { 213 | const ast = z.ast orelse return .{ 214 | .CompletionList = types.CompletionList{ 215 | .isIncomplete = false, 216 | .items = &.{}, 217 | }, 218 | }; 219 | 220 | const ziggy_completion = ast.completionsForOffset(offset); 221 | 222 | const completions = try arena.alloc( 223 | types.CompletionItem, 224 | ziggy_completion.len, 225 | ); 226 | 227 | for (completions, ziggy_completion) |*c, zc| { 228 | c.* = .{ 229 | .label = zc.name, 230 | .labelDetails = .{ .detail = zc.type }, 231 | .kind = .Field, 232 | .insertText = zc.snippet, 233 | .insertTextFormat = .Snippet, 234 | .documentation = .{ 235 | .MarkupContent = .{ 236 | .kind = .markdown, 237 | .value = zc.desc, 238 | }, 239 | }, 240 | }; 241 | } 242 | 243 | return .{ 244 | .CompletionList = types.CompletionList{ 245 | .isIncomplete = false, 246 | .items = completions, 247 | }, 248 | }; 249 | }, 250 | .ziggy_schema => return .{ 251 | .CompletionList = types.CompletionList{ 252 | .isIncomplete = false, 253 | .items = &.{}, 254 | }, 255 | }, 256 | } 257 | } 258 | 259 | pub fn gotoDefinition( 260 | self: Handler, 261 | arena: std.mem.Allocator, 262 | request: types.DefinitionParams, 263 | ) !ResultType("textDocument/definition") { 264 | const file = self.files.get(request.textDocument.uri) orelse return null; 265 | if (file == .ziggy_schema) return null; 266 | 267 | return .{ 268 | .Definition = types.Definition{ 269 | .Location = .{ 270 | .uri = try std.fmt.allocPrint(arena, "{s}-schema", .{request.textDocument.uri}), 271 | .range = .{ 272 | .start = .{ .line = 0, .character = 0 }, 273 | .end = .{ .line = 0, .character = 0 }, 274 | }, 275 | }, 276 | }, 277 | }; 278 | } 279 | 280 | pub fn hover( 281 | self: Handler, 282 | arena: std.mem.Allocator, 283 | request: types.HoverParams, 284 | offset_encoding: offsets.Encoding, 285 | ) !?types.Hover { 286 | _ = offset_encoding; // autofix 287 | _ = arena; // 
autofix 288 | 289 | const file = self.files.get(request.textDocument.uri) orelse return null; 290 | 291 | const doc = switch (file) { 292 | .supermd, .ziggy => |doc| doc, 293 | .ziggy_schema => return null, 294 | }; 295 | 296 | const offset = file.offsetFromPosition( 297 | request.position.line, 298 | request.position.character, 299 | ); 300 | log.debug("hover at offset {}", .{offset}); 301 | 302 | const ast = doc.ast orelse return null; 303 | const h = ast.hoverForOffset(offset) orelse return null; 304 | 305 | return types.Hover{ 306 | .contents = .{ 307 | .MarkupContent = .{ 308 | .kind = .markdown, 309 | .value = h, 310 | }, 311 | }, 312 | }; 313 | } 314 | 315 | pub fn references( 316 | _: Handler, 317 | arena: std.mem.Allocator, 318 | request: types.ReferenceParams, 319 | ) !?[]types.Location { 320 | _ = arena; 321 | _ = request; 322 | return null; 323 | } 324 | 325 | pub fn formatting( 326 | _: Handler, 327 | arena: std.mem.Allocator, 328 | request: types.DocumentFormattingParams, 329 | ) !?[]types.TextEdit { 330 | _ = arena; 331 | _ = request; 332 | return null; 333 | } 334 | 335 | pub fn semanticTokensFull( 336 | _: Handler, 337 | arena: std.mem.Allocator, 338 | request: types.SemanticTokensParams, 339 | ) !?types.SemanticTokens { 340 | _ = arena; 341 | _ = request; 342 | return null; 343 | } 344 | 345 | pub fn inlayHint( 346 | _: Handler, 347 | arena: std.mem.Allocator, 348 | request: types.InlayHintParams, 349 | ) !?[]types.InlayHint { 350 | _ = arena; 351 | _ = request; 352 | return null; 353 | } 354 | 355 | /// Handle a reponse that we have received from the client. 356 | /// Doesn't usually happen unless we explicitly send a request to the client. 
357 | pub fn response(self: Handler, _response: Message.Response) !void { 358 | _ = self; 359 | const id: []const u8 = switch (_response.id) { 360 | .string => |id| id, 361 | .number => |id| { 362 | log.warn("received response from client with id '{d}' that has no handler!", .{id}); 363 | return; 364 | }, 365 | }; 366 | 367 | if (_response.data == .@"error") { 368 | const err = _response.data.@"error"; 369 | log.err("Error response for '{s}': {}, {s}", .{ id, err.code, err.message }); 370 | return; 371 | } 372 | 373 | log.warn("received response from client with id '{s}' that has no handler!", .{id}); 374 | } 375 | }; 376 | -------------------------------------------------------------------------------- /src/cli/lsp/Document.zig: -------------------------------------------------------------------------------- 1 | const Document = @This(); 2 | 3 | const std = @import("std"); 4 | const assert = std.debug.assert; 5 | const ziggy = @import("ziggy"); 6 | const Token = ziggy.Tokenizer.Token; 7 | const Schema = @import("Schema.zig"); 8 | 9 | const log = std.log.scoped(.lsp_document); 10 | 11 | arena: std.heap.ArenaAllocator, 12 | bytes: [:0]const u8, 13 | diagnostic: ziggy.Diagnostic, 14 | frontmatter: bool, 15 | ast: ?if (ziggy.lsp_parser == .recover) ziggy.LanguageServerAst else ziggy.LanguageServerAst.Tree = null, 16 | schema: ?Schema, 17 | 18 | pub fn deinit(doc: *Document) void { 19 | doc.arena.deinit(); 20 | } 21 | 22 | pub fn init( 23 | gpa: std.mem.Allocator, 24 | src: [:0]const u8, 25 | frontmatter: bool, 26 | schema: ?Schema, 27 | ) error{OutOfMemory}!Document { 28 | const bytes = if (!frontmatter) src else blk: { 29 | var it = std.mem.tokenizeScalar(u8, src, '\n'); 30 | 31 | if (it.next()) |first_line| { 32 | const eql = std.mem.eql; 33 | const trim = std.mem.trim; 34 | if (eql(u8, trim(u8, first_line, &std.ascii.whitespace), "---")) { 35 | while (it.next()) |close_line| { 36 | if (eql(u8, trim(u8, close_line, &std.ascii.whitespace), "---")) { 37 | break :blk 
try gpa.dupeZ(u8, src[0 .. it.index - close_line.len]); 38 | } 39 | } 40 | 41 | // error.OpenFrontmatter; 42 | } 43 | } 44 | break :blk ""; 45 | }; 46 | 47 | log.debug("TRIMMED SRC = \n\n{s}\n\n", .{src}); 48 | 49 | var doc: Document = .{ 50 | .arena = std.heap.ArenaAllocator.init(gpa), 51 | .bytes = bytes, 52 | .frontmatter = frontmatter, 53 | .diagnostic = .{ .path = null }, 54 | .schema = schema, 55 | }; 56 | 57 | const arena = doc.arena.allocator(); 58 | 59 | if (schema) |s| { 60 | if (s.diagnostic.err != .none) { 61 | try doc.diagnostic.errors.append(arena, .{ 62 | .schema = .{ 63 | .sel = (Token.Loc{ .start = 0, .end = @intCast(bytes.len) }).getSelection(bytes), 64 | .err = @tagName(s.diagnostic.err), 65 | }, 66 | }); 67 | return doc; 68 | } 69 | } 70 | 71 | log.debug("parsing ziggy ast", .{}); 72 | var ast = ziggy.LanguageServerAst.init( 73 | arena, 74 | bytes, 75 | true, 76 | frontmatter, 77 | &doc.diagnostic, 78 | ) catch return doc; 79 | 80 | log.debug("schema: applying", .{}); 81 | 82 | if (schema) |s| { 83 | if (s.rules) |rules| { 84 | ast.check(arena, rules, &doc.diagnostic, bytes) catch return doc; 85 | } 86 | } 87 | doc.ast = ast; 88 | 89 | return doc; 90 | } 91 | -------------------------------------------------------------------------------- /src/cli/lsp/Schema.zig: -------------------------------------------------------------------------------- 1 | const Schema = @This(); 2 | 3 | const std = @import("std"); 4 | const assert = std.debug.assert; 5 | const ziggy = @import("ziggy"); 6 | 7 | const log = std.log.scoped(.lsp_document); 8 | 9 | arena: std.heap.ArenaAllocator, 10 | bytes: [:0]const u8, 11 | diagnostic: ziggy.schema.Diagnostic, 12 | ast: ?ziggy.schema.Ast = null, 13 | rules: ?ziggy.schema.Schema = null, 14 | 15 | pub fn deinit(doc: *Schema) void { 16 | doc.arena.deinit(); 17 | } 18 | 19 | pub fn init(gpa: std.mem.Allocator, bytes: [:0]const u8) Schema { 20 | var schema: Schema = .{ 21 | .arena = std.heap.ArenaAllocator.init(gpa), 22 | 
.bytes = bytes, 23 | .diagnostic = .{ .path = null }, 24 | }; 25 | 26 | const arena = schema.arena.allocator(); 27 | 28 | log.debug("schema: parsing", .{}); 29 | const ast = ziggy.schema.Ast.init(arena, bytes, &schema.diagnostic) catch return schema; 30 | if (schema.diagnostic.err != .none) return schema; 31 | 32 | schema.ast = ast; 33 | 34 | log.debug("schema: analysis", .{}); 35 | const rules = ziggy.schema.Schema.init( 36 | arena, 37 | ast.nodes.items, 38 | bytes, 39 | &schema.diagnostic, 40 | ) catch return schema; 41 | 42 | schema.rules = rules; 43 | 44 | log.debug("schema: done", .{}); 45 | return schema; 46 | } 47 | -------------------------------------------------------------------------------- /src/cli/lsp/logic.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const lsp = @import("lsp"); 3 | const ziggy = @import("ziggy"); 4 | const Handler = @import("../lsp.zig").Handler; 5 | const Document = @import("Document.zig"); 6 | const Schema = @import("Schema.zig"); 7 | 8 | pub const Language = enum { ziggy, ziggy_schema, supermd }; 9 | pub const File = union(Language) { 10 | ziggy: Document, 11 | ziggy_schema: Schema, 12 | supermd: Document, 13 | 14 | pub fn deinit(f: *File) void { 15 | switch (f.*) { 16 | inline else => |*x| x.deinit(), 17 | } 18 | } 19 | // Clamps the returned value to code.len 20 | pub fn offsetFromPosition(f: File, line: u32, col: u32) u32 { 21 | const code = switch (f) { 22 | inline else => |d| d.bytes, 23 | }; 24 | 25 | var count: u32 = 0; 26 | var idx: u32 = 0; 27 | while (count < line) : (idx += 1) { 28 | if (code[idx] == '\n') { 29 | count += 1; 30 | } 31 | } 32 | 33 | return @min(code.len, idx + col); 34 | } 35 | }; 36 | 37 | const log = std.log.scoped(.ziggy_lsp); 38 | 39 | pub fn loadFile( 40 | self: *Handler, 41 | arena: std.mem.Allocator, 42 | new_text: [:0]const u8, 43 | uri: []const u8, 44 | language: Language, 45 | ) !void { 46 | var res: 
lsp.types.PublishDiagnosticsParams = .{ 47 | .uri = uri, 48 | .diagnostics = &.{}, 49 | }; 50 | 51 | switch (language) { 52 | .ziggy_schema => { 53 | var sk = Schema.init(self.gpa, new_text); 54 | errdefer sk.deinit(); 55 | 56 | const gop = try self.files.getOrPut(self.gpa, uri); 57 | errdefer _ = self.files.remove(uri); 58 | 59 | if (gop.found_existing) { 60 | gop.value_ptr.deinit(); 61 | } else { 62 | gop.key_ptr.* = try self.gpa.dupe(u8, uri); 63 | } 64 | 65 | gop.value_ptr.* = .{ .ziggy_schema = sk }; 66 | 67 | switch (sk.diagnostic.err) { 68 | .none => {}, 69 | else => { 70 | const msg = try std.fmt.allocPrint(arena, "{lsp}", .{sk.diagnostic}); 71 | const sel = sk.diagnostic.tok.loc.getSelection(sk.bytes); 72 | res.diagnostics = &.{ 73 | .{ 74 | .range = .{ 75 | .start = .{ 76 | .line = sel.start.line - 1, 77 | .character = sel.start.col - 1, 78 | }, 79 | .end = .{ 80 | .line = sel.end.line - 1, 81 | .character = sel.end.col - 1, 82 | }, 83 | }, 84 | .severity = .Error, 85 | .message = msg, 86 | }, 87 | }; 88 | }, 89 | } 90 | }, 91 | .supermd, .ziggy => { 92 | const schema = try schemaForZiggy(self, arena, uri); 93 | 94 | var doc = try Document.init( 95 | self.gpa, 96 | new_text, 97 | language == .supermd, 98 | schema, 99 | ); 100 | errdefer doc.deinit(); 101 | 102 | log.debug("document init", .{}); 103 | 104 | const gop = try self.files.getOrPut(self.gpa, uri); 105 | errdefer _ = self.files.remove(uri); 106 | 107 | if (gop.found_existing) { 108 | gop.value_ptr.deinit(); 109 | } else { 110 | gop.key_ptr.* = try self.gpa.dupe(u8, uri); 111 | } 112 | 113 | gop.value_ptr.* = switch (language) { 114 | else => unreachable, 115 | .supermd => .{ .supermd = doc }, 116 | .ziggy => .{ .ziggy = doc }, 117 | }; 118 | 119 | log.debug("sending {} diagnostic errors", .{doc.diagnostic.errors.items.len}); 120 | 121 | const diags = try arena.alloc(lsp.types.Diagnostic, doc.diagnostic.errors.items.len); 122 | for (doc.diagnostic.errors.items, 0..) 
|e, idx| { 123 | const msg = try std.fmt.allocPrint(arena, "{lsp}", .{e.fmt(doc.bytes, null)}); 124 | const sel = e.getErrorSelection(); 125 | diags[idx] = .{ 126 | .range = .{ 127 | .start = .{ 128 | .line = sel.start.line - 1, 129 | .character = sel.start.col - 1, 130 | }, 131 | .end = .{ 132 | .line = sel.end.line - 1, 133 | .character = sel.end.col - 1, 134 | }, 135 | }, 136 | .severity = .Error, 137 | .message = msg, 138 | }; 139 | } 140 | 141 | res.diagnostics = diags; 142 | }, 143 | } 144 | log.debug("sending diags!", .{}); 145 | const msg = try self.server.sendToClientNotification( 146 | "textDocument/publishDiagnostics", 147 | res, 148 | ); 149 | 150 | defer self.gpa.free(msg); 151 | } 152 | 153 | pub fn schemaForZiggy(self: *Handler, arena: std.mem.Allocator, uri: []const u8) !?Schema { 154 | const path = try std.fmt.allocPrint(arena, "{s}-schema", .{uri["file://".len..]}); 155 | log.debug("trying to find schema at '{s}'", .{path}); 156 | const result = self.files.get(path) orelse { 157 | const bytes = std.fs.cwd().readFileAllocOptions( 158 | self.gpa, 159 | path, 160 | ziggy.max_size, 161 | null, 162 | 1, 163 | 0, 164 | ) catch return null; 165 | log.debug("schema loaded", .{}); 166 | var schema = Schema.init(self.gpa, bytes); 167 | errdefer schema.deinit(); 168 | 169 | const gpa_path = try self.gpa.dupe(u8, path); 170 | errdefer self.gpa.free(gpa_path); 171 | 172 | try self.files.putNoClobber( 173 | self.gpa, 174 | gpa_path, 175 | .{ .ziggy_schema = schema }, 176 | ); 177 | return schema; 178 | }; 179 | 180 | if (result == .ziggy_schema) return result.ziggy_schema; 181 | return null; 182 | } 183 | -------------------------------------------------------------------------------- /src/cli/query.zig: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /src/main.zig: 
-------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const builtin = @import("builtin"); 3 | const folders = @import("known-folders"); 4 | const ziggy = @import("ziggy"); 5 | const logging = @import("cli/logging.zig"); 6 | const lsp_exe = @import("cli/lsp.zig"); 7 | const fmt_exe = @import("cli/fmt.zig"); 8 | const check_exe = @import("cli/check.zig"); 9 | const convert_exe = @import("cli/convert.zig"); 10 | 11 | pub const known_folders_config: folders.KnownFolderConfig = .{ 12 | .xdg_force_default = true, 13 | .xdg_on_mac = true, 14 | }; 15 | 16 | pub const std_options: std.Options = .{ 17 | .logFn = logging.logFn, 18 | }; 19 | 20 | var lsp_mode = false; 21 | pub fn panic( 22 | msg: []const u8, 23 | trace: ?*std.builtin.StackTrace, 24 | ret_addr: ?usize, 25 | ) noreturn { 26 | if (lsp_mode) { 27 | std.log.err("{s}\n\n{?}", .{ msg, trace }); 28 | } else { 29 | std.debug.print("{s}\n\n{?}", .{ msg, trace }); 30 | } 31 | blk: { 32 | const out = if (!lsp_mode) std.io.getStdErr() else logging.log_file orelse break :blk; 33 | const w = out.writer(); 34 | if (builtin.strip_debug_info) { 35 | w.print("Unable to dump stack trace: debug info stripped\n", .{}) catch return; 36 | break :blk; 37 | } 38 | const debug_info = std.debug.getSelfDebugInfo() catch |err| { 39 | w.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch break :blk; 40 | break :blk; 41 | }; 42 | std.debug.writeCurrentStackTrace(w, debug_info, .no_color, ret_addr) catch |err| { 43 | w.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch break :blk; 44 | break :blk; 45 | }; 46 | } 47 | if (builtin.mode == .Debug) @breakpoint(); 48 | std.process.exit(1); 49 | } 50 | 51 | pub const Command = enum { lsp, query, fmt, check, convert, help }; 52 | 53 | pub fn main() !void { 54 | var gpa_impl: std.heap.GeneralPurposeAllocator(.{}) = .{}; 55 | const gpa = gpa_impl.allocator(); 56 | 57 | 
logging.setup(gpa); 58 | 59 | const args = std.process.argsAlloc(gpa) catch fatal("oom\n", .{}); 60 | defer std.process.argsFree(gpa, args); 61 | 62 | if (args.len < 2) fatalHelp(); 63 | 64 | const cmd = std.meta.stringToEnum(Command, args[1]) orelse { 65 | std.debug.print("unrecognized subcommand: '{s}'\n\n", .{args[1]}); 66 | fatalHelp(); 67 | }; 68 | 69 | if (cmd == .lsp) lsp_mode = true; 70 | 71 | _ = switch (cmd) { 72 | .lsp => lsp_exe.run(gpa, args[2..]), 73 | .fmt => fmt_exe.run(gpa, args[2..]), 74 | .check => check_exe.run(gpa, args[2..]), 75 | .convert => convert_exe.run(gpa, args[2..]), 76 | .help => fatalHelp(), 77 | else => std.debug.panic("TODO cmd={s}", .{@tagName(cmd)}), 78 | } catch |err| fatal("unexpected error: {s}\n", .{@errorName(err)}); 79 | } 80 | 81 | fn fatal(comptime fmt: []const u8, args: anytype) noreturn { 82 | std.debug.print(fmt, args); 83 | std.process.exit(1); 84 | } 85 | 86 | fn fatalHelp() noreturn { 87 | fatal( 88 | \\Usage: ziggy COMMAND [OPTIONS] 89 | \\ 90 | \\Commands: 91 | \\ fmt Format Ziggy files 92 | \\ query, q Query Ziggy files 93 | \\ check Check Ziggy files against a Ziggy schema 94 | \\ convert Convert between JSON, YAML, TOML files and Ziggy 95 | \\ lsp Start the Ziggy LSP 96 | \\ help Show this menu and exit 97 | \\ 98 | \\General Options: 99 | \\ --help, -h Print command specific usage 100 | \\ 101 | \\ 102 | , .{}); 103 | } 104 | -------------------------------------------------------------------------------- /src/root.zig: -------------------------------------------------------------------------------- 1 | pub const Tokenizer = @import("ziggy/Tokenizer.zig"); 2 | pub const Parser = @import("ziggy/Parser.zig"); 3 | pub const dynamic = @import("ziggy/dynamic.zig"); 4 | pub const Ast = @import("ziggy/Ast.zig"); 5 | pub const Diagnostic = @import("ziggy/Diagnostic.zig"); 6 | pub const parseLeaky = Parser.parseLeaky; 7 | pub const ParseOptions = Parser.ParseOptions; 8 | pub const FrontmatterMeta = 
ParseOptions.FrontmatterMeta; 9 | pub const FrontmatterError = Parser.FrontmatterError; 10 | pub const serializer = @import("ziggy/serializer.zig"); 11 | pub const stringify = serializer.stringify; 12 | 13 | pub const lsp_parser: enum { recover, resilient } = .resilient; 14 | pub const LanguageServerAst = switch (lsp_parser) { 15 | .recover => @import("ziggy/RecoverAst.zig"), 16 | .resilient => @import("ziggy/ResilientParser.zig"), 17 | }; 18 | 19 | pub const schema = struct { 20 | pub const Diagnostic = @import("schema/Diagnostic.zig"); 21 | pub const Tokenizer = @import("schema/Tokenizer.zig"); 22 | pub const Schema = @import("schema/Schema.zig"); 23 | pub const Ast = @import("schema/Ast.zig"); 24 | pub const checkType = @import("schema/check_type.zig").checkType; 25 | }; 26 | 27 | // Ziggy documents and schemas can have a maximum size of 4GB 28 | pub const max_size = 4 * 1024 * 1024 * 1024; 29 | 30 | test { 31 | _ = Tokenizer; 32 | _ = Parser; 33 | _ = Diagnostic; 34 | _ = Ast; 35 | 36 | _ = dynamic; 37 | _ = serializer; 38 | _ = @import("ziggy/RecoverAst.zig"); 39 | _ = @import("ziggy/ResilientParser.zig"); 40 | } 41 | test { 42 | _ = schema.Diagnostic; 43 | _ = schema.Tokenizer; 44 | _ = schema.Schema; 45 | _ = schema.Ast; 46 | _ = @import("schema/check_type.zig"); 47 | } 48 | -------------------------------------------------------------------------------- /src/schema/Diagnostic.zig: -------------------------------------------------------------------------------- 1 | const Diagnostic = @This(); 2 | 3 | const std = @import("std"); 4 | const Tokenizer = @import("Tokenizer.zig"); 5 | const Token = Tokenizer.Token; 6 | 7 | /// The data being parsed, this field should not be set manually by users. 8 | code: [:0]const u8 = "", 9 | 10 | /// A path to the file, used to display diagnostics. 11 | /// If not present, error positions will be printed as "line: XX col: XX". 12 | /// This field should be set as needed by users. 
13 | path: ?[]const u8, 14 | 15 | tok: Token = .{ 16 | .tag = .eof, 17 | .loc = .{ .start = 0, .end = 0 }, 18 | }, 19 | err: Error = .none, 20 | 21 | pub const Error = union(enum) { 22 | none, 23 | unexpected_token: struct { 24 | expected: []const Token.Tag, 25 | }, 26 | invalid_token, 27 | 28 | duplicate_field: struct { 29 | first_loc: Token.Loc, 30 | }, 31 | missing_field: struct { 32 | name: []const u8, 33 | }, 34 | empty_enum, 35 | unknown_field, 36 | }; 37 | 38 | pub fn debug(self: Diagnostic) void { 39 | std.debug.print("{}", .{self}); 40 | } 41 | 42 | pub fn format( 43 | self: Diagnostic, 44 | comptime fmt: []const u8, 45 | options: std.fmt.FormatOptions, 46 | out_stream: anytype, 47 | ) !void { 48 | _ = options; 49 | 50 | const lsp = std.mem.eql(u8, fmt, "lsp"); 51 | 52 | if (!lsp) { 53 | const start = self.tok.loc.getSelection(self.code).start; 54 | if (self.path) |p| { 55 | try out_stream.print("{s}:{}:{}\n", .{ 56 | p, 57 | start.line, 58 | start.col, 59 | }); 60 | } else { 61 | try out_stream.print("line: {} col: {}\n", .{ 62 | start.line, 63 | start.col, 64 | }); 65 | } 66 | } 67 | 68 | switch (self.err) { 69 | .none => {}, 70 | .empty_enum => { 71 | try out_stream.print("empty enum", .{}); 72 | }, 73 | .invalid_token => { 74 | try out_stream.print("invalid token", .{}); 75 | if (!lsp) { 76 | try out_stream.print(": '{s}'", .{ 77 | self.tok.loc.src(self.code), 78 | }); 79 | } 80 | }, 81 | .unexpected_token => |u| { 82 | if (self.tok.tag == .eof) { 83 | if (!lsp) { 84 | try out_stream.print("unexpected EOF, ", .{}); 85 | } 86 | try out_stream.print("expected: ", .{}); 87 | } else { 88 | if (!lsp) { 89 | try out_stream.print("unexpected token: '{s}', ", .{ 90 | self.tok.loc.src(self.code), 91 | }); 92 | } 93 | try out_stream.print("expected: ", .{}); 94 | } 95 | 96 | for (u.expected, 0..) 
|tag, idx| { 97 | try out_stream.print("'{s}'", .{tag.lexeme()}); 98 | if (idx != u.expected.len - 1) { 99 | try out_stream.print(" or ", .{}); 100 | } 101 | } 102 | 103 | try out_stream.print("\n", .{}); 104 | }, 105 | .duplicate_field => |dup| { 106 | if (lsp) { 107 | try out_stream.print("duplicate field", .{}); 108 | } else { 109 | const first_sel = dup.first_loc.getSelection(self.code); 110 | try out_stream.print("found duplicate field '{s}', first definition here:", .{ 111 | self.tok.loc.src(self.code), 112 | }); 113 | if (self.path) |p| { 114 | try out_stream.print("\n{s}:{}:{}\n", .{ 115 | p, 116 | first_sel.start.line, 117 | first_sel.start.col, 118 | }); 119 | } else { 120 | try out_stream.print(" line: {} col: {}\n", .{ 121 | first_sel.start.line, 122 | first_sel.start.col, 123 | }); 124 | } 125 | } 126 | }, 127 | .missing_field => |miss| { 128 | if (lsp) { 129 | try out_stream.print( 130 | "missing field '{s}'", 131 | .{miss.name}, 132 | ); 133 | } else { 134 | const struct_end = self.tok.loc.getSelection(self.code); 135 | try out_stream.print( 136 | "missing field '{s}', struct ends here:", 137 | .{miss.name}, 138 | ); 139 | if (self.path) |p| { 140 | try out_stream.print("\n{s}:{}:{}\n", .{ 141 | p, 142 | struct_end.start.line, 143 | struct_end.start.col, 144 | }); 145 | } else { 146 | try out_stream.print(" line: {} col: {}\n", .{ 147 | struct_end.start.line, 148 | struct_end.start.col, 149 | }); 150 | } 151 | } 152 | }, 153 | .unknown_field => { 154 | const name = self.tok.loc.src(self.code); 155 | if (lsp) { 156 | try out_stream.print( 157 | "unknown field '{s}'", 158 | .{name}, 159 | ); 160 | } else { 161 | const selection = self.tok.loc.getSelection(self.code); 162 | try out_stream.print( 163 | "unknown field '{s}' found here:", 164 | .{name}, 165 | ); 166 | if (self.path) |p| { 167 | try out_stream.print("\n{s}:{}:{}\n", .{ 168 | p, 169 | selection.start.line, 170 | selection.start.col, 171 | }); 172 | } else { 173 | try out_stream.print(" line: 
{} col: {}\n", .{ 174 | selection.start.line, 175 | selection.start.col, 176 | }); 177 | } 178 | } 179 | }, 180 | } 181 | } 182 | -------------------------------------------------------------------------------- /src/schema/Tokenizer.zig: -------------------------------------------------------------------------------- 1 | const Tokenizer = @This(); 2 | const std = @import("std"); 3 | 4 | idx: u32 = 0, 5 | 6 | pub const Token = struct { 7 | tag: Tag, 8 | loc: Loc, 9 | 10 | pub const Tag = enum { 11 | invalid, 12 | root_kw, 13 | enum_kw, 14 | struct_kw, 15 | map_kw, 16 | any_kw, 17 | unknown_kw, 18 | pipe, 19 | comma, 20 | eq, 21 | colon, 22 | at, 23 | lb, 24 | rb, 25 | lsb, 26 | rsb, 27 | qmark, 28 | identifier, 29 | doc_comment_line, 30 | bytes, 31 | int, 32 | float, 33 | bool, 34 | eof, 35 | 36 | // never generated by the tokenizer but 37 | // used elsewhere 38 | expr, 39 | tag_name, 40 | 41 | pub fn lexeme(self: Tag) []const u8 { 42 | return switch (self) { 43 | .invalid => "(invalid)", 44 | .root_kw => "root", 45 | .enum_kw => "enum", 46 | .struct_kw => "struct", 47 | .map_kw => "map", 48 | .any_kw => "any", 49 | .unknown_kw => "unknown", 50 | .pipe => "|", 51 | .comma => ",", 52 | .eq => "=", 53 | .colon => ":", 54 | .at => "@", 55 | .lb => "{", 56 | .rb => "}", 57 | .lsb => "[", 58 | .rsb => "]", 59 | .qmark => "?", 60 | .identifier => "(identifier)", 61 | .doc_comment_line => "(doc comment)", 62 | .bytes => "bytes", 63 | .int => "int", 64 | .float => "float", 65 | .bool => "bool", 66 | .eof => "EOF", 67 | 68 | .expr => "(expr)", 69 | .tag_name => "(tag name)", 70 | }; 71 | } 72 | }; 73 | 74 | pub const Loc = struct { 75 | start: u32, 76 | end: u32, 77 | 78 | pub fn src(self: Loc, code: []const u8) []const u8 { 79 | return code[self.start..self.end]; 80 | } 81 | 82 | pub const Selection = struct { 83 | start: Position, 84 | end: Position, 85 | 86 | pub const Position = struct { 87 | line: u32, 88 | col: u32, 89 | }; 90 | }; 91 | 92 | pub fn getSelection(self: 
Loc, code: []const u8) Selection { 93 | //TODO: ziglyph 94 | var selection: Selection = .{ 95 | .start = .{ .line = 1, .col = 1 }, 96 | .end = undefined, 97 | }; 98 | 99 | for (code[0..self.start]) |c| { 100 | if (c == '\n') { 101 | selection.start.line += 1; 102 | selection.start.col = 1; 103 | } else selection.start.col += 1; 104 | } 105 | 106 | selection.end = selection.start; 107 | for (code[self.start..self.end]) |c| { 108 | if (c == '\n') { 109 | selection.end.line += 1; 110 | selection.end.col = 1; 111 | } else selection.end.col += 1; 112 | } 113 | return selection; 114 | } 115 | }; 116 | }; 117 | 118 | const State = enum { 119 | start, 120 | identifier, 121 | doc_comment_start, 122 | doc_comment, 123 | }; 124 | 125 | pub fn next(self: *Tokenizer, code: [:0]const u8) Token { 126 | var state: State = .start; 127 | var res: Token = .{ 128 | .tag = .invalid, 129 | .loc = .{ 130 | .start = self.idx, 131 | .end = undefined, 132 | }, 133 | }; 134 | 135 | while (true) : (self.idx += 1) { 136 | const c = code[self.idx]; 137 | switch (state) { 138 | .start => switch (c) { 139 | 0 => { 140 | res.tag = .eof; 141 | res.loc.start = @intCast(code.len - 1); 142 | res.loc.end = @intCast(code.len); 143 | break; 144 | }, 145 | ' ', '\n', '\r', '\t' => res.loc.start += 1, 146 | '|' => { 147 | self.idx += 1; 148 | res.tag = .pipe; 149 | res.loc.end = self.idx; 150 | break; 151 | }, 152 | ',' => { 153 | self.idx += 1; 154 | res.tag = .comma; 155 | res.loc.end = self.idx; 156 | break; 157 | }, 158 | '=' => { 159 | self.idx += 1; 160 | res.tag = .eq; 161 | res.loc.end = self.idx; 162 | break; 163 | }, 164 | ':' => { 165 | self.idx += 1; 166 | res.tag = .colon; 167 | res.loc.end = self.idx; 168 | break; 169 | }, 170 | '@' => { 171 | self.idx += 1; 172 | res.tag = .at; 173 | res.loc.end = self.idx; 174 | break; 175 | }, 176 | '[' => { 177 | self.idx += 1; 178 | res.tag = .lsb; 179 | res.loc.end = self.idx; 180 | break; 181 | }, 182 | ']' => { 183 | self.idx += 1; 184 | res.tag = 
.rsb; 185 | res.loc.end = self.idx; 186 | break; 187 | }, 188 | '{' => { 189 | self.idx += 1; 190 | res.tag = .lb; 191 | res.loc.end = self.idx; 192 | break; 193 | }, 194 | '}' => { 195 | self.idx += 1; 196 | res.tag = .rb; 197 | res.loc.end = self.idx; 198 | break; 199 | }, 200 | '?' => { 201 | self.idx += 1; 202 | res.tag = .qmark; 203 | res.loc.end = self.idx; 204 | break; 205 | }, 206 | 207 | 'a'...'z', 'A'...'Z', '_' => state = .identifier, 208 | '/' => state = .doc_comment_start, 209 | else => { 210 | res.tag = .invalid; 211 | res.loc.end = self.idx; 212 | break; 213 | }, 214 | }, 215 | .identifier => switch (c) { 216 | 'a'...'z', 'A'...'Z', '_', '0'...'9' => continue, 217 | else => { 218 | res.loc.end = self.idx; 219 | const src = res.loc.src(code); 220 | if (std.mem.eql(u8, src, "bytes")) { 221 | res.tag = .bytes; 222 | } else if (std.mem.eql(u8, src, "bool")) { 223 | res.tag = .bool; 224 | } else if (std.mem.eql(u8, src, "int")) { 225 | res.tag = .int; 226 | } else if (std.mem.eql(u8, src, "float")) { 227 | res.tag = .float; 228 | } else if (std.mem.eql(u8, src, "struct")) { 229 | res.tag = .struct_kw; 230 | } else if (std.mem.eql(u8, src, "map")) { 231 | res.tag = .map_kw; 232 | } else if (std.mem.eql(u8, src, "any")) { 233 | res.tag = .any_kw; 234 | } else if (std.mem.eql(u8, src, "unknown")) { 235 | res.tag = .unknown_kw; 236 | } else if (std.mem.eql(u8, src, "root")) { 237 | res.tag = .root_kw; 238 | } else if (std.mem.eql(u8, src, "enum")) { 239 | res.tag = .enum_kw; 240 | } else { 241 | res.tag = .identifier; 242 | } 243 | break; 244 | }, 245 | }, 246 | .doc_comment_start => switch (c) { 247 | '/' => { 248 | if (!std.mem.startsWith(u8, code[self.idx..], "//")) { 249 | res.tag = .invalid; 250 | res.loc.end = self.idx; 251 | break; 252 | } 253 | self.idx += 1; 254 | state = .doc_comment; 255 | }, 256 | else => { 257 | res.tag = .invalid; 258 | res.loc.end = self.idx; 259 | break; 260 | }, 261 | }, 262 | .doc_comment => switch (c) { 263 | 0, '\n' => { 
264 | res.tag = .doc_comment_line; 265 | res.loc.end = self.idx; 266 | break; 267 | }, 268 | else => {}, 269 | }, 270 | } 271 | } 272 | 273 | return res; 274 | } 275 | 276 | test "basics" { 277 | const case = 278 | \\root = Frontmatter 279 | \\ 280 | \\@date, 281 | \\ 282 | \\struct Frontmatter { 283 | \\ title: bytes 284 | \\} 285 | ; 286 | 287 | const expected: []const Token.Tag = &.{ 288 | // zig fmt: off 289 | .root_kw, .eq, .identifier, 290 | 291 | .at, .identifier, .comma, 292 | 293 | .struct_kw, .identifier, .lb, 294 | .identifier, .colon, .bytes, 295 | .rb, 296 | // zig fmt: on 297 | }; 298 | 299 | var t: Tokenizer = .{}; 300 | 301 | for (expected, 0..) |e, idx| { 302 | errdefer std.debug.print("failed at index: {}\n", .{idx}); 303 | const tok = t.next(case); 304 | errdefer std.debug.print("bad token: {any}\n", .{tok}); 305 | try std.testing.expectEqual(e, tok.tag); 306 | } 307 | try std.testing.expectEqual(t.next(case).tag, .eof); 308 | } 309 | 310 | -------------------------------------------------------------------------------- /src/schema/check_type.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const builtin = @import("builtin"); 3 | const Ast = @import("Ast.zig"); 4 | const Diagnostic = @import("Diagnostic.zig"); 5 | const Schema = @import("Schema.zig"); 6 | const assert = std.debug.assert; 7 | const Allocator = std.mem.Allocator; 8 | 9 | pub fn checkType(T: type, src: [:0]const u8) !void { 10 | if (!builtin.is_test) @compileError("call checkType in a unit test!"); 11 | var arena_state = std.heap.ArenaAllocator.init(std.heap.page_allocator); 12 | defer arena_state.deinit(); 13 | 14 | const arena = arena_state.allocator(); 15 | var diag: Diagnostic = .{ .path = null }; 16 | 17 | const ast = Ast.init(arena, src, &diag) catch |err| { 18 | std.debug.print("Error while parsing Ziggy Schema:\n{}\n", .{diag}); 19 | return err; 20 | }; 21 | 22 | const schema = Schema.init(arena, 
ast.nodes.items, src, &diag) catch |err| {
        std.debug.print("Error while analyzing Ziggy Schema:\n{}\n", .{diag});
        return err;
    };

    try checkRule(T, arena, &schema, src, schema.root);
}

/// Recursively checks that the Zig type `T` is structurally compatible with
/// the schema rule `rule`. Prints a human-readable mismatch description and
/// returns `error.Validation` on failure; panics on schema node kinds that
/// are not supported yet (see the TODOs below).
fn checkRule(
    T: type,
    arena: Allocator,
    schema: *const Schema,
    src: [:0]const u8,
    rule: Schema.Rule,
) error{ Validation, OutOfMemory }!void {
    const node = schema.nodes[rule.node];
    const info = @typeInfo(T);

    // Types that provide custom parse/stringify logic via `ziggy_options`
    // opt out of structural checking entirely.
    switch (info) {
        .@"struct", .@"union", .@"enum" => {
            if (@hasDecl(T, "ziggy_options") and
                (@hasDecl(T.ziggy_options, "parse") or
                @hasDecl(T.ziggy_options, "stringify")))
            {
                return;
            }
        },
        else => {},
    }

    switch (node.tag) {
        else => std.debug.panic("TODO: '{s}'\n", .{@tagName(node.tag)}),
        .any, .unknown => return,
        .bool => {
            if (T != bool) {
                std.debug.print("Expected 'bool' found '{s}'\n", .{@typeName(T)});
                return error.Validation;
            }
        },
        .bytes => {
            if (T != []const u8 and T != []u8) {
                std.debug.print("Expected 'bytes' found '{s}'\n", .{@typeName(T)});
                return error.Validation;
            }
        },

        .int => {
            if (info != .int) {
                std.debug.print("Expected 'int' found '{s}'\n", .{@typeName(T)});
                return error.Validation;
            }
        },
        .float => {
            if (info != .float) {
                std.debug.print("Expected 'float' found '{s}'\n", .{@typeName(T)});
                return error.Validation;
            }
        },

        .optional => {
            if (info != .optional) {
                std.debug.print("Expected 'optional' found '{s}'\n", .{@typeName(T)});
                return error.Validation;
            }

            try checkRule(info.optional.child, arena, schema, src, .{
                .node = node.first_child_id,
            });
        },

        .array => switch (info) {
            .array => |arr| try checkRule(arr.child, arena, schema, src, .{
                .node = node.first_child_id,
            }),
            .pointer => |ptr| {
                if (ptr.size != .slice) {
                    // Fixed copy-paste: this message used to say 'optional'
                    // even though the schema rule being checked is 'array'.
                    std.debug.print("Expected 'array' found '{s}'\n", .{@typeName(T)});
                    return error.Validation;
                }

                try checkRule(ptr.child, arena, schema, src, .{
                    .node = node.first_child_id,
                });
            },
            else => {
                std.debug.print("Expected 'array' found '{s}'\n", .{@typeName(T)});
                return error.Validation;
            },
        },
        .struct_union => {
            if (info != .@"union") {
                std.debug.print("Expected 'union' found '{s}'\n", .{@typeName(T)});
                return error.Validation;
            }
            const u = info.@"union";

            // TODO: this acceleration data structure should probably be part
            // of the schema itself
            var cases = std.StringArrayHashMap(void).init(arena);
            defer cases.deinit();
            {
                assert(node.first_child_id != 0);
                var idx = node.first_child_id;
                while (idx != 0) : (idx = schema.nodes[idx].next_id) {
                    const case = schema.nodes[idx].loc.src(src);
                    try cases.putNoClobber(case, {});
                }
            }

            // Every union field must exist as a schema case...
            inline for (u.fields) |f| {
                if (!cases.swapRemove(f.name)) {
                    std.debug.print("Case '{s}' in union type '{s}' doesn't exist in schema\n", .{
                        f.name,
                        @typeName(T),
                    });
                    return error.Validation;
                }
            }

            // ...and every schema case must have been claimed by a field.
            if (cases.pop()) |remaining| {
                std.debug.print("Schema union case '{s}' missing in union type '{s}'\n", .{
                    remaining.key,
                    @typeName(T),
                });
                return error.Validation;
            }
        },
        .map => @panic("TODO: map support in checkType"),
        .identifier => {
            switch (info) {
                else => {
                    // Added the trailing newline that every other mismatch
                    // message already has.
                    std.debug.print("Expected 'struct' found '{s}'\n", .{@typeName(T)});
                    return error.Validation;
                },
                .@"struct" => |s| {
                    const sr = schema.structs.get(node.loc.src(src)).?;
                    const seen_fields = try arena.alloc(bool, sr.fields.entries.len);
                    @memset(seen_fields, false);

                    outer: inline for (s.fields) |f| {
                        // Fields listed in `ziggy_options.skip_fields` are
                        // exempt from schema validation.
                        if (@hasDecl(T, "ziggy_options") and @hasDecl(T.ziggy_options, "skip_fields")) {
                            const SF = std.meta.FieldEnum(T);
                            const field_enum = @field(SF, f.name);
                            inline for (T.ziggy_options.skip_fields) |sf| {
                                if (field_enum == sf) continue :outer;
                            }
                        }

                        const idx = sr.fields.getIndex(f.name) orelse {
                            std.debug.print("'{s}.{s}' not present in schema\n", .{
                                @typeName(T),
                                f.name,
                            });
                            return error.Validation;
                        };

                        seen_fields[idx] = true;

                        const field = sr.fields.entries.items(.value)[idx];
                        try checkRule(f.type, arena, schema, src, field.rule);
                    }

                    // Any schema field never matched by a type field is a
                    // validation error.
                    for (seen_fields, 0..) |seen, idx| {
                        if (!seen) {
                            std.debug.print("Struct '{s}' is missing field '{s}' \n", .{
                                @typeName(T),
                                sr.fields.entries.items(.key)[idx],
                            });
                            return error.Validation;
                        }
                    }
                },
            }
        },
    }
}

test "bool" {
    const T = bool;
    const case =
        \\root = bool
        \\
    ;

    try checkType(T, case);
}

test "ints" {
    const Ts = &.{ usize, i16, u22, u0, u1, i0, i1, u64, i64 };
    const case =
        \\root = int
        \\
    ;

    inline for (Ts) |T| try checkType(T, case);
}

test "simple struct" {
    const Foo = struct {
        bar: usize,
        baz: bool,
    };

    const case =
        \\root = Foo
        \\
        \\struct Foo {
        \\    bar: int,
        \\    baz: bool,
        \\}
    ;

    try checkType(Foo, case);
}

test "simple struct - missing field in schema" {
    const Foo = struct {
        bar: usize,
        baz: bool,
    };

    const case =
        \\root = Foo
        \\
        \\struct Foo {
        \\    bar: int,
        \\    baz: bool,
        \\    box: bool,
        \\}
    ;

    try std.testing.expectError(error.Validation, checkType(Foo, case));
}

test "simple struct - missing field in type" {
    const Foo = struct {
        bar: usize,
        baz: bool,
        box: bool,
    };

    const case =
        \\root = Foo
        \\
        \\struct Foo {
        \\    bar: int,
        \\    baz: bool,
        \\}
    ;

    try std.testing.expectError(error.Validation, checkType(Foo, case));
}

test "simple struct - skip fields" {
    const Foo = struct {
        bar: usize,
        baz: bool,
        box: bool,

        const Foo = @This();
        pub const ziggy_options = struct {
            pub const skip_fields: []const std.meta.FieldEnum(Foo) = &.{
                .box,
            };
        };
    };

    const case =
        \\root = Foo
        \\
        \\struct Foo {
        \\    bar: int,
        \\    baz: bool,
        \\}
    ;

    try checkType(Foo, case);
}

test "optional at root" {
    const T = ?bool;
    const case =
        \\root = ?bool
    ;

    try checkType(T, case);
}

test "optional in struct" {
    const Foo = struct {
        bar: ?usize,
    };

    const case =
        \\root = Foo
        \\
        \\struct Foo {
        \\    bar: ?int,
        \\}
    ;

    try checkType(Foo, case);
}

test "optional in struct - error" {
    const Foo = struct {
        bar: ?usize,
    };

    const case =
        \\root = Foo
        \\
        \\struct Foo {
        \\    bar: ?bool,
        \\}
    ;

    try std.testing.expectError(error.Validation, checkType(Foo, case));
}

test "array" {
    const Ts = .{ [10]usize, []i64 };
    const case =
        \\root = [int]
    ;

    inline for (Ts) |T| try checkType(T, case);
}

test "array with error" {
    const T = []const bool;
    const case =
        \\root = [?bool]
    ;

    try std.testing.expectError(error.Validation, checkType(T, case));
}
-------------------------------------------------------------------------------- /src/ziggy/Diagnostic.zig: -------------------------------------------------------------------------------- 1 | const Diagnostic = @This(); 2 | 3 | const std = @import("std"); 4 | const Tokenizer = @import("Tokenizer.zig"); 5 | const Token = Tokenizer.Token; 6 | 7 | /// A path to the file, used to display diagnostics. 8 | /// If not present, error positions will be printed as "line: XX col: XX". 9 | /// This field should be set as needed by users. 10 | path: ?[]const u8, 11 | 12 | errors: std.ArrayListUnmanaged(Error) = .{}, 13 | 14 | pub const Error = union(enum) { 15 | overflow, 16 | oom, 17 | // found an unexpected $name (token, ...) 18 | unexpected: struct { 19 | name: []const u8, 20 | sel: Token.Loc.Selection, 21 | expected: []const []const u8, 22 | }, 23 | // Invalid syntax, eg 123ab123, use `unexpected` to also report an expected 24 | // token / value / etc. 25 | syntax: struct { 26 | name: []const u8, 27 | sel: Token.Loc.Selection, 28 | }, 29 | duplicate_field: struct { 30 | name: []const u8, 31 | sel: Token.Loc.Selection, 32 | original: Token.Loc.Selection, 33 | }, 34 | // A struct is missing a field and it has no missing_field_name nodes. 35 | missing_field: struct { 36 | name: []const u8, 37 | sel: Token.Loc.Selection, 38 | }, 39 | unknown_field: struct { 40 | name: []const u8, 41 | sel: Token.Loc.Selection, 42 | }, 43 | // If the value is a struct union, the struct value must have a name. 
44 | missing_struct_name: struct { 45 | // the area where the name should be put 46 | sel: Token.Loc.Selection, 47 | // the expected type expression from schema 48 | expected: []const u8, 49 | }, 50 | unknown_struct_name: struct { 51 | name: []const u8, 52 | sel: Token.Loc.Selection, 53 | // the expected type expression from schema 54 | expected: []const u8, 55 | }, 56 | missing_value: struct { 57 | // the area where the name should be put 58 | sel: Token.Loc.Selection, 59 | // the expected type expression from schema 60 | expected: []const u8, 61 | }, 62 | // The schema corresponding to this file could not be loaded 63 | // (missing file, contains syntax errors, etc). 64 | schema: struct { 65 | sel: Token.Loc.Selection, 66 | // the error encountered while processing the schema file 67 | err: []const u8, 68 | }, 69 | 70 | type_mismatch: struct { 71 | name: []const u8, 72 | sel: Token.Loc.Selection, 73 | expected: []const u8, 74 | }, 75 | 76 | pub const ZigError = error{ 77 | Overflow, 78 | OutOfMemory, 79 | Syntax, 80 | }; 81 | pub fn zigError(e: Error) ZigError { 82 | return switch (e) { 83 | .overflow => error.Overflow, 84 | .oom => error.OutOfMemory, 85 | else => error.Syntax, 86 | }; 87 | } 88 | 89 | pub fn getErrorSelection(e: Error) Token.Loc.Selection { 90 | return switch (e) { 91 | .overflow, .oom => .{ 92 | .start = .{ .line = 0, .col = 0 }, 93 | .end = .{ .line = 0, .col = 0 }, 94 | }, 95 | inline else => |x| x.sel, 96 | }; 97 | } 98 | 99 | pub fn fmt(e: Error, src: []const u8, path: ?[]const u8) ErrorFmt { 100 | return .{ 101 | .err = e, 102 | .src = src, 103 | .path = path, 104 | }; 105 | } 106 | pub const ErrorFmt = struct { 107 | err: Error, 108 | src: []const u8, 109 | path: ?[]const u8, 110 | 111 | pub fn format( 112 | err_fmt: ErrorFmt, 113 | comptime fmt_string: []const u8, 114 | options: std.fmt.FormatOptions, 115 | out_stream: anytype, 116 | ) !void { 117 | _ = options; 118 | 119 | const lsp = std.mem.eql(u8, fmt_string, "lsp"); 120 | 121 | if
(!lsp) {
                const sel = err_fmt.err.getErrorSelection();
                const start = sel.start;
                if (err_fmt.path) |p| {
                    try out_stream.print("{s}:{}:{}:\n", .{
                        p,
                        start.line,
                        start.col,
                    });
                } else {
                    try out_stream.print("line: {} col: {}:\n", .{
                        start.line,
                        start.col,
                    });
                }

                // .oom and .overflow carry no source location: their
                // selection is all-zero (see getErrorSelection above).
                // For them the excerpt below must be skipped, otherwise
                // 'sel.start.col - 1' underflows the u32 and the line
                // lookup is meaningless. Only located errors get one.
                if (start.line > 0) {
                    var it = std.mem.splitScalar(u8, err_fmt.src, '\n');
                    // Skip to the line the error selection starts on
                    // (lines are 1-based).
                    for (1..sel.start.line) |_| _ = it.next().?;

                    const line = it.next().?;
                    const line_trim_left = std.mem.trimLeft(u8, line, &std.ascii.whitespace);

                    // Single-line selections get an exact-width caret;
                    // multi-line selections underline the whole first line.
                    const caret_len = if (sel.start.line == sel.end.line) sel.end.col - sel.start.col else line_trim_left.len;

                    const caret_spaces_len = sel.start.col - 1;

                    const line_trim = std.mem.trimRight(u8, line_trim_left, &std.ascii.whitespace);

                    var hl_buf: [1024]u8 = undefined;

                    // Build the "   ^^^" highlight line; selections too wide
                    // for the buffer simply get no highlight.
                    const highlight = if (caret_len + caret_spaces_len < 1024) blk: {
                        const h = hl_buf[0 .. caret_len + caret_spaces_len];
                        @memset(h[0..caret_spaces_len], ' ');
                        @memset(h[caret_spaces_len..][0..caret_len], '^');
                        break :blk h;
                    } else "";

                    try out_stream.print(
                        \\    {s}
                        \\    {s}
                        \\
                    , .{ line_trim, highlight });
                }
            }

            switch (err_fmt.err) {
                .oom => try out_stream.print("out of memory\n", .{}),
                .overflow => {
                    try out_stream.print("overflow\n", .{});
                },
                .unexpected => |u| {
                    try out_stream.print("unexpected '{s}'", .{u.name});

                    try out_stream.print(", expected: ", .{});

                    for (u.expected, 0..)
|elem, idx| { 182 | try out_stream.print("'{s}'", .{elem}); 183 | if (idx != u.expected.len - 1) { 184 | try out_stream.print(" or ", .{}); 185 | } 186 | } 187 | 188 | try out_stream.print("\n", .{}); 189 | }, 190 | .syntax => |syn| { 191 | if (lsp) { 192 | try out_stream.print("syntax error\n", .{}); 193 | } else { 194 | try out_stream.print("syntax error: '{s}' \n", .{syn.name}); 195 | } 196 | }, 197 | .duplicate_field => |dup| { 198 | if (lsp) { 199 | try out_stream.print("duplicate field", .{}); 200 | } else { 201 | const first_sel = dup.original; 202 | try out_stream.print( 203 | "duplicate field '{s}', first definition here:", 204 | .{dup.name}, 205 | ); 206 | if (err_fmt.path) |p| { 207 | try out_stream.print("\n{s}:{}:{}\n", .{ 208 | p, 209 | first_sel.start.line, 210 | first_sel.start.col, 211 | }); 212 | } else { 213 | try out_stream.print(" line: {} col: {}\n", .{ 214 | first_sel.start.line, 215 | first_sel.start.col, 216 | }); 217 | } 218 | } 219 | }, 220 | .missing_field => |mf| { 221 | try out_stream.print("missing field: '{s}'", .{mf.name}); 222 | }, 223 | .unknown_field => |uf| { 224 | try out_stream.print("unknown field '{s}'", .{uf.name}); 225 | }, 226 | .missing_struct_name => |msn| { 227 | try out_stream.print( 228 | "struct union requires name, expected: '{s}'\n", 229 | .{msn.expected}, 230 | ); 231 | }, 232 | .unknown_struct_name => |usn| { 233 | try out_stream.print( 234 | "unknown struct name, expected: '{s}'\n", 235 | .{usn.expected}, 236 | ); 237 | }, 238 | 239 | .missing_value => |mv| { 240 | try out_stream.print( 241 | "missing value, expected: {s}\n", 242 | .{mv.expected}, 243 | ); 244 | }, 245 | .schema => |s| { 246 | try out_stream.print("schema file error: {s}", .{ 247 | s.err, 248 | }); 249 | }, 250 | 251 | .type_mismatch => |mism| { 252 | try out_stream.print( 253 | "wrong value type, expected {s}", 254 | .{mism.expected}, 255 | ); 256 | }, 257 | } 258 | } 259 | }; 260 | }; 261 | 262 | pub fn deinit(self: *Diagnostic, gpa: 
std.mem.Allocator) void { 263 | self.errors.deinit(gpa); 264 | } 265 | 266 | pub fn debug(self: Diagnostic) void { 267 | std.debug.print("{}", .{self}); 268 | } 269 | 270 | pub fn fmt(d: Diagnostic, src: []const u8) Formatter { 271 | return .{ .diag = d, .src = src }; 272 | } 273 | 274 | pub const Formatter = struct { 275 | diag: Diagnostic, 276 | src: []const u8, 277 | 278 | pub fn format( 279 | self: Formatter, 280 | comptime fmt_string: []const u8, 281 | options: std.fmt.FormatOptions, 282 | out_stream: anytype, 283 | ) !void { 284 | for (self.diag.errors.items) |e| { 285 | try e.fmt(self.src, self.diag.path).format(fmt_string, options, out_stream); 286 | } 287 | } 288 | }; 289 | -------------------------------------------------------------------------------- /src/ziggy/Query.zig: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /tests/schema/errors/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristoff-it/ziggy/fe3bf9389e7ff213cf3548caaf9c6f3d4bb38647/tests/schema/errors/.keep -------------------------------------------------------------------------------- /tests/type_driven.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const ziggy = @import("ziggy"); 3 | const test_type = @import("test_type"); 4 | const CaseType = test_type.Case; 5 | 6 | pub fn main() !void { 7 | var gpa_state: std.heap.DebugAllocator(.{}) = .init; 8 | var arena_state = std.heap.ArenaAllocator.init(gpa_state.allocator()); 9 | defer arena_state.deinit(); 10 | const arena = arena_state.allocator(); 11 | 12 | const args = try std.process.argsAlloc(arena); 13 | const case = try std.fs.cwd().readFileAllocOptions( 14 | arena, 15 | args[1], 16 | ziggy.max_size, 17 | null, 18 | 1, 19 | 0, 20 | ); 21 | 22 | var diag: ziggy.Diagnostic 
= .{ .path = null }; 23 | _ = ziggy.parseLeaky(CaseType, arena, case, .{ 24 | .diagnostic = &diag, 25 | }) catch |err| { 26 | if (err != error.Syntax) @panic("wrong error!"); 27 | std.debug.print("{s}", .{diag.fmt(case)}); 28 | std.process.exit(1); 29 | }; 30 | 31 | @panic("unreachable"); 32 | } 33 | -------------------------------------------------------------------------------- /tests/ziggy/ast/errors/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristoff-it/ziggy/fe3bf9389e7ff213cf3548caaf9c6f3d4bb38647/tests/ziggy/ast/errors/.keep -------------------------------------------------------------------------------- /tests/ziggy/ast/errors/missing_bottom_curly.ziggy: -------------------------------------------------------------------------------- 1 | { 2 | .foo = "bar", 3 | .bar = false, 4 | -------------------------------------------------------------------------------- /tests/ziggy/ast/errors/missing_bottom_curly_snap.txt: -------------------------------------------------------------------------------- 1 | missing_bottom_curly.ziggy:4:1: 2 | 3 | 4 | unexpected '', expected: '.' 
or '}' 5 | 6 | -------------------------------------------------------------------------------- /tests/ziggy/ast/errors/missing_comma.ziggy: -------------------------------------------------------------------------------- 1 | .foo = "bar" 2 | .bar = false, 3 | -------------------------------------------------------------------------------- /tests/ziggy/ast/errors/missing_comma_snap.txt: -------------------------------------------------------------------------------- 1 | missing_comma.ziggy:2:1: 2 | .bar = false, 3 | ^ 4 | unexpected '.', expected: '}' or 'EOF' 5 | 6 | -------------------------------------------------------------------------------- /tests/ziggy/ast/errors/struct.ziggy: -------------------------------------------------------------------------------- 1 | .foo = "bar", 2 | .bar = .false, 3 | -------------------------------------------------------------------------------- /tests/ziggy/ast/errors/struct_snap.txt: -------------------------------------------------------------------------------- 1 | struct.ziggy:2:8: 2 | .bar = .false, 3 | ^ 4 | unexpected '.', expected: '(value)' 5 | 6 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristoff-it/ziggy/fe3bf9389e7ff213cf3548caaf9c6f3d4bb38647/tests/ziggy/type-driven/errors/.keep -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/duplicate_field.zig: -------------------------------------------------------------------------------- 1 | pub const Case = struct { 2 | foo: []const u8, 3 | bar: bool, 4 | }; 5 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/duplicate_field.ziggy: -------------------------------------------------------------------------------- 1 | .foo = "bar", 2 | .bar = false, 3 | .foo 
= "bar", 4 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/duplicate_field_snap.txt: -------------------------------------------------------------------------------- 1 | line: 3 col: 2: 2 | .foo = "bar", 3 | ^^^ 4 | duplicate field 'foo', first definition here: line: 1 col: 2 5 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/missing_bottom_curly.zig: -------------------------------------------------------------------------------- 1 | pub const Case = struct { 2 | foo: []const u8, 3 | bar: bool, 4 | }; 5 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/missing_bottom_curly.ziggy: -------------------------------------------------------------------------------- 1 | { 2 | .foo = "bar", 3 | .bar = false, 4 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/missing_bottom_curly_snap.txt: -------------------------------------------------------------------------------- 1 | line: 4 col: 1: 2 | 3 | 4 | unexpected 'EOF', expected: '.' 
or '}' 5 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/missing_comma.zig: -------------------------------------------------------------------------------- 1 | pub const Case = struct { 2 | foo: []const u8, 3 | bar: bool, 4 | }; 5 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/missing_comma.ziggy: -------------------------------------------------------------------------------- 1 | .foo = "bar" 2 | .bar = false, 3 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/missing_comma_snap.txt: -------------------------------------------------------------------------------- 1 | line: 2 col: 1: 2 | .bar = false, 3 | ^ 4 | unexpected '.', expected: ',' or '}' or 'EOF' 5 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/missing_field.zig: -------------------------------------------------------------------------------- 1 | pub const Case = struct { 2 | foo: []const u8, 3 | bar: bool, 4 | }; 5 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/missing_field.ziggy: -------------------------------------------------------------------------------- 1 | .foo = "bar", 2 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/missing_field_snap.txt: -------------------------------------------------------------------------------- 1 | line: 2 col: 1: 2 | 3 | 4 | missing field: 'bar' -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/struct.zig: -------------------------------------------------------------------------------- 1 | pub const Case = struct { 2 | foo: []const u8, 3 | bar: bool, 4 | }; 5 | 
-------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/struct.ziggy: -------------------------------------------------------------------------------- 1 | .foo = "bar", 2 | .bar = .false, 3 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/struct_snap.txt: -------------------------------------------------------------------------------- 1 | line: 2 col: 8: 2 | .bar = .false, 3 | ^ 4 | unexpected '.', expected: 'true' or 'false' 5 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/unknown_field.zig: -------------------------------------------------------------------------------- 1 | pub const Case = struct { 2 | foo: []const u8, 3 | bar: bool, 4 | }; 5 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/unknown_field.ziggy: -------------------------------------------------------------------------------- 1 | .foo = "bar", 2 | .bar = false, 3 | .baz = "oops", 4 | -------------------------------------------------------------------------------- /tests/ziggy/type-driven/errors/unknown_field_snap.txt: -------------------------------------------------------------------------------- 1 | line: 3 col: 2: 2 | .baz = "oops", 3 | ^^^ 4 | unknown field 'baz' -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "tree-sitter-ziggy-schema" 3 | description = "ziggy-schema grammar for the tree-sitter parsing library" 4 | version = "0.0.1" 5 | keywords = ["incremental", "parsing", "ziggy-schema"] 6 | categories = ["parsing", "text-editors"] 7 | repository = "https://github.com/kristoff-it/ziggy/tree-sitter-ziggy-schema" 8 | edition = "2018" 9 | license = "MIT" 10 | 11 | 
build = "bindings/rust/build.rs" 12 | include = [ 13 | "bindings/rust/*", 14 | "grammar.js", 15 | "queries/*", 16 | "src/*", 17 | ] 18 | 19 | [lib] 20 | path = "bindings/rust/lib.rs" 21 | 22 | [dependencies] 23 | tree-sitter = "~0.20.10" 24 | 25 | [build-dependencies] 26 | cc = "1.0" 27 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Loris Cro 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/binding.gyp: -------------------------------------------------------------------------------- 1 | { 2 | "targets": [ 3 | { 4 | "target_name": "tree_sitter_YOUR_LANGUAGE_NAME_binding", 5 | "include_dirs": [ 6 | " 3 | #include "nan.h" 4 | 5 | using namespace v8; 6 | 7 | extern "C" TSLanguage * tree_sitter_YOUR_LANGUAGE_NAME(); 8 | 9 | namespace { 10 | 11 | NAN_METHOD(New) {} 12 | 13 | void Init(Local exports, Local module) { 14 | Local tpl = Nan::New(New); 15 | tpl->SetClassName(Nan::New("Language").ToLocalChecked()); 16 | tpl->InstanceTemplate()->SetInternalFieldCount(1); 17 | 18 | Local constructor = Nan::GetFunction(tpl).ToLocalChecked(); 19 | Local instance = constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked(); 20 | Nan::SetInternalFieldPointer(instance, 0, tree_sitter_YOUR_LANGUAGE_NAME()); 21 | 22 | Nan::Set(instance, Nan::New("name").ToLocalChecked(), Nan::New("YOUR_LANGUAGE_NAME").ToLocalChecked()); 23 | Nan::Set(module, Nan::New("exports").ToLocalChecked(), instance); 24 | } 25 | 26 | NODE_MODULE(tree_sitter_YOUR_LANGUAGE_NAME_binding, Init) 27 | 28 | } // namespace 29 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/bindings/node/index.js: -------------------------------------------------------------------------------- 1 | try { 2 | module.exports = require("../../build/Release/tree_sitter_YOUR_LANGUAGE_NAME_binding"); 3 | } catch (error1) { 4 | if (error1.code !== 'MODULE_NOT_FOUND') { 5 | throw error1; 6 | } 7 | try { 8 | module.exports = require("../../build/Debug/tree_sitter_YOUR_LANGUAGE_NAME_binding"); 9 | } catch (error2) { 10 | if (error2.code !== 'MODULE_NOT_FOUND') { 11 | throw error2; 12 | } 13 | throw error1 14 | } 15 | } 16 | 17 | try { 18 | module.exports.nodeTypeInfo = require("../../src/node-types.json"); 19 | } catch (_) {} 20 | 
-------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/bindings/rust/build.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | let src_dir = std::path::Path::new("src"); 3 | 4 | let mut c_config = cc::Build::new(); 5 | c_config.include(&src_dir); 6 | c_config 7 | .flag_if_supported("-Wno-unused-parameter") 8 | .flag_if_supported("-Wno-unused-but-set-variable") 9 | .flag_if_supported("-Wno-trigraphs"); 10 | let parser_path = src_dir.join("parser.c"); 11 | c_config.file(&parser_path); 12 | 13 | // If your language uses an external scanner written in C, 14 | // then include this block of code: 15 | 16 | /* 17 | let scanner_path = src_dir.join("scanner.c"); 18 | c_config.file(&scanner_path); 19 | println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap()); 20 | */ 21 | 22 | c_config.compile("parser"); 23 | println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap()); 24 | 25 | // If your language uses an external scanner written in C++, 26 | // then include this block of code: 27 | 28 | /* 29 | let mut cpp_config = cc::Build::new(); 30 | cpp_config.cpp(true); 31 | cpp_config.include(&src_dir); 32 | cpp_config 33 | .flag_if_supported("-Wno-unused-parameter") 34 | .flag_if_supported("-Wno-unused-but-set-variable"); 35 | let scanner_path = src_dir.join("scanner.cc"); 36 | cpp_config.file(&scanner_path); 37 | cpp_config.compile("scanner"); 38 | println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap()); 39 | */ 40 | } 41 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/bindings/rust/lib.rs: -------------------------------------------------------------------------------- 1 | //! This crate provides YOUR_LANGUAGE_NAME language support for the [tree-sitter][] parsing library. 2 | //! 3 | //! 
Typically, you will use the [language][language func] function to add this language to a 4 | //! tree-sitter [Parser][], and then use the parser to parse some code: 5 | //! 6 | //! ``` 7 | //! let code = ""; 8 | //! let mut parser = tree_sitter::Parser::new(); 9 | //! parser.set_language(tree_sitter_YOUR_LANGUAGE_NAME::language()).expect("Error loading YOUR_LANGUAGE_NAME grammar"); 10 | //! let tree = parser.parse(code, None).unwrap(); 11 | //! ``` 12 | //! 13 | //! [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html 14 | //! [language func]: fn.language.html 15 | //! [Parser]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Parser.html 16 | //! [tree-sitter]: https://tree-sitter.github.io/ 17 | 18 | use tree_sitter::Language; 19 | 20 | extern "C" { 21 | fn tree_sitter_YOUR_LANGUAGE_NAME() -> Language; 22 | } 23 | 24 | /// Get the tree-sitter [Language][] for this grammar. 25 | /// 26 | /// [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html 27 | pub fn language() -> Language { 28 | unsafe { tree_sitter_YOUR_LANGUAGE_NAME() } 29 | } 30 | 31 | /// The content of the [`node-types.json`][] file for this grammar. 
32 | /// 33 | /// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers#static-node-types 34 | pub const NODE_TYPES: &'static str = include_str!("../../src/node-types.json"); 35 | 36 | // Uncomment these to include any queries that this grammar contains 37 | 38 | // pub const HIGHLIGHTS_QUERY: &'static str = include_str!("../../queries/highlights.scm"); 39 | // pub const INJECTIONS_QUERY: &'static str = include_str!("../../queries/injections.scm"); 40 | // pub const LOCALS_QUERY: &'static str = include_str!("../../queries/locals.scm"); 41 | // pub const TAGS_QUERY: &'static str = include_str!("../../queries/tags.scm"); 42 | 43 | #[cfg(test)] 44 | mod tests { 45 | #[test] 46 | fn test_can_load_grammar() { 47 | let mut parser = tree_sitter::Parser::new(); 48 | parser 49 | .set_language(super::language()) 50 | .expect("Error loading YOUR_LANGUAGE_NAME language"); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/grammar.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable 80001 */ 2 | /* eslint-disable arrow-parens */ 3 | /* eslint-disable camelcase */ 4 | /* eslint-disable-next-line spaced-comment */ 5 | /// 6 | //@ts-check 7 | 8 | 9 | module.exports = grammar({ 10 | name: 'ziggy_schema', 11 | 12 | extras: $ => [/\s/], 13 | word: $ => $.identifier, 14 | 15 | rules: { 16 | schema: $ => seq( 17 | seq("root", '=', field("root", $.expr)), 18 | field("tags", commaSep($.tag)), 19 | field("structs", repeat($.struct)) 20 | ), 21 | 22 | 23 | tag_name: $ => seq('@', alias($.identifier, "_tag_name")), 24 | enum_definition: $ => seq("enum", "{", commaSep1($.identifier), "}"), 25 | tag: $ => seq( 26 | field("docs", optional($.doc_comment)), 27 | field("name", $.tag_name), 28 | "=", 29 | field("expr", choice("bytes", $.enum_definition)), 30 | ), 31 | 32 | expr: $ => choice( 33 | $.struct_union, 34 | $.identifier, 35 | $.tag_name, 
36 | $.map, 37 | $.array, 38 | $.optional, 39 | "bytes", 40 | "int", 41 | "float", 42 | "bool", 43 | "any", 44 | "unknown", 45 | ), 46 | 47 | struct_union: $ => seq($.identifier, repeat1(seq('|', $.identifier))), 48 | 49 | 50 | 51 | identifier: (_) => { 52 | const identifier_start = /[a-zA-Z_]/; 53 | const identifier_part = choice(identifier_start, /[0-9]/); 54 | return token(seq(identifier_start, repeat(identifier_part))); 55 | }, 56 | 57 | map: $ => seq("map", '[', $.expr, ']'), 58 | array: $ => seq('[', $.expr, ']'), 59 | optional: $ => seq('?', $.expr), 60 | 61 | struct: $ => seq( 62 | field("docs", optional($.doc_comment)), 63 | 'struct', field("name", $.identifier), '{', 64 | commaSep($.struct_field), 65 | '}', 66 | ), 67 | 68 | struct_field: $ => seq( 69 | field("docs", optional($.doc_comment)), 70 | field("key", $.identifier), ':', field("value", $.expr) 71 | ), 72 | 73 | doc_comment: _ => repeat1(token(seq('///', /.*/))), 74 | } 75 | }); 76 | 77 | /** 78 | * Creates a rule to optionally match one or more of the rules separated by a comma 79 | * 80 | * @param {RuleOrLiteral} rule 81 | * 82 | * @return {SeqRule} 83 | * 84 | */ 85 | function commaSep1(rule) { 86 | return seq(rule, repeat(seq(",", rule)), optional(",")); 87 | } 88 | 89 | /** 90 | * Creates a rule to optionally match one or more of the rules separated by a comma 91 | * 92 | * @param {RuleOrLiteral} rule 93 | * 94 | * @return {Rule} 95 | * 96 | */ 97 | function commaSep(rule) { 98 | return optional(commaSep1(rule)); 99 | } 100 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tree-sitter-YOUR-LANGUAGE-NAME", 3 | "version": "0.0.1", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "tree-sitter-YOUR-LANGUAGE-NAME", 9 | "version": "0.0.1", 10 | "dependencies": { 11 | "nan": "^2.12.1" 
12 | }, 13 | "devDependencies": { 14 | "tree-sitter-cli": "^0.20.8" 15 | } 16 | }, 17 | "node_modules/nan": { 18 | "version": "2.18.0", 19 | "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz", 20 | "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==" 21 | }, 22 | "node_modules/tree-sitter-cli": { 23 | "version": "0.20.8", 24 | "resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.20.8.tgz", 25 | "integrity": "sha512-XjTcS3wdTy/2cc/ptMLc/WRyOLECRYcMTrSWyhZnj1oGSOWbHLTklgsgRICU3cPfb0vy+oZCC33M43u6R1HSCA==", 26 | "dev": true, 27 | "hasInstallScript": true, 28 | "bin": { 29 | "tree-sitter": "cli.js" 30 | } 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tree-sitter-ziggy-schema", 3 | "version": "0.0.1", 4 | "description": "ziggy-schema grammar for tree-sitter", 5 | "main": "bindings/node", 6 | "keywords": [ 7 | "parsing", 8 | "incremental" 9 | ], 10 | "dependencies": { 11 | "nan": "^2.12.1" 12 | }, 13 | "devDependencies": { 14 | "tree-sitter-cli": "^0.20.8" 15 | }, 16 | "scripts": { 17 | "test": "tree-sitter test" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/queries/contexts.scm: -------------------------------------------------------------------------------- 1 | [ 2 | (struct) 3 | (struct_field) 4 | ] @context 5 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/queries/highlights.scm: -------------------------------------------------------------------------------- 1 | 2 | (struct_field 3 | key: (_) @keyword) 4 | 5 | (tag_name) @function 6 | 7 | [ 8 | "unknown" 9 | "any" 10 | "struct" 11 | "root" 12 | "enum" 13 | "map" 14 | ] @keyword 15 | 16 | 17 
| (identifier) @type 18 | 19 | "?" @type 20 | 21 | [ 22 | "bool" 23 | "bytes" 24 | "int" 25 | "float" 26 | ] @constant.builtin 27 | 28 | 29 | (doc_comment) @comment.line.documentation 30 | 31 | (ERROR) @error 32 | 33 | "," @punctuation.delimiter 34 | 35 | "|" @punctuation 36 | 37 | 38 | [ 39 | "[" 40 | "]" 41 | "{" 42 | "}" 43 | ] @punctuation.bracket 44 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/queries/indents.scm: -------------------------------------------------------------------------------- 1 | [ 2 | (struct) 3 | ] @indent 4 | 5 | [ 6 | "}" 7 | ] @outdent 8 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/queries/rainbows.scm: -------------------------------------------------------------------------------- 1 | [ 2 | (struct) 3 | ] @rainbow.scope 4 | 5 | [ 6 | "[" "]" 7 | "{" "}" 8 | ] @rainbow.bracket 9 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/src/grammar.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ziggy_schema", 3 | "word": "identifier", 4 | "rules": { 5 | "schema": { 6 | "type": "SEQ", 7 | "members": [ 8 | { 9 | "type": "SEQ", 10 | "members": [ 11 | { 12 | "type": "STRING", 13 | "value": "root" 14 | }, 15 | { 16 | "type": "STRING", 17 | "value": "=" 18 | }, 19 | { 20 | "type": "FIELD", 21 | "name": "root", 22 | "content": { 23 | "type": "SYMBOL", 24 | "name": "expr" 25 | } 26 | } 27 | ] 28 | }, 29 | { 30 | "type": "FIELD", 31 | "name": "tags", 32 | "content": { 33 | "type": "CHOICE", 34 | "members": [ 35 | { 36 | "type": "SEQ", 37 | "members": [ 38 | { 39 | "type": "SYMBOL", 40 | "name": "tag" 41 | }, 42 | { 43 | "type": "REPEAT", 44 | "content": { 45 | "type": "SEQ", 46 | "members": [ 47 | { 48 | "type": "STRING", 49 | "value": "," 50 | }, 51 | { 52 | "type": "SYMBOL", 53 | "name": "tag" 54 | } 55 | ] 56 
| } 57 | }, 58 | { 59 | "type": "CHOICE", 60 | "members": [ 61 | { 62 | "type": "STRING", 63 | "value": "," 64 | }, 65 | { 66 | "type": "BLANK" 67 | } 68 | ] 69 | } 70 | ] 71 | }, 72 | { 73 | "type": "BLANK" 74 | } 75 | ] 76 | } 77 | }, 78 | { 79 | "type": "FIELD", 80 | "name": "structs", 81 | "content": { 82 | "type": "REPEAT", 83 | "content": { 84 | "type": "SYMBOL", 85 | "name": "struct" 86 | } 87 | } 88 | } 89 | ] 90 | }, 91 | "tag_name": { 92 | "type": "SEQ", 93 | "members": [ 94 | { 95 | "type": "STRING", 96 | "value": "@" 97 | }, 98 | { 99 | "type": "ALIAS", 100 | "content": { 101 | "type": "SYMBOL", 102 | "name": "identifier" 103 | }, 104 | "named": false, 105 | "value": "_tag_name" 106 | } 107 | ] 108 | }, 109 | "enum_definition": { 110 | "type": "SEQ", 111 | "members": [ 112 | { 113 | "type": "STRING", 114 | "value": "enum" 115 | }, 116 | { 117 | "type": "STRING", 118 | "value": "{" 119 | }, 120 | { 121 | "type": "SEQ", 122 | "members": [ 123 | { 124 | "type": "SYMBOL", 125 | "name": "identifier" 126 | }, 127 | { 128 | "type": "REPEAT", 129 | "content": { 130 | "type": "SEQ", 131 | "members": [ 132 | { 133 | "type": "STRING", 134 | "value": "," 135 | }, 136 | { 137 | "type": "SYMBOL", 138 | "name": "identifier" 139 | } 140 | ] 141 | } 142 | }, 143 | { 144 | "type": "CHOICE", 145 | "members": [ 146 | { 147 | "type": "STRING", 148 | "value": "," 149 | }, 150 | { 151 | "type": "BLANK" 152 | } 153 | ] 154 | } 155 | ] 156 | }, 157 | { 158 | "type": "STRING", 159 | "value": "}" 160 | } 161 | ] 162 | }, 163 | "tag": { 164 | "type": "SEQ", 165 | "members": [ 166 | { 167 | "type": "FIELD", 168 | "name": "docs", 169 | "content": { 170 | "type": "CHOICE", 171 | "members": [ 172 | { 173 | "type": "SYMBOL", 174 | "name": "doc_comment" 175 | }, 176 | { 177 | "type": "BLANK" 178 | } 179 | ] 180 | } 181 | }, 182 | { 183 | "type": "FIELD", 184 | "name": "name", 185 | "content": { 186 | "type": "SYMBOL", 187 | "name": "tag_name" 188 | } 189 | }, 190 | { 191 | "type": 
"STRING", 192 | "value": "=" 193 | }, 194 | { 195 | "type": "FIELD", 196 | "name": "expr", 197 | "content": { 198 | "type": "CHOICE", 199 | "members": [ 200 | { 201 | "type": "STRING", 202 | "value": "bytes" 203 | }, 204 | { 205 | "type": "SYMBOL", 206 | "name": "enum_definition" 207 | } 208 | ] 209 | } 210 | } 211 | ] 212 | }, 213 | "expr": { 214 | "type": "CHOICE", 215 | "members": [ 216 | { 217 | "type": "SYMBOL", 218 | "name": "struct_union" 219 | }, 220 | { 221 | "type": "SYMBOL", 222 | "name": "identifier" 223 | }, 224 | { 225 | "type": "SYMBOL", 226 | "name": "tag_name" 227 | }, 228 | { 229 | "type": "SYMBOL", 230 | "name": "map" 231 | }, 232 | { 233 | "type": "SYMBOL", 234 | "name": "array" 235 | }, 236 | { 237 | "type": "SYMBOL", 238 | "name": "optional" 239 | }, 240 | { 241 | "type": "STRING", 242 | "value": "bytes" 243 | }, 244 | { 245 | "type": "STRING", 246 | "value": "int" 247 | }, 248 | { 249 | "type": "STRING", 250 | "value": "float" 251 | }, 252 | { 253 | "type": "STRING", 254 | "value": "bool" 255 | }, 256 | { 257 | "type": "STRING", 258 | "value": "any" 259 | }, 260 | { 261 | "type": "STRING", 262 | "value": "unknown" 263 | } 264 | ] 265 | }, 266 | "struct_union": { 267 | "type": "SEQ", 268 | "members": [ 269 | { 270 | "type": "SYMBOL", 271 | "name": "identifier" 272 | }, 273 | { 274 | "type": "REPEAT1", 275 | "content": { 276 | "type": "SEQ", 277 | "members": [ 278 | { 279 | "type": "STRING", 280 | "value": "|" 281 | }, 282 | { 283 | "type": "SYMBOL", 284 | "name": "identifier" 285 | } 286 | ] 287 | } 288 | } 289 | ] 290 | }, 291 | "identifier": { 292 | "type": "TOKEN", 293 | "content": { 294 | "type": "SEQ", 295 | "members": [ 296 | { 297 | "type": "PATTERN", 298 | "value": "[a-zA-Z_]" 299 | }, 300 | { 301 | "type": "REPEAT", 302 | "content": { 303 | "type": "CHOICE", 304 | "members": [ 305 | { 306 | "type": "PATTERN", 307 | "value": "[a-zA-Z_]" 308 | }, 309 | { 310 | "type": "PATTERN", 311 | "value": "[0-9]" 312 | } 313 | ] 314 | } 315 | } 316 
| ] 317 | } 318 | }, 319 | "map": { 320 | "type": "SEQ", 321 | "members": [ 322 | { 323 | "type": "STRING", 324 | "value": "map" 325 | }, 326 | { 327 | "type": "STRING", 328 | "value": "[" 329 | }, 330 | { 331 | "type": "SYMBOL", 332 | "name": "expr" 333 | }, 334 | { 335 | "type": "STRING", 336 | "value": "]" 337 | } 338 | ] 339 | }, 340 | "array": { 341 | "type": "SEQ", 342 | "members": [ 343 | { 344 | "type": "STRING", 345 | "value": "[" 346 | }, 347 | { 348 | "type": "SYMBOL", 349 | "name": "expr" 350 | }, 351 | { 352 | "type": "STRING", 353 | "value": "]" 354 | } 355 | ] 356 | }, 357 | "optional": { 358 | "type": "SEQ", 359 | "members": [ 360 | { 361 | "type": "STRING", 362 | "value": "?" 363 | }, 364 | { 365 | "type": "SYMBOL", 366 | "name": "expr" 367 | } 368 | ] 369 | }, 370 | "struct": { 371 | "type": "SEQ", 372 | "members": [ 373 | { 374 | "type": "FIELD", 375 | "name": "docs", 376 | "content": { 377 | "type": "CHOICE", 378 | "members": [ 379 | { 380 | "type": "SYMBOL", 381 | "name": "doc_comment" 382 | }, 383 | { 384 | "type": "BLANK" 385 | } 386 | ] 387 | } 388 | }, 389 | { 390 | "type": "STRING", 391 | "value": "struct" 392 | }, 393 | { 394 | "type": "FIELD", 395 | "name": "name", 396 | "content": { 397 | "type": "SYMBOL", 398 | "name": "identifier" 399 | } 400 | }, 401 | { 402 | "type": "STRING", 403 | "value": "{" 404 | }, 405 | { 406 | "type": "CHOICE", 407 | "members": [ 408 | { 409 | "type": "SEQ", 410 | "members": [ 411 | { 412 | "type": "SYMBOL", 413 | "name": "struct_field" 414 | }, 415 | { 416 | "type": "REPEAT", 417 | "content": { 418 | "type": "SEQ", 419 | "members": [ 420 | { 421 | "type": "STRING", 422 | "value": "," 423 | }, 424 | { 425 | "type": "SYMBOL", 426 | "name": "struct_field" 427 | } 428 | ] 429 | } 430 | }, 431 | { 432 | "type": "CHOICE", 433 | "members": [ 434 | { 435 | "type": "STRING", 436 | "value": "," 437 | }, 438 | { 439 | "type": "BLANK" 440 | } 441 | ] 442 | } 443 | ] 444 | }, 445 | { 446 | "type": "BLANK" 447 | } 448 | 
] 449 | }, 450 | { 451 | "type": "STRING", 452 | "value": "}" 453 | } 454 | ] 455 | }, 456 | "struct_field": { 457 | "type": "SEQ", 458 | "members": [ 459 | { 460 | "type": "FIELD", 461 | "name": "docs", 462 | "content": { 463 | "type": "CHOICE", 464 | "members": [ 465 | { 466 | "type": "SYMBOL", 467 | "name": "doc_comment" 468 | }, 469 | { 470 | "type": "BLANK" 471 | } 472 | ] 473 | } 474 | }, 475 | { 476 | "type": "FIELD", 477 | "name": "key", 478 | "content": { 479 | "type": "SYMBOL", 480 | "name": "identifier" 481 | } 482 | }, 483 | { 484 | "type": "STRING", 485 | "value": ":" 486 | }, 487 | { 488 | "type": "FIELD", 489 | "name": "value", 490 | "content": { 491 | "type": "SYMBOL", 492 | "name": "expr" 493 | } 494 | } 495 | ] 496 | }, 497 | "doc_comment": { 498 | "type": "REPEAT1", 499 | "content": { 500 | "type": "TOKEN", 501 | "content": { 502 | "type": "SEQ", 503 | "members": [ 504 | { 505 | "type": "STRING", 506 | "value": "///" 507 | }, 508 | { 509 | "type": "PATTERN", 510 | "value": ".*" 511 | } 512 | ] 513 | } 514 | } 515 | } 516 | }, 517 | "extras": [ 518 | { 519 | "type": "PATTERN", 520 | "value": "\\s" 521 | } 522 | ], 523 | "conflicts": [], 524 | "precedences": [], 525 | "externals": [], 526 | "inline": [], 527 | "supertypes": [] 528 | } 529 | 530 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/src/node-types.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "array", 4 | "named": true, 5 | "fields": {}, 6 | "children": { 7 | "multiple": false, 8 | "required": true, 9 | "types": [ 10 | { 11 | "type": "expr", 12 | "named": true 13 | } 14 | ] 15 | } 16 | }, 17 | { 18 | "type": "doc_comment", 19 | "named": true, 20 | "fields": {} 21 | }, 22 | { 23 | "type": "enum_definition", 24 | "named": true, 25 | "fields": {}, 26 | "children": { 27 | "multiple": true, 28 | "required": true, 29 | "types": [ 30 | { 31 | "type": "identifier", 32 | 
"named": true 33 | } 34 | ] 35 | } 36 | }, 37 | { 38 | "type": "expr", 39 | "named": true, 40 | "fields": {}, 41 | "children": { 42 | "multiple": false, 43 | "required": false, 44 | "types": [ 45 | { 46 | "type": "array", 47 | "named": true 48 | }, 49 | { 50 | "type": "identifier", 51 | "named": true 52 | }, 53 | { 54 | "type": "map", 55 | "named": true 56 | }, 57 | { 58 | "type": "optional", 59 | "named": true 60 | }, 61 | { 62 | "type": "struct_union", 63 | "named": true 64 | }, 65 | { 66 | "type": "tag_name", 67 | "named": true 68 | } 69 | ] 70 | } 71 | }, 72 | { 73 | "type": "map", 74 | "named": true, 75 | "fields": {}, 76 | "children": { 77 | "multiple": false, 78 | "required": true, 79 | "types": [ 80 | { 81 | "type": "expr", 82 | "named": true 83 | } 84 | ] 85 | } 86 | }, 87 | { 88 | "type": "optional", 89 | "named": true, 90 | "fields": {}, 91 | "children": { 92 | "multiple": false, 93 | "required": true, 94 | "types": [ 95 | { 96 | "type": "expr", 97 | "named": true 98 | } 99 | ] 100 | } 101 | }, 102 | { 103 | "type": "schema", 104 | "named": true, 105 | "fields": { 106 | "root": { 107 | "multiple": false, 108 | "required": true, 109 | "types": [ 110 | { 111 | "type": "expr", 112 | "named": true 113 | } 114 | ] 115 | }, 116 | "structs": { 117 | "multiple": true, 118 | "required": false, 119 | "types": [ 120 | { 121 | "type": "struct", 122 | "named": true 123 | } 124 | ] 125 | }, 126 | "tags": { 127 | "multiple": true, 128 | "required": false, 129 | "types": [ 130 | { 131 | "type": ",", 132 | "named": false 133 | }, 134 | { 135 | "type": "tag", 136 | "named": true 137 | } 138 | ] 139 | } 140 | } 141 | }, 142 | { 143 | "type": "struct", 144 | "named": true, 145 | "fields": { 146 | "docs": { 147 | "multiple": false, 148 | "required": false, 149 | "types": [ 150 | { 151 | "type": "doc_comment", 152 | "named": true 153 | } 154 | ] 155 | }, 156 | "name": { 157 | "multiple": false, 158 | "required": true, 159 | "types": [ 160 | { 161 | "type": "identifier", 162 | 
"named": true 163 | } 164 | ] 165 | } 166 | }, 167 | "children": { 168 | "multiple": true, 169 | "required": false, 170 | "types": [ 171 | { 172 | "type": "struct_field", 173 | "named": true 174 | } 175 | ] 176 | } 177 | }, 178 | { 179 | "type": "struct_field", 180 | "named": true, 181 | "fields": { 182 | "docs": { 183 | "multiple": false, 184 | "required": false, 185 | "types": [ 186 | { 187 | "type": "doc_comment", 188 | "named": true 189 | } 190 | ] 191 | }, 192 | "key": { 193 | "multiple": false, 194 | "required": true, 195 | "types": [ 196 | { 197 | "type": "identifier", 198 | "named": true 199 | } 200 | ] 201 | }, 202 | "value": { 203 | "multiple": false, 204 | "required": true, 205 | "types": [ 206 | { 207 | "type": "expr", 208 | "named": true 209 | } 210 | ] 211 | } 212 | } 213 | }, 214 | { 215 | "type": "struct_union", 216 | "named": true, 217 | "fields": {}, 218 | "children": { 219 | "multiple": true, 220 | "required": true, 221 | "types": [ 222 | { 223 | "type": "identifier", 224 | "named": true 225 | } 226 | ] 227 | } 228 | }, 229 | { 230 | "type": "tag", 231 | "named": true, 232 | "fields": { 233 | "docs": { 234 | "multiple": false, 235 | "required": false, 236 | "types": [ 237 | { 238 | "type": "doc_comment", 239 | "named": true 240 | } 241 | ] 242 | }, 243 | "expr": { 244 | "multiple": false, 245 | "required": true, 246 | "types": [ 247 | { 248 | "type": "bytes", 249 | "named": false 250 | }, 251 | { 252 | "type": "enum_definition", 253 | "named": true 254 | } 255 | ] 256 | }, 257 | "name": { 258 | "multiple": false, 259 | "required": true, 260 | "types": [ 261 | { 262 | "type": "tag_name", 263 | "named": true 264 | } 265 | ] 266 | } 267 | } 268 | }, 269 | { 270 | "type": "tag_name", 271 | "named": true, 272 | "fields": {} 273 | }, 274 | { 275 | "type": ",", 276 | "named": false 277 | }, 278 | { 279 | "type": ":", 280 | "named": false 281 | }, 282 | { 283 | "type": "=", 284 | "named": false 285 | }, 286 | { 287 | "type": "?", 288 | "named": false 289 
| }, 290 | { 291 | "type": "@", 292 | "named": false 293 | }, 294 | { 295 | "type": "[", 296 | "named": false 297 | }, 298 | { 299 | "type": "]", 300 | "named": false 301 | }, 302 | { 303 | "type": "_tag_name", 304 | "named": false 305 | }, 306 | { 307 | "type": "any", 308 | "named": false 309 | }, 310 | { 311 | "type": "bool", 312 | "named": false 313 | }, 314 | { 315 | "type": "bytes", 316 | "named": false 317 | }, 318 | { 319 | "type": "enum", 320 | "named": false 321 | }, 322 | { 323 | "type": "float", 324 | "named": false 325 | }, 326 | { 327 | "type": "identifier", 328 | "named": true 329 | }, 330 | { 331 | "type": "int", 332 | "named": false 333 | }, 334 | { 335 | "type": "map", 336 | "named": false 337 | }, 338 | { 339 | "type": "root", 340 | "named": false 341 | }, 342 | { 343 | "type": "struct", 344 | "named": false 345 | }, 346 | { 347 | "type": "unknown", 348 | "named": false 349 | }, 350 | { 351 | "type": "{", 352 | "named": false 353 | }, 354 | { 355 | "type": "|", 356 | "named": false 357 | }, 358 | { 359 | "type": "}", 360 | "named": false 361 | } 362 | ] -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/src/root.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const testing = std.testing; 3 | 4 | export fn add(a: i32, b: i32) i32 { 5 | return a + b; 6 | } 7 | 8 | test "basic add functionality" { 9 | try testing.expect(add(3, 7) == 10); 10 | } 11 | -------------------------------------------------------------------------------- /tree-sitter-ziggy-schema/src/tree_sitter/parser.h: -------------------------------------------------------------------------------- 1 | #ifndef TREE_SITTER_PARSER_H_ 2 | #define TREE_SITTER_PARSER_H_ 3 | 4 | #ifdef __cplusplus 5 | extern "C" { 6 | #endif 7 | 8 | #include 9 | #include 10 | #include 11 | 12 | #define ts_builtin_sym_error ((TSSymbol)-1) 13 | #define ts_builtin_sym_end 0 14 | #define 
TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 15 | 16 | typedef uint16_t TSStateId; 17 | 18 | #ifndef TREE_SITTER_API_H_ 19 | typedef uint16_t TSSymbol; 20 | typedef uint16_t TSFieldId; 21 | typedef struct TSLanguage TSLanguage; 22 | #endif 23 | 24 | typedef struct { 25 | TSFieldId field_id; 26 | uint8_t child_index; 27 | bool inherited; 28 | } TSFieldMapEntry; 29 | 30 | typedef struct { 31 | uint16_t index; 32 | uint16_t length; 33 | } TSFieldMapSlice; 34 | 35 | typedef struct { 36 | bool visible; 37 | bool named; 38 | bool supertype; 39 | } TSSymbolMetadata; 40 | 41 | typedef struct TSLexer TSLexer; 42 | 43 | struct TSLexer { 44 | int32_t lookahead; 45 | TSSymbol result_symbol; 46 | void (*advance)(TSLexer *, bool); 47 | void (*mark_end)(TSLexer *); 48 | uint32_t (*get_column)(TSLexer *); 49 | bool (*is_at_included_range_start)(const TSLexer *); 50 | bool (*eof)(const TSLexer *); 51 | }; 52 | 53 | typedef enum { 54 | TSParseActionTypeShift, 55 | TSParseActionTypeReduce, 56 | TSParseActionTypeAccept, 57 | TSParseActionTypeRecover, 58 | } TSParseActionType; 59 | 60 | typedef union { 61 | struct { 62 | uint8_t type; 63 | TSStateId state; 64 | bool extra; 65 | bool repetition; 66 | } shift; 67 | struct { 68 | uint8_t type; 69 | uint8_t child_count; 70 | TSSymbol symbol; 71 | int16_t dynamic_precedence; 72 | uint16_t production_id; 73 | } reduce; 74 | uint8_t type; 75 | } TSParseAction; 76 | 77 | typedef struct { 78 | uint16_t lex_state; 79 | uint16_t external_lex_state; 80 | } TSLexMode; 81 | 82 | typedef union { 83 | TSParseAction action; 84 | struct { 85 | uint8_t count; 86 | bool reusable; 87 | } entry; 88 | } TSParseActionEntry; 89 | 90 | struct TSLanguage { 91 | uint32_t version; 92 | uint32_t symbol_count; 93 | uint32_t alias_count; 94 | uint32_t token_count; 95 | uint32_t external_token_count; 96 | uint32_t state_count; 97 | uint32_t large_state_count; 98 | uint32_t production_id_count; 99 | uint32_t field_count; 100 | uint16_t max_alias_sequence_length; 101 | 
const uint16_t *parse_table; 102 | const uint16_t *small_parse_table; 103 | const uint32_t *small_parse_table_map; 104 | const TSParseActionEntry *parse_actions; 105 | const char * const *symbol_names; 106 | const char * const *field_names; 107 | const TSFieldMapSlice *field_map_slices; 108 | const TSFieldMapEntry *field_map_entries; 109 | const TSSymbolMetadata *symbol_metadata; 110 | const TSSymbol *public_symbol_map; 111 | const uint16_t *alias_map; 112 | const TSSymbol *alias_sequences; 113 | const TSLexMode *lex_modes; 114 | bool (*lex_fn)(TSLexer *, TSStateId); 115 | bool (*keyword_lex_fn)(TSLexer *, TSStateId); 116 | TSSymbol keyword_capture_token; 117 | struct { 118 | const bool *states; 119 | const TSSymbol *symbol_map; 120 | void *(*create)(void); 121 | void (*destroy)(void *); 122 | bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); 123 | unsigned (*serialize)(void *, char *); 124 | void (*deserialize)(void *, const char *, unsigned); 125 | } external_scanner; 126 | const TSStateId *primary_state_ids; 127 | }; 128 | 129 | /* 130 | * Lexer Macros 131 | */ 132 | 133 | #define START_LEXER() \ 134 | bool result = false; \ 135 | bool skip = false; \ 136 | bool eof = false; \ 137 | int32_t lookahead; \ 138 | goto start; \ 139 | next_state: \ 140 | lexer->advance(lexer, skip); \ 141 | start: \ 142 | skip = false; \ 143 | lookahead = lexer->lookahead; 144 | 145 | #define ADVANCE(state_value) \ 146 | { \ 147 | state = state_value; \ 148 | goto next_state; \ 149 | } 150 | 151 | #define SKIP(state_value) \ 152 | { \ 153 | skip = true; \ 154 | state = state_value; \ 155 | goto next_state; \ 156 | } 157 | 158 | #define ACCEPT_TOKEN(symbol_value) \ 159 | result = true; \ 160 | lexer->result_symbol = symbol_value; \ 161 | lexer->mark_end(lexer); 162 | 163 | #define END_STATE() return result; 164 | 165 | /* 166 | * Parse Table Macros 167 | */ 168 | 169 | #define SMALL_STATE(id) id - LARGE_STATE_COUNT 170 | 171 | #define STATE(id) id 172 | 173 | #define 
ACTIONS(id) id 174 | 175 | #define SHIFT(state_value) \ 176 | {{ \ 177 | .shift = { \ 178 | .type = TSParseActionTypeShift, \ 179 | .state = state_value \ 180 | } \ 181 | }} 182 | 183 | #define SHIFT_REPEAT(state_value) \ 184 | {{ \ 185 | .shift = { \ 186 | .type = TSParseActionTypeShift, \ 187 | .state = state_value, \ 188 | .repetition = true \ 189 | } \ 190 | }} 191 | 192 | #define SHIFT_EXTRA() \ 193 | {{ \ 194 | .shift = { \ 195 | .type = TSParseActionTypeShift, \ 196 | .extra = true \ 197 | } \ 198 | }} 199 | 200 | #define REDUCE(symbol_val, child_count_val, ...) \ 201 | {{ \ 202 | .reduce = { \ 203 | .type = TSParseActionTypeReduce, \ 204 | .symbol = symbol_val, \ 205 | .child_count = child_count_val, \ 206 | __VA_ARGS__ \ 207 | }, \ 208 | }} 209 | 210 | #define RECOVER() \ 211 | {{ \ 212 | .type = TSParseActionTypeRecover \ 213 | }} 214 | 215 | #define ACCEPT_INPUT() \ 216 | {{ \ 217 | .type = TSParseActionTypeAccept \ 218 | }} 219 | 220 | #ifdef __cplusplus 221 | } 222 | #endif 223 | 224 | #endif // TREE_SITTER_PARSER_H_ 225 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "tree-sitter-YOUR-LANGUAGE-NAME" 3 | description = "YOUR-LANGUAGE-NAME grammar for the tree-sitter parsing library" 4 | version = "0.0.1" 5 | keywords = ["incremental", "parsing", "YOUR-LANGUAGE-NAME"] 6 | categories = ["parsing", "text-editors"] 7 | repository = "https://github.com/tree-sitter/tree-sitter-YOUR-LANGUAGE-NAME" 8 | edition = "2018" 9 | license = "MIT" 10 | 11 | build = "bindings/rust/build.rs" 12 | include = [ 13 | "bindings/rust/*", 14 | "grammar.js", 15 | "queries/*", 16 | "src/*", 17 | ] 18 | 19 | [lib] 20 | path = "bindings/rust/lib.rs" 21 | 22 | [dependencies] 23 | tree-sitter = "~0.20.10" 24 | 25 | [build-dependencies] 26 | cc = "1.0" 27 | 
-------------------------------------------------------------------------------- /tree-sitter-ziggy/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Loris Cro 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/binding.gyp: -------------------------------------------------------------------------------- 1 | { 2 | "targets": [ 3 | { 4 | "target_name": "tree_sitter_YOUR_LANGUAGE_NAME_binding", 5 | "include_dirs": [ 6 | " 3 | #include "nan.h" 4 | 5 | using namespace v8; 6 | 7 | extern "C" TSLanguage * tree_sitter_YOUR_LANGUAGE_NAME(); 8 | 9 | namespace { 10 | 11 | NAN_METHOD(New) {} 12 | 13 | void Init(Local exports, Local module) { 14 | Local tpl = Nan::New(New); 15 | tpl->SetClassName(Nan::New("Language").ToLocalChecked()); 16 | tpl->InstanceTemplate()->SetInternalFieldCount(1); 17 | 18 | Local constructor = Nan::GetFunction(tpl).ToLocalChecked(); 19 | Local instance = constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked(); 20 | Nan::SetInternalFieldPointer(instance, 0, tree_sitter_YOUR_LANGUAGE_NAME()); 21 | 22 | Nan::Set(instance, Nan::New("name").ToLocalChecked(), Nan::New("YOUR_LANGUAGE_NAME").ToLocalChecked()); 23 | Nan::Set(module, Nan::New("exports").ToLocalChecked(), instance); 24 | } 25 | 26 | NODE_MODULE(tree_sitter_YOUR_LANGUAGE_NAME_binding, Init) 27 | 28 | } // namespace 29 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/bindings/node/index.js: -------------------------------------------------------------------------------- 1 | try { 2 | module.exports = require("../../build/Release/tree_sitter_YOUR_LANGUAGE_NAME_binding"); 3 | } catch (error1) { 4 | if (error1.code !== 'MODULE_NOT_FOUND') { 5 | throw error1; 6 | } 7 | try { 8 | module.exports = require("../../build/Debug/tree_sitter_YOUR_LANGUAGE_NAME_binding"); 9 | } catch (error2) { 10 | if (error2.code !== 'MODULE_NOT_FOUND') { 11 | throw error2; 12 | } 13 | throw error1 14 | } 15 | } 16 | 17 | try { 18 | module.exports.nodeTypeInfo = require("../../src/node-types.json"); 19 | } catch (_) {} 20 | 
-------------------------------------------------------------------------------- /tree-sitter-ziggy/bindings/rust/build.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | let src_dir = std::path::Path::new("src"); 3 | 4 | let mut c_config = cc::Build::new(); 5 | c_config.include(&src_dir); 6 | c_config 7 | .flag_if_supported("-Wno-unused-parameter") 8 | .flag_if_supported("-Wno-unused-but-set-variable") 9 | .flag_if_supported("-Wno-trigraphs"); 10 | let parser_path = src_dir.join("parser.c"); 11 | c_config.file(&parser_path); 12 | 13 | // If your language uses an external scanner written in C, 14 | // then include this block of code: 15 | 16 | /* 17 | let scanner_path = src_dir.join("scanner.c"); 18 | c_config.file(&scanner_path); 19 | println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap()); 20 | */ 21 | 22 | c_config.compile("parser"); 23 | println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap()); 24 | 25 | // If your language uses an external scanner written in C++, 26 | // then include this block of code: 27 | 28 | /* 29 | let mut cpp_config = cc::Build::new(); 30 | cpp_config.cpp(true); 31 | cpp_config.include(&src_dir); 32 | cpp_config 33 | .flag_if_supported("-Wno-unused-parameter") 34 | .flag_if_supported("-Wno-unused-but-set-variable"); 35 | let scanner_path = src_dir.join("scanner.cc"); 36 | cpp_config.file(&scanner_path); 37 | cpp_config.compile("scanner"); 38 | println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap()); 39 | */ 40 | } 41 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/bindings/rust/lib.rs: -------------------------------------------------------------------------------- 1 | //! This crate provides ziggy language support for the [tree-sitter][] parsing library. 2 | //! 3 | //! Typically, you will use the [language][language func] function to add this language to a 4 | //! 
tree-sitter [Parser][], and then use the parser to parse some code: 5 | //! 6 | //! ``` 7 | //! let code = ""; 8 | //! let mut parser = tree_sitter::Parser::new(); 9 | //! parser.set_language(tree_sitter_ziggy::language()).expect("Error loading ziggy grammar"); 10 | //! let tree = parser.parse(code, None).unwrap(); 11 | //! ``` 12 | //! 13 | //! [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html 14 | //! [language func]: fn.language.html 15 | //! [Parser]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Parser.html 16 | //! [tree-sitter]: https://tree-sitter.github.io/ 17 | 18 | use tree_sitter::Language; 19 | 20 | extern "C" { 21 | fn tree_sitter_ziggy() -> Language; 22 | } 23 | 24 | /// Get the tree-sitter [Language][] for this grammar. 25 | /// 26 | /// [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html 27 | pub fn language() -> Language { 28 | unsafe { tree_sitter_ziggy() } 29 | } 30 | 31 | /// The content of the [`node-types.json`][] file for this grammar. 
32 | /// 33 | /// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers#static-node-types 34 | pub const NODE_TYPES: &'static str = include_str!("../../src/node-types.json"); 35 | 36 | // Uncomment these to include any queries that this grammar contains 37 | 38 | // pub const HIGHLIGHTS_QUERY: &'static str = include_str!("../../queries/highlights.scm"); 39 | // pub const INJECTIONS_QUERY: &'static str = include_str!("../../queries/injections.scm"); 40 | // pub const LOCALS_QUERY: &'static str = include_str!("../../queries/locals.scm"); 41 | // pub const TAGS_QUERY: &'static str = include_str!("../../queries/tags.scm"); 42 | 43 | #[cfg(test)] 44 | mod tests { 45 | #[test] 46 | fn test_can_load_grammar() { 47 | let mut parser = tree_sitter::Parser::new(); 48 | parser 49 | .set_language(super::language()) 50 | .expect("Error loading ziggy language"); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/grammar.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable 80001 */ 2 | /* eslint-disable arrow-parens */ 3 | /* eslint-disable camelcase */ 4 | /* eslint-disable-next-line spaced-comment */ 5 | /// 6 | //@ts-check 7 | 8 | const 9 | bin = /[01]/, 10 | bin_ = seq(optional("_"), bin), 11 | oct = /[0-7]/, 12 | oct_ = seq(optional("_"), oct), 13 | hex = /[0-9a-fA-F]/, 14 | hex_ = seq(optional("_"), hex), 15 | dec = /[0-9]/, 16 | dec_ = seq(optional("_"), dec), 17 | bin_int = seq(bin, repeat(bin_)), 18 | oct_int = seq(oct, repeat(oct_)), 19 | dec_int = seq(dec, repeat(dec_)), 20 | hex_int = seq(hex, repeat(hex_)) 21 | 22 | module.exports = grammar({ 23 | name: 'ziggy', 24 | 25 | extras: $ => [/\s/], 26 | 27 | rules: { 28 | document: $ => seq( 29 | optional($.top_comment), 30 | optional(choice( 31 | $.top_level_struct, 32 | $._value, 33 | )), 34 | ), 35 | 36 | _value: $ => choice( 37 | $.struct, 38 | $.map, 39 | $.array, 40 | 
$.tag_string, 41 | $.string, 42 | $.float, 43 | $.integer, 44 | $.true, 45 | $.false, 46 | $.null, 47 | ), 48 | 49 | top_level_struct: $ => seq(commaSep1($.struct_field), optional($.comment)), 50 | 51 | struct: $ => prec(1, seq( 52 | field('name', optional($.struct_name)), 53 | '{', 54 | commaSep($.struct_field), 55 | optional($.comment), 56 | '}', 57 | )), 58 | 59 | struct_name: $ => seq(/[A-Z]/, repeat(/[a-zA-Z0-9_]/)), 60 | 61 | struct_field: $ => seq( 62 | optional($.comment), 63 | '.', 64 | field('key', $.identifier), 65 | '=', 66 | field('value', $._value), 67 | ), 68 | 69 | map: $ => seq( 70 | '{', 71 | commaSep($.map_field), 72 | optional($.comment), 73 | '}', 74 | ), 75 | 76 | map_field: $ => seq( 77 | optional($.comment), 78 | field('key', $.string), 79 | ':', 80 | field('value', $._value), 81 | ), 82 | 83 | array: $ => seq('[', commaSep($.array_elem), optional($.comment),']'), 84 | 85 | array_elem: $ => seq( 86 | optional($.comment), 87 | $._value, 88 | ), 89 | 90 | tag_string: $ => seq('@', field('name', $.tag), '(', $.quoted_string, ')'), 91 | 92 | tag: _ => seq(/[a-z]/, repeat(/[a-z_0-9]/)), 93 | 94 | 95 | string: $ => choice( 96 | $.quoted_string, 97 | repeat1($.line_string), 98 | ), 99 | 100 | line_string: $ => seq("\\\\", /[^\n]*/), 101 | 102 | quoted_string: $ => seq( 103 | '"', 104 | repeat(choice( 105 | token.immediate(prec(1, /[^"\\]+/)), 106 | $.escape_sequence, 107 | )), 108 | '"', 109 | ), 110 | 111 | 112 | escape_sequence: _ => seq( 113 | "\\", 114 | choice(/x[0-9a-fA-f]{2}/, /u\{[0-9a-fA-F]+\}/, /[nr\\t'"]/) 115 | ), 116 | 117 | identifier: (_) => { 118 | const identifier_start = /[a-zA-Z_]/; 119 | const identifier_part = choice(identifier_start, /[0-9]/); 120 | return token(seq(identifier_start, repeat(identifier_part))); 121 | }, 122 | 123 | float: (_) => choice( 124 | token( 125 | seq(/[-]?/,"0x", hex_int, ".", hex_int, optional(seq(/[pP][-+]?/, dec_int))) 126 | ), 127 | token(seq(/[-]?/,dec_int, ".", dec_int, optional(seq(/[eE][-+]?/, 
dec_int)))), 128 |       token(seq(/[-]?/,"0x", hex_int, /[pP][-+]?/, dec_int)), 129 |       token(seq(/[-]?/,dec_int, /[eE][-+]?/, dec_int)) 130 |     ), 131 | 132 |     integer: (_) => choice( 133 |       token(seq(/[-]?/,"0b", bin_int)), 134 |       token(seq(/[-]?/,"0o", oct_int)), 135 |       token(seq(/[-]?/,"0x", hex_int)), 136 |       token(seq(/[-]?/,dec_int)) 137 |     ), 138 | 139 | 140 |     true: _ => 'true', 141 | 142 |     false: _ => 'false', 143 | 144 |     null: _ => 'null', 145 | 146 |     comment: _ => repeat1(token(seq('//', token.immediate(/[^!]/), /.*/))), 147 |     top_comment: $ => repeat1(token(seq('//!', /.*/))), 148 |   } 149 | }); 150 | 151 | /** 152 |  * Creates a rule to match one or more of the rules separated by a comma, allowing an optional trailing comma 153 |  * 154 |  * @param {RuleOrLiteral} rule 155 |  * 156 |  * @return {SeqRule} 157 |  * 158 |  */ 159 | function commaSep1(rule) { 160 |   return seq(rule, repeat(seq(",", rule)), optional(",")); 161 | } 162 | 163 | /** 164 |  * Creates a rule to match zero or more of the rules separated by a comma, allowing an optional trailing comma 165 |  * 166 |  * @param {RuleOrLiteral} rule 167 |  * 168 |  * @return {Rule} 169 |  * 170 |  */ 171 | function commaSep(rule) { 172 |   return optional(commaSep1(rule)); 173 | } 174 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 |   "name": "tree-sitter-YOUR-LANGUAGE-NAME", 3 |   "version": "0.0.1", 4 |   "lockfileVersion": 3, 5 |   "requires": true, 6 |   "packages": { 7 |     "": { 8 |       "name": "tree-sitter-YOUR-LANGUAGE-NAME", 9 |       "version": "0.0.1", 10 |       "dependencies": { 11 |         "nan": "^2.12.1" 12 |       }, 13 |       "devDependencies": { 14 |         "tree-sitter-cli": "^0.20.8" 15 |       } 16 |     }, 17 |     "node_modules/nan": { 18 |       "version": "2.18.0", 19 |       "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz", 20 |       "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==" 21 |     }, 22 | 
"node_modules/tree-sitter-cli": { 23 | "version": "0.20.8", 24 | "resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.20.8.tgz", 25 | "integrity": "sha512-XjTcS3wdTy/2cc/ptMLc/WRyOLECRYcMTrSWyhZnj1oGSOWbHLTklgsgRICU3cPfb0vy+oZCC33M43u6R1HSCA==", 26 | "dev": true, 27 | "hasInstallScript": true, 28 | "bin": { 29 | "tree-sitter": "cli.js" 30 | } 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tree-sitter-YOUR-LANGUAGE-NAME", 3 | "version": "0.0.1", 4 | "description": "YOUR-LANGUAGE-NAME grammar for tree-sitter", 5 | "main": "bindings/node", 6 | "keywords": [ 7 | "parsing", 8 | "incremental" 9 | ], 10 | "dependencies": { 11 | "nan": "^2.12.1" 12 | }, 13 | "devDependencies": { 14 | "tree-sitter-cli": "^0.20.8" 15 | }, 16 | "scripts": { 17 | "test": "tree-sitter test" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/queries/contexts.scm: -------------------------------------------------------------------------------- 1 | ; Credits to nvim-treesitter/nvim-treesitter-context 2 | [ 3 | (struct) 4 | (struct_field) 5 | (map) 6 | (map_field) 7 | ] @context 8 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/queries/highlights.scm: -------------------------------------------------------------------------------- 1 | [ 2 | (true) 3 | (false) 4 | ] @constant.builtin.boolean 5 | 6 | (null) @constant.builtin 7 | 8 | [ 9 | (integer) 10 | (float) 11 | ] @constant.numeric 12 | 13 | 14 | (struct_field 15 | key: (_) @keyword) 16 | 17 | (struct 18 | name: (_) @type) 19 | 20 | (tag) @function 21 | 22 | [ 23 | (string) 24 | (line_string)* 25 | ] @string 26 | 27 | 28 | (comment) @comment.line 29 | 30 | (escape_sequence) @constant.character.escape 31 | 32 | 
(ERROR) @error 33 | 34 | "," @punctuation.delimiter 35 | 36 | [ 37 | "[" 38 | "]" 39 | "{" 40 | "}" 41 | "(" 42 | ")" 43 | ] @punctuation.bracket 44 | 45 | 46 | (top_comment) @comment.line 47 | 48 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/queries/indents.scm: -------------------------------------------------------------------------------- 1 | [ 2 | (struct) 3 | (map) 4 | (array) 5 | ] @indent 6 | 7 | [ 8 | "]" 9 | "}" 10 | ] @outdent 11 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/queries/rainbows.scm: -------------------------------------------------------------------------------- 1 | [ 2 | (struct) 3 | (map) 4 | (array) 5 | ] @rainbow.scope 6 | 7 | [ 8 | "[" "]" 9 | "{" "}" 10 | ] @rainbow.bracket 11 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/src/node-types.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "array", 4 | "named": true, 5 | "fields": {}, 6 | "children": { 7 | "multiple": true, 8 | "required": false, 9 | "types": [ 10 | { 11 | "type": "array_elem", 12 | "named": true 13 | }, 14 | { 15 | "type": "comment", 16 | "named": true 17 | } 18 | ] 19 | } 20 | }, 21 | { 22 | "type": "array_elem", 23 | "named": true, 24 | "fields": {}, 25 | "children": { 26 | "multiple": true, 27 | "required": true, 28 | "types": [ 29 | { 30 | "type": "array", 31 | "named": true 32 | }, 33 | { 34 | "type": "comment", 35 | "named": true 36 | }, 37 | { 38 | "type": "false", 39 | "named": true 40 | }, 41 | { 42 | "type": "float", 43 | "named": true 44 | }, 45 | { 46 | "type": "integer", 47 | "named": true 48 | }, 49 | { 50 | "type": "map", 51 | "named": true 52 | }, 53 | { 54 | "type": "null", 55 | "named": true 56 | }, 57 | { 58 | "type": "string", 59 | "named": true 60 | }, 61 | { 62 | "type": "struct", 63 | "named": true 64 | }, 65 | { 66 | 
"type": "tag_string", 67 | "named": true 68 | }, 69 | { 70 | "type": "true", 71 | "named": true 72 | } 73 | ] 74 | } 75 | }, 76 | { 77 | "type": "comment", 78 | "named": true, 79 | "fields": {} 80 | }, 81 | { 82 | "type": "document", 83 | "named": true, 84 | "fields": {}, 85 | "children": { 86 | "multiple": true, 87 | "required": false, 88 | "types": [ 89 | { 90 | "type": "array", 91 | "named": true 92 | }, 93 | { 94 | "type": "false", 95 | "named": true 96 | }, 97 | { 98 | "type": "float", 99 | "named": true 100 | }, 101 | { 102 | "type": "integer", 103 | "named": true 104 | }, 105 | { 106 | "type": "map", 107 | "named": true 108 | }, 109 | { 110 | "type": "null", 111 | "named": true 112 | }, 113 | { 114 | "type": "string", 115 | "named": true 116 | }, 117 | { 118 | "type": "struct", 119 | "named": true 120 | }, 121 | { 122 | "type": "tag_string", 123 | "named": true 124 | }, 125 | { 126 | "type": "top_comment", 127 | "named": true 128 | }, 129 | { 130 | "type": "top_level_struct", 131 | "named": true 132 | }, 133 | { 134 | "type": "true", 135 | "named": true 136 | } 137 | ] 138 | } 139 | }, 140 | { 141 | "type": "escape_sequence", 142 | "named": true, 143 | "fields": {} 144 | }, 145 | { 146 | "type": "float", 147 | "named": true, 148 | "fields": {} 149 | }, 150 | { 151 | "type": "integer", 152 | "named": true, 153 | "fields": {} 154 | }, 155 | { 156 | "type": "line_string", 157 | "named": true, 158 | "fields": {} 159 | }, 160 | { 161 | "type": "map", 162 | "named": true, 163 | "fields": {}, 164 | "children": { 165 | "multiple": true, 166 | "required": false, 167 | "types": [ 168 | { 169 | "type": "comment", 170 | "named": true 171 | }, 172 | { 173 | "type": "map_field", 174 | "named": true 175 | } 176 | ] 177 | } 178 | }, 179 | { 180 | "type": "map_field", 181 | "named": true, 182 | "fields": { 183 | "key": { 184 | "multiple": false, 185 | "required": true, 186 | "types": [ 187 | { 188 | "type": "string", 189 | "named": true 190 | } 191 | ] 192 | }, 193 | 
"value": { 194 | "multiple": false, 195 | "required": true, 196 | "types": [ 197 | { 198 | "type": "array", 199 | "named": true 200 | }, 201 | { 202 | "type": "false", 203 | "named": true 204 | }, 205 | { 206 | "type": "float", 207 | "named": true 208 | }, 209 | { 210 | "type": "integer", 211 | "named": true 212 | }, 213 | { 214 | "type": "map", 215 | "named": true 216 | }, 217 | { 218 | "type": "null", 219 | "named": true 220 | }, 221 | { 222 | "type": "string", 223 | "named": true 224 | }, 225 | { 226 | "type": "struct", 227 | "named": true 228 | }, 229 | { 230 | "type": "tag_string", 231 | "named": true 232 | }, 233 | { 234 | "type": "true", 235 | "named": true 236 | } 237 | ] 238 | } 239 | }, 240 | "children": { 241 | "multiple": false, 242 | "required": false, 243 | "types": [ 244 | { 245 | "type": "comment", 246 | "named": true 247 | } 248 | ] 249 | } 250 | }, 251 | { 252 | "type": "quoted_string", 253 | "named": true, 254 | "fields": {}, 255 | "children": { 256 | "multiple": true, 257 | "required": false, 258 | "types": [ 259 | { 260 | "type": "escape_sequence", 261 | "named": true 262 | } 263 | ] 264 | } 265 | }, 266 | { 267 | "type": "string", 268 | "named": true, 269 | "fields": {}, 270 | "children": { 271 | "multiple": true, 272 | "required": true, 273 | "types": [ 274 | { 275 | "type": "line_string", 276 | "named": true 277 | }, 278 | { 279 | "type": "quoted_string", 280 | "named": true 281 | } 282 | ] 283 | } 284 | }, 285 | { 286 | "type": "struct", 287 | "named": true, 288 | "fields": { 289 | "name": { 290 | "multiple": false, 291 | "required": false, 292 | "types": [ 293 | { 294 | "type": "struct_name", 295 | "named": true 296 | } 297 | ] 298 | } 299 | }, 300 | "children": { 301 | "multiple": true, 302 | "required": false, 303 | "types": [ 304 | { 305 | "type": "comment", 306 | "named": true 307 | }, 308 | { 309 | "type": "struct_field", 310 | "named": true 311 | } 312 | ] 313 | } 314 | }, 315 | { 316 | "type": "struct_field", 317 | "named": true, 
318 | "fields": { 319 | "key": { 320 | "multiple": false, 321 | "required": true, 322 | "types": [ 323 | { 324 | "type": "identifier", 325 | "named": true 326 | } 327 | ] 328 | }, 329 | "value": { 330 | "multiple": false, 331 | "required": true, 332 | "types": [ 333 | { 334 | "type": "array", 335 | "named": true 336 | }, 337 | { 338 | "type": "false", 339 | "named": true 340 | }, 341 | { 342 | "type": "float", 343 | "named": true 344 | }, 345 | { 346 | "type": "integer", 347 | "named": true 348 | }, 349 | { 350 | "type": "map", 351 | "named": true 352 | }, 353 | { 354 | "type": "null", 355 | "named": true 356 | }, 357 | { 358 | "type": "string", 359 | "named": true 360 | }, 361 | { 362 | "type": "struct", 363 | "named": true 364 | }, 365 | { 366 | "type": "tag_string", 367 | "named": true 368 | }, 369 | { 370 | "type": "true", 371 | "named": true 372 | } 373 | ] 374 | } 375 | }, 376 | "children": { 377 | "multiple": false, 378 | "required": false, 379 | "types": [ 380 | { 381 | "type": "comment", 382 | "named": true 383 | } 384 | ] 385 | } 386 | }, 387 | { 388 | "type": "struct_name", 389 | "named": true, 390 | "fields": {} 391 | }, 392 | { 393 | "type": "tag", 394 | "named": true, 395 | "fields": {} 396 | }, 397 | { 398 | "type": "tag_string", 399 | "named": true, 400 | "fields": { 401 | "name": { 402 | "multiple": false, 403 | "required": true, 404 | "types": [ 405 | { 406 | "type": "tag", 407 | "named": true 408 | } 409 | ] 410 | } 411 | }, 412 | "children": { 413 | "multiple": false, 414 | "required": true, 415 | "types": [ 416 | { 417 | "type": "quoted_string", 418 | "named": true 419 | } 420 | ] 421 | } 422 | }, 423 | { 424 | "type": "top_comment", 425 | "named": true, 426 | "fields": {} 427 | }, 428 | { 429 | "type": "top_level_struct", 430 | "named": true, 431 | "fields": {}, 432 | "children": { 433 | "multiple": true, 434 | "required": true, 435 | "types": [ 436 | { 437 | "type": "comment", 438 | "named": true 439 | }, 440 | { 441 | "type": "struct_field", 
442 | "named": true 443 | } 444 | ] 445 | } 446 | }, 447 | { 448 | "type": "\"", 449 | "named": false 450 | }, 451 | { 452 | "type": "(", 453 | "named": false 454 | }, 455 | { 456 | "type": ")", 457 | "named": false 458 | }, 459 | { 460 | "type": ",", 461 | "named": false 462 | }, 463 | { 464 | "type": ".", 465 | "named": false 466 | }, 467 | { 468 | "type": ":", 469 | "named": false 470 | }, 471 | { 472 | "type": "=", 473 | "named": false 474 | }, 475 | { 476 | "type": "@", 477 | "named": false 478 | }, 479 | { 480 | "type": "[", 481 | "named": false 482 | }, 483 | { 484 | "type": "\\", 485 | "named": false 486 | }, 487 | { 488 | "type": "\\\\", 489 | "named": false 490 | }, 491 | { 492 | "type": "]", 493 | "named": false 494 | }, 495 | { 496 | "type": "false", 497 | "named": true 498 | }, 499 | { 500 | "type": "identifier", 501 | "named": true 502 | }, 503 | { 504 | "type": "null", 505 | "named": true 506 | }, 507 | { 508 | "type": "true", 509 | "named": true 510 | }, 511 | { 512 | "type": "{", 513 | "named": false 514 | }, 515 | { 516 | "type": "}", 517 | "named": false 518 | } 519 | ] -------------------------------------------------------------------------------- /tree-sitter-ziggy/src/root.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const testing = std.testing; 3 | 4 | export fn add(a: i32, b: i32) i32 { 5 | return a + b; 6 | } 7 | 8 | test "basic add functionality" { 9 | try testing.expect(add(3, 7) == 10); 10 | } 11 | -------------------------------------------------------------------------------- /tree-sitter-ziggy/src/tree_sitter/parser.h: -------------------------------------------------------------------------------- 1 | #ifndef TREE_SITTER_PARSER_H_ 2 | #define TREE_SITTER_PARSER_H_ 3 | 4 | #ifdef __cplusplus 5 | extern "C" { 6 | #endif 7 | 8 | #include 9 | #include 10 | #include 11 | 12 | #define ts_builtin_sym_error ((TSSymbol)-1) 13 | #define ts_builtin_sym_end 0 14 | 
// Fixed size of the scratch buffer an external scanner may use when
// serializing its state (see external_scanner.serialize below).
#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024

// Forward-declare the core API types when the public tree-sitter
// header has not already been included.
#ifndef TREE_SITTER_API_H_
typedef uint16_t TSStateId;
typedef uint16_t TSSymbol;
typedef uint16_t TSFieldId;
typedef struct TSLanguage TSLanguage;
#endif

// Maps one child of a production to a named field.
typedef struct {
  TSFieldId field_id;
  uint8_t child_index;  // position of the child within the production
  bool inherited;       // NOTE(review): presumably the field comes from a hidden child — confirm against generator
} TSFieldMapEntry;

// A slice into the language's field_map_entries array.
typedef struct {
  uint16_t index;   // starting offset into field_map_entries
  uint16_t length;  // number of entries in the slice
} TSFieldMapSlice;

// Per-symbol display properties consulted when exposing nodes.
typedef struct {
  bool visible;
  bool named;
  bool supertype;
} TSSymbolMetadata;

typedef struct TSLexer TSLexer;

// Interface handed to the generated lex functions and to external
// scanners; drives the character-by-character scan.
struct TSLexer {
  int32_t lookahead;       // current lookahead code point
  TSSymbol result_symbol;  // symbol produced by the scan; set via ACCEPT_TOKEN
  // Consume the lookahead; the bool is the `skip` flag (see START_LEXER),
  // which marks the consumed character as skipped rather than part of the token.
  void (*advance)(TSLexer *, bool);
  // Mark the current position as the end of the token being scanned.
  void (*mark_end)(TSLexer *);
  uint32_t (*get_column)(TSLexer *);
  bool (*is_at_included_range_start)(const TSLexer *);
  bool (*eof)(const TSLexer *);
};

// Kinds of actions stored in the parse table.
typedef enum {
  TSParseActionTypeShift,
  TSParseActionTypeReduce,
  TSParseActionTypeAccept,
  TSParseActionTypeRecover,
} TSParseActionType;

// One parse action. The leading `type` byte (a TSParseActionType value)
// is shared by every member, so it selects which variant is active.
typedef union {
  struct {
    uint8_t type;
    TSStateId state;  // state to transition to after the shift
    bool extra;       // set by SHIFT_EXTRA()
    bool repetition;  // set by SHIFT_REPEAT()
  } shift;
  struct {
    uint8_t type;
    uint8_t child_count;        // number of children consumed by the reduction
    TSSymbol symbol;            // nonterminal produced by the reduction
    int16_t dynamic_precedence;
    uint16_t production_id;
  } reduce;
  uint8_t type;
} TSParseAction;

// Lexing configuration for one parse state.
typedef struct {
  uint16_t lex_state;           // state for the main lex function
  uint16_t external_lex_state;  // state for the external scanner, if any
} TSLexMode;

// Entry in the parse-actions array: either a header carrying the count
// of actions that follow, or one of those actions.
typedef union {
  TSParseAction action;
  struct {
    uint8_t count;  // number of TSParseAction entries that follow
    bool reusable;
  } entry;
} TSParseActionEntry;

// Complete definition of a generated language. The field order and
// layout are a contract with generated parser.c files — do not reorder.
struct TSLanguage {
  uint32_t version;
  uint32_t symbol_count;
  uint32_t alias_count;
  uint32_t token_count;
  uint32_t external_token_count;
  uint32_t state_count;
  uint32_t large_state_count;   // states stored in the dense parse_table
  uint32_t production_id_count;
  uint32_t field_count;
  uint16_t max_alias_sequence_length;
  const uint16_t *parse_table;             // dense table for the first large_state_count states
  const uint16_t *small_parse_table;       // compact encoding for the remaining states
  const uint32_t *small_parse_table_map;   // per-state offsets into small_parse_table
  const TSParseActionEntry *parse_actions;
  const char * const *symbol_names;
  const char * const *field_names;
  const TSFieldMapSlice *field_map_slices;
  const TSFieldMapEntry *field_map_entries;
  const TSSymbolMetadata *symbol_metadata;
  const TSSymbol *public_symbol_map;
  const uint16_t *alias_map;
  const TSSymbol *alias_sequences;
  const TSLexMode *lex_modes;              // one entry per parse state
  bool (*lex_fn)(TSLexer *, TSStateId);          // main generated lex function
  bool (*keyword_lex_fn)(TSLexer *, TSStateId);  // keyword-extraction lex function, if used
  TSSymbol keyword_capture_token;
  // Hooks for a hand-written external scanner (NULL members when absent).
  struct {
    const bool *states;           // which parse states allow external tokens
    const TSSymbol *symbol_map;
    void *(*create)(void);
    void (*destroy)(void *);
    bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist);
    unsigned (*serialize)(void *, char *);            // buffer is TREE_SITTER_SERIALIZATION_BUFFER_SIZE bytes
    void (*deserialize)(void *, const char *, unsigned);
  } external_scanner;
  const TSStateId *primary_state_ids;
};

/*
 * Lexer Macros
 *
 * These macros let the generated lex functions express a character-level
 * state machine as a flat sequence of labeled gotos.
 */

// Suppress the "unused local variable" warning for `eof` below.
#ifdef _MSC_VER
#define UNUSED __pragma(warning(suppress : 4101))
#else
#define UNUSED __attribute__((unused))
#endif

// Declare the scan-loop locals and jump into the first state. Each
// ADVANCE/SKIP jumps back to next_state, which consumes the lookahead
// (passing `skip` through to the lexer) and refreshes it.
#define START_LEXER()           \
  bool result = false;          \
  bool skip = false;            \
  UNUSED                        \
  bool eof = false;             \
  int32_t lookahead;            \
  goto start;                   \
  next_state:                   \
  lexer->advance(lexer, skip);  \
  start:                        \
  skip = false;                 \
  lookahead = lexer->lookahead;

// Consume the lookahead as part of the token and move to `state_value`.
#define ADVANCE(state_value) \
  {                          \
    state = state_value;     \
    goto next_state;         \
  }

// Consume the lookahead as skipped whitespace and move to `state_value`.
#define SKIP(state_value)    \
  {                          \
    skip = true;             \
    state = state_value;     \
    goto next_state;         \
  }

// Record that `symbol_value` has been matched, ending at the current position.
#define ACCEPT_TOKEN(symbol_value)     \
  result = true;                       \
  lexer->result_symbol = symbol_value; \
  lexer->mark_end(lexer);

// Leave the lex function, reporting whether a token was accepted.
#define END_STATE() return result;
/*
 * Parse Table Macros
 *
 * Compact constructors used by generated parser.c files to build the
 * parse-table and parse-actions arrays as static initializers.
 */

// Small states are indexed relative to the end of the large-state range.
#define SMALL_STATE(id) ((id) - LARGE_STATE_COUNT)

#define STATE(id) id

#define ACTIONS(id) id

// Build a TSParseActionEntry that shifts to `state_value`.
#define SHIFT(state_value)            \
  {{                                  \
    .shift = {                        \
      .type = TSParseActionTypeShift, \
      .state = (state_value)          \
    }                                 \
  }}

// Shift to `state_value` within a repetition rule.
#define SHIFT_REPEAT(state_value)     \
  {{                                  \
    .shift = {                        \
      .type = TSParseActionTypeShift, \
      .state = (state_value),         \
      .repetition = true              \
    }                                 \
  }}

// Shift the current token with the `extra` flag set (no target state).
#define SHIFT_EXTRA()                 \
  {{                                  \
    .shift = {                        \
      .type = TSParseActionTypeShift, \
      .extra = true                   \
    }                                 \
  }}

// Reduce `child_count_val` children to `symbol_val`; the variadic tail
// may supply designated initializers such as .production_id or
// .dynamic_precedence.
#define REDUCE(symbol_val, child_count_val, ...) \
  {{                                             \
    .reduce = {                                  \
      .type = TSParseActionTypeReduce,           \
      .symbol = symbol_val,                      \
      .child_count = child_count_val,            \
      __VA_ARGS__                                \
    },                                           \
  }}

// Enter error recovery.
#define RECOVER()                    \
  {{                                 \
    .type = TSParseActionTypeRecover \
  }}

// Accept the input as a complete parse.
#define ACCEPT_INPUT()              \
  {{                                \
    .type = TSParseActionTypeAccept \
  }}

#ifdef __cplusplus
}
#endif

#endif  // TREE_SITTER_PARSER_H_