├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── README.md ├── build_all.sh ├── gdshader-lsp-vscode ├── .vscode │ ├── launch.json │ └── tasks.json ├── README.md ├── client │ ├── out │ │ ├── extension.js │ │ └── extension.js.map │ ├── package-lock.json │ ├── package.json │ ├── src │ │ └── extension.ts │ └── tsconfig.json ├── gdshader.tmLanguage.json ├── icon.png ├── language-configuration.json ├── package-lock.json ├── package.json ├── scripts │ └── e2e.sh ├── server │ └── gdshader-lsp ├── syntaxes │ ├── gdshader.tmLanguage.json │ └── gdshader.tmLanguage.yaml └── tsconfig.json ├── project.godot ├── src ├── completion.rs ├── interpreter │ ├── expression_interpreter.rs │ ├── mod.rs │ ├── statement_interpreter.rs │ └── top_level_interpreter.rs ├── lexer │ ├── mod.rs │ ├── operations.rs │ ├── token.rs │ └── token_stream.rs ├── lib.rs ├── main.rs ├── memory │ ├── functions.rs │ ├── hint.rs │ ├── mod.rs │ ├── render_modes.rs │ ├── scope.rs │ ├── types.rs │ └── variables.rs ├── nodes │ ├── expression_node.rs │ ├── mod.rs │ ├── statement_node.rs │ └── top_level_nodes.rs ├── parser │ ├── expression.rs │ ├── mod.rs │ ├── statement.rs │ └── top_level_parser.rs └── source_code.rs ├── test.gdshader └── yorb.gdshaderinc /.gitignore: -------------------------------------------------------------------------------- 1 | # Rust 2 | target/ 3 | Cargo.lock 4 | **/node_modules 5 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 3 | version = 3 4 | 5 | [[package]] 6 | name = "aho-corasick" 7 | version = "1.1.2" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" 10 | dependencies = [ 11 | "memchr", 12 | ] 13 | 14 | [[package]] 15 | name = "beef" 16 | version = "0.5.2" 17 | source = "registry+https://github.com/rust-lang/crates.io-index" 18 | checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" 19 | 20 | [[package]] 21 | name = "bitflags" 22 | version = "1.3.2" 23 | source = "registry+https://github.com/rust-lang/crates.io-index" 24 | checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" 25 | 26 | [[package]] 27 | name = "crossbeam-channel" 28 | version = "0.5.11" 29 | source = "registry+https://github.com/rust-lang/crates.io-index" 30 | checksum = "176dc175b78f56c0f321911d9c8eb2b77a78a4860b9c19db83835fea1a46649b" 31 | dependencies = [ 32 | "crossbeam-utils", 33 | ] 34 | 35 | [[package]] 36 | name = "crossbeam-utils" 37 | version = "0.8.19" 38 | source = "registry+https://github.com/rust-lang/crates.io-index" 39 | checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" 40 | 41 | [[package]] 42 | name = "fnv" 43 | version = "1.0.7" 44 | source = "registry+https://github.com/rust-lang/crates.io-index" 45 | checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" 46 | 47 | [[package]] 48 | name = "form_urlencoded" 49 | version = "1.2.1" 50 | source = "registry+https://github.com/rust-lang/crates.io-index" 51 | checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" 52 | dependencies = [ 53 | "percent-encoding", 54 | ] 55 | 56 | [[package]] 57 | name = "gdshader-lsp" 58 | version = "0.1.0" 59 | dependencies = [ 60 | "logos", 61 | "lsp-server", 62 | "lsp-types", 63 | "regex", 64 | 
"serde_json", 65 | "strum_macros", 66 | "walkdir", 67 | ] 68 | 69 | [[package]] 70 | name = "heck" 71 | version = "0.4.1" 72 | source = "registry+https://github.com/rust-lang/crates.io-index" 73 | checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" 74 | 75 | [[package]] 76 | name = "idna" 77 | version = "0.5.0" 78 | source = "registry+https://github.com/rust-lang/crates.io-index" 79 | checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" 80 | dependencies = [ 81 | "unicode-bidi", 82 | "unicode-normalization", 83 | ] 84 | 85 | [[package]] 86 | name = "itoa" 87 | version = "1.0.10" 88 | source = "registry+https://github.com/rust-lang/crates.io-index" 89 | checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" 90 | 91 | [[package]] 92 | name = "lazy_static" 93 | version = "1.4.0" 94 | source = "registry+https://github.com/rust-lang/crates.io-index" 95 | checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" 96 | 97 | [[package]] 98 | name = "log" 99 | version = "0.4.20" 100 | source = "registry+https://github.com/rust-lang/crates.io-index" 101 | checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" 102 | 103 | [[package]] 104 | name = "logos" 105 | version = "0.14.0" 106 | source = "registry+https://github.com/rust-lang/crates.io-index" 107 | checksum = "161971eb88a0da7ae0c333e1063467c5b5727e7fb6b710b8db4814eade3a42e8" 108 | dependencies = [ 109 | "logos-derive", 110 | ] 111 | 112 | [[package]] 113 | name = "logos-codegen" 114 | version = "0.14.0" 115 | source = "registry+https://github.com/rust-lang/crates.io-index" 116 | checksum = "8e31badd9de5131fdf4921f6473d457e3dd85b11b7f091ceb50e4df7c3eeb12a" 117 | dependencies = [ 118 | "beef", 119 | "fnv", 120 | "lazy_static", 121 | "proc-macro2", 122 | "quote", 123 | "regex-syntax", 124 | "syn", 125 | ] 126 | 127 | [[package]] 128 | name = "logos-derive" 129 | version = "0.14.0" 130 | source = "registry+https://github.com/rust-lang/crates.io-index" 131 | checksum = "1c2a69b3eb68d5bd595107c9ee58d7e07fe2bb5e360cc85b0f084dedac80de0a" 132 | dependencies = [ 133 | "logos-codegen", 134 | ] 135 | 136 | [[package]] 137 | name = "lsp-server" 138 | version = "0.7.6" 139 | source = "registry+https://github.com/rust-lang/crates.io-index" 140 | checksum = "248f65b78f6db5d8e1b1604b4098a28b43d21a8eb1deeca22b1c421b276c7095" 141 | dependencies = [ 142 | "crossbeam-channel", 143 | "log", 144 | "serde", 145 | "serde_json", 146 | ] 147 | 148 | [[package]] 149 | name = "lsp-types" 150 | version = "0.95.0" 151 | source = "registry+https://github.com/rust-lang/crates.io-index" 152 | checksum = "158c1911354ef73e8fe42da6b10c0484cb65c7f1007f28022e847706c1ab6984" 153 | dependencies = [ 154 | "bitflags", 155 | "serde", 156 | "serde_json", 157 | "serde_repr", 158 | "url", 159 | ] 160 | 161 | [[package]] 162 | name = "memchr" 163 | version = "2.7.1" 164 | source = "registry+https://github.com/rust-lang/crates.io-index" 165 | checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" 166 | 167 | [[package]] 168 | name = "percent-encoding" 169 | version = "2.3.1" 170 | source = "registry+https://github.com/rust-lang/crates.io-index" 171 | checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" 172 | 173 | [[package]] 174 | name = "proc-macro2" 175 | version = "1.0.78" 176 | source = "registry+https://github.com/rust-lang/crates.io-index" 177 | checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" 
178 | dependencies = [ 179 | "unicode-ident", 180 | ] 181 | 182 | [[package]] 183 | name = "quote" 184 | version = "1.0.35" 185 | source = "registry+https://github.com/rust-lang/crates.io-index" 186 | checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" 187 | dependencies = [ 188 | "proc-macro2", 189 | ] 190 | 191 | [[package]] 192 | name = "regex" 193 | version = "1.10.3" 194 | source = "registry+https://github.com/rust-lang/crates.io-index" 195 | checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" 196 | dependencies = [ 197 | "aho-corasick", 198 | "memchr", 199 | "regex-automata", 200 | "regex-syntax", 201 | ] 202 | 203 | [[package]] 204 | name = "regex-automata" 205 | version = "0.4.5" 206 | source = "registry+https://github.com/rust-lang/crates.io-index" 207 | checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" 208 | dependencies = [ 209 | "aho-corasick", 210 | "memchr", 211 | "regex-syntax", 212 | ] 213 | 214 | [[package]] 215 | name = "regex-syntax" 216 | version = "0.8.2" 217 | source = "registry+https://github.com/rust-lang/crates.io-index" 218 | checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" 219 | 220 | [[package]] 221 | name = "rustversion" 222 | version = "1.0.14" 223 | source = "registry+https://github.com/rust-lang/crates.io-index" 224 | checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" 225 | 226 | [[package]] 227 | name = "ryu" 228 | version = "1.0.16" 229 | source = "registry+https://github.com/rust-lang/crates.io-index" 230 | checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" 231 | 232 | [[package]] 233 | name = "same-file" 234 | version = "1.0.6" 235 | source = "registry+https://github.com/rust-lang/crates.io-index" 236 | checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" 237 | dependencies = [ 238 | "winapi-util", 239 | ] 240 | 241 | [[package]] 242 | name = "serde" 243 | version = "1.0.196" 244 | source = "registry+https://github.com/rust-lang/crates.io-index" 245 | checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" 246 | dependencies = [ 247 | "serde_derive", 248 | ] 249 | 250 | [[package]] 251 | name = "serde_derive" 252 | version = "1.0.196" 253 | source = "registry+https://github.com/rust-lang/crates.io-index" 254 | checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" 255 | dependencies = [ 256 | "proc-macro2", 257 | "quote", 258 | "syn", 259 | ] 260 | 261 | [[package]] 262 | name = "serde_json" 263 | version = "1.0.113" 264 | source = "registry+https://github.com/rust-lang/crates.io-index" 265 | checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" 266 | dependencies = [ 267 | "itoa", 268 | "ryu", 269 | "serde", 270 | ] 271 | 272 | [[package]] 273 | name = "serde_repr" 274 | version = "0.1.18" 275 | source = "registry+https://github.com/rust-lang/crates.io-index" 276 | checksum = "0b2e6b945e9d3df726b65d6ee24060aff8e3533d431f677a9695db04eff9dfdb" 277 | dependencies = [ 278 | "proc-macro2", 279 | "quote", 280 | "syn", 281 | ] 282 | 283 | [[package]] 284 | name = "strum_macros" 285 | version = "0.26.1" 286 | source = "registry+https://github.com/rust-lang/crates.io-index" 287 | checksum = "7a3417fc93d76740d974a01654a09777cb500428cc874ca9f45edfe0c4d4cd18" 288 | dependencies = [ 289 | "heck", 290 | "proc-macro2", 291 | "quote", 292 | "rustversion", 293 | "syn", 294 | ] 295 | 296 | [[package]] 297 | name = 
"syn" 298 | version = "2.0.49" 299 | source = "registry+https://github.com/rust-lang/crates.io-index" 300 | checksum = "915aea9e586f80826ee59f8453c1101f9d1c4b3964cd2460185ee8e299ada496" 301 | dependencies = [ 302 | "proc-macro2", 303 | "quote", 304 | "unicode-ident", 305 | ] 306 | 307 | [[package]] 308 | name = "tinyvec" 309 | version = "1.6.0" 310 | source = "registry+https://github.com/rust-lang/crates.io-index" 311 | checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" 312 | dependencies = [ 313 | "tinyvec_macros", 314 | ] 315 | 316 | [[package]] 317 | name = "tinyvec_macros" 318 | version = "0.1.1" 319 | source = "registry+https://github.com/rust-lang/crates.io-index" 320 | checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" 321 | 322 | [[package]] 323 | name = "unicode-bidi" 324 | version = "0.3.15" 325 | source = "registry+https://github.com/rust-lang/crates.io-index" 326 | checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" 327 | 328 | [[package]] 329 | name = "unicode-ident" 330 | version = "1.0.12" 331 | source = "registry+https://github.com/rust-lang/crates.io-index" 332 | checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" 333 | 334 | [[package]] 335 | name = "unicode-normalization" 336 | version = "0.1.22" 337 | source = "registry+https://github.com/rust-lang/crates.io-index" 338 | checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" 339 | dependencies = [ 340 | "tinyvec", 341 | ] 342 | 343 | [[package]] 344 | name = "url" 345 | version = "2.5.0" 346 | source = "registry+https://github.com/rust-lang/crates.io-index" 347 | checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" 348 | dependencies = [ 349 | "form_urlencoded", 350 | "idna", 351 | "percent-encoding", 352 | "serde", 353 | ] 354 | 355 | [[package]] 356 | name = "walkdir" 357 | version = "2.4.0" 358 | source = "registry+https://github.com/rust-lang/crates.io-index" 359 | checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" 360 | dependencies = [ 361 | "same-file", 362 | "winapi-util", 363 | ] 364 | 365 | [[package]] 366 | name = "winapi" 367 | version = "0.3.9" 368 | source = "registry+https://github.com/rust-lang/crates.io-index" 369 | checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 370 | dependencies = [ 371 | "winapi-i686-pc-windows-gnu", 372 | "winapi-x86_64-pc-windows-gnu", 373 | ] 374 | 375 | [[package]] 376 | name = "winapi-i686-pc-windows-gnu" 377 | version = "0.4.0" 378 | source = "registry+https://github.com/rust-lang/crates.io-index" 379 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 380 | 381 | [[package]] 382 | name = "winapi-util" 383 | version = "0.1.6" 384 | source = "registry+https://github.com/rust-lang/crates.io-index" 385 | checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" 386 | dependencies = [ 387 | "winapi", 388 | ] 389 | 390 | [[package]] 391 | name = "winapi-x86_64-pc-windows-gnu" 392 | version = "0.4.0" 393 | source = "registry+https://github.com/rust-lang/crates.io-index" 394 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 395 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "gdshader-lsp" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See 
more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | logos = "0.14" 10 | lsp-server = "0.7.6" 11 | lsp-types = "0.95.0" 12 | regex = "1.10" 13 | serde_json = "1.0.1" 14 | strum_macros = "0.26.1" 15 | walkdir = "2.4" 16 | 17 | [target.x86_64-apple-darwin] 18 | linker = "x86_64-apple-darwin14-clang" 19 | ar = "x86_64-apple-darwin14-ar" 20 | 21 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Introduction 2 | --- 3 | Gdshader-lsp is a language server for the Godot Shading Language that runs independently of the Godot engine. This allows you to edit gdshader files in your editor of choice. It is still a work in progress, but most of the key features of an LSP are included, including helpful error messages, hover hints, and code completion. 4 | 5 | This was made at the same time as [tree-sitter-gdshader](https://github.com/GodOfAvacyn/tree-sitter-gdshader), which is used for syntax highlighting (also a WIP, but support for Neovim is sort of there). 6 | ## Download Instructions 7 | --- 8 | Gdshader-lsp is now available as a VSCode extension! Installing it that way gives you built-in syntax highlighting, so you don't need to do any nonsense with my treesitter repo. 9 | 10 | To download the server for manual use, run the following command in a terminal to grab the binary: 11 | ``` 12 | wget https://github.com/GodOfAvacyn/gdshader-lsp/releases/download/v0.1/gdshader-lsp 13 | ``` 14 | Alternatively, you can download the source code and build it yourself. This project is written in Rust, so you will need Cargo installed to build it. 15 | 16 | If you are a Neovim user, here is how to manually add the language server to Neovim: 17 | 1. Create a custom Lua function somewhere in your Neovim configuration: 18 | ``` 19 | function gdshader() 20 | vim.lsp.start { 21 | name = "gdshader-lsp", 22 | cmd = { 23 | "", 24 | }, 25 | capabilities = vim.lsp.protocol.make_client_capabilities() 26 | } 27 | end 28 | ``` 29 | 2. When editing a .gdshader file, start the language server with ':lua gdshader()'. You'll need to call that function once for each new gdshader file you open, at least until I can set up a real client for Neovim (see the autocommand sketch at the end of this README). 30 | 3. (Optional) Follow the steps at [tree-sitter-gdshader](https://github.com/GodOfAvacyn/tree-sitter-gdshader) to get syntax highlighting support. 31 | 32 | ## Features 33 | --- 34 | Gdshader-lsp currently has support for code completion, hover hints, error messages, and include statements. It lacks support for some key features - notably, preprocessor macros other than #include (which will probably be a deal-breaker for many people). Here is a full list of coming features that, in my opinion, would make it more usable: 35 | * Jump to definition 36 | * Preprocessor macro support 37 | * A spot among the supported lspconfig servers for Neovim.
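
If you'd rather not call gdshader() by hand every time, here is a minimal sketch of an autocommand that starts the server automatically. It assumes Neovim 0.8+, that the gdshader() function above is defined globally in your config, and that nothing else already registers the gdshader filetype (the vim.filetype.add call is only needed in that case):
```
-- Hypothetical auto-start glue: register the filetype, then call the
-- gdshader() function from the snippet above whenever such a buffer opens.
vim.filetype.add({
  extension = {
    gdshader = "gdshader",
    gdshaderinc = "gdshader",
  },
})

vim.api.nvim_create_autocmd("FileType", {
  pattern = "gdshader",
  callback = function()
    gdshader() -- assumes the function above is defined before this autocmd fires
  end,
})
```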
38 | -------------------------------------------------------------------------------- /build_all.sh: -------------------------------------------------------------------------------- 1 | MACOS_TARGET="x86_64-apple-darwin" 2 | 3 | echo "Building target for platform ${MACOS_TARGET}" 4 | echo 5 | 6 | export LIBZ_SYS_STATIC=1 7 | 8 | # Use Clang for C/C++ builds 9 | export CC=o64-clang 10 | export CXX=o64-clang++ 11 | 12 | cargo build --release --target "${MACOS_TARGET}" 13 | 14 | echo 15 | echo Done 16 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "type": "extensionHost", 6 | "request": "launch", 7 | "name": "Launch Client", 8 | "runtimeExecutable": "${execPath}", 9 | "args": ["--extensionDevelopmentPath=${workspaceRoot}"], 10 | "outFiles": [ 11 | "${workspaceRoot}/client/out/**/*.js", 12 | ], 13 | "autoAttachChildProcesses": true, 14 | "preLaunchTask": { 15 | "type": "npm", 16 | "script": "watch" 17 | } 18 | }, 19 | { 20 | "name": "Language Server E2E Test", 21 | "type": "extensionHost", 22 | "request": "launch", 23 | "runtimeExecutable": "${execPath}", 24 | "args": [ 25 | "--extensionDevelopmentPath=${workspaceRoot}", 26 | ], 27 | "outFiles": ["${workspaceRoot}/client/out/test/**/*.js"] 28 | } 29 | ] 30 | } 31 | 32 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "type": "npm", 6 | "script": "compile", 7 | "group": "build", 8 | "presentation": { 9 | "panel": "dedicated", 10 | "reveal": "never" 11 | }, 12 | "problemMatcher": [ 13 | "$tsc" 14 | ] 15 | }, 16 | { 17 | "type": "npm", 18 | "script": "watch", 19 | "isBackground": true, 20 | "group": { 21 | "kind": "build", 22 | "isDefault": true 23 | }, 24 | "presentation": { 25 | "panel": "dedicated", 26 | "reveal": "never" 27 | }, 28 | "problemMatcher": [ 29 | "$tsc-watch" 30 | ] 31 | } 32 | ] 33 | } 34 | 35 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/README.md: -------------------------------------------------------------------------------- 1 | ### The GDShader Language Server 2 | --- 3 | This extension is a language server for the Godot shading language that runs independently of the Godot engine. It's a fully-featured language server (with some exceptions) and should offer a nice alternative to Godot's editor if you prefer VSCode, as I do.
4 | ### Features 5 | --- 6 | GDShader-lsp currently has support for: 7 | * Syntax highlighting 8 | * Smart code completion 9 | * Type checking & hover support 10 | * Helpful error messages 11 | * Include preprocessor macros 12 | ### Missing Features 13 | --- 14 | GDShader-lsp does not have (but will soon have) 15 | * Preprocessor support (other than include statements) 16 | * Jump-to-definition 17 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/client/out/extension.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | exports.deactivate = exports.activate = void 0; 4 | const path = require("path"); 5 | const vscode_1 = require("vscode"); 6 | const node_1 = require("vscode-languageclient/node"); 7 | let client; 8 | function activate(context) { 9 | const serverOptions = { 10 | command: context.asAbsolutePath(path.join('server', 'gdshader-lsp')) 11 | }; 12 | const clientOptions = { 13 | documentSelector: [{ scheme: "file", pattern: "**/*.{gdshader,gdshaderinc}" }], 14 | synchronize: { 15 | fileEvents: vscode_1.workspace.createFileSystemWatcher('**/.clientrc') 16 | } 17 | }; 18 | client = new node_1.LanguageClient('languageServerExample', 'Language Server Example', serverOptions, clientOptions); 19 | client.start(); 20 | } 21 | exports.activate = activate; 22 | function deactivate() { 23 | if (!client) { 24 | return undefined; 25 | } 26 | return client.stop(); 27 | } 28 | exports.deactivate = deactivate; 29 | //# sourceMappingURL=extension.js.map -------------------------------------------------------------------------------- /gdshader-lsp-vscode/client/out/extension.js.map: -------------------------------------------------------------------------------- 1 | {"version":3,"file":"extension.js","sourceRoot":"","sources":["../src/extension.ts"],"names":[],"mappings":";;;AAAA,6BAA6B;AAC7B,mCAAqD;AAErD,qDAGmC;AAEnC,IAAI,MAAsB,CAAC;AAE3B,SAAgB,QAAQ,CAAC,OAAyB;IAEjD,MAAM,aAAa,GAAG;QACrB,OAAO,EAAE,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC;KACpE,CAAC;IAEF,MAAM,aAAa,GAA0B;QAC5C,gBAAgB,EAAE,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,6BAA6B,EAAE,CAAC;QAC9E,WAAW,EAAE;YACZ,UAAU,EAAE,kBAAS,CAAC,uBAAuB,CAAC,cAAc,CAAC;SAC7D;KACD,CAAC;IAEF,MAAM,GAAG,IAAI,qBAAc,CAC1B,uBAAuB,EACvB,yBAAyB,EACzB,aAAa,EACb,aAAa,CACb,CAAC;IAEF,MAAM,CAAC,KAAK,EAAE,CAAC;AAChB,CAAC;AArBD,4BAqBC;AAED,SAAgB,UAAU;IACzB,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,OAAO,SAAS,CAAC;IAClB,CAAC;IACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;AACtB,CAAC;AALD,gCAKC"} -------------------------------------------------------------------------------- /gdshader-lsp-vscode/client/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gdshader-lsp-client", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "gdshader-lsp-client", 9 | "version": "1.0.0", 10 | "license": "ISC", 11 | "dependencies": { 12 | "vscode-languageclient": "^9.0.1" 13 | }, 14 | "devDependencies": { 15 | "@types/vscode": "^1.75.1", 16 | "@vscode/test-electron": "^2.3.8" 17 | }, 18 | "engines": { 19 | "vscode": "^1.75.0" 20 | } 21 | }, 22 | "node_modules/@tootallnate/once": { 23 | "version": "1.1.2", 24 | "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", 25 | "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", 26 
| "dev": true, 27 | "engines": { 28 | "node": ">= 6" 29 | } 30 | }, 31 | "node_modules/@types/vscode": { 32 | "version": "1.86.0", 33 | "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.86.0.tgz", 34 | "integrity": "sha512-DnIXf2ftWv+9LWOB5OJeIeaLigLHF7fdXF6atfc7X5g2w/wVZBgk0amP7b+ub5xAuW1q7qP5YcFvOcit/DtyCQ==", 35 | "dev": true 36 | }, 37 | "node_modules/@vscode/test-electron": { 38 | "version": "2.3.9", 39 | "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.3.9.tgz", 40 | "integrity": "sha512-z3eiChaCQXMqBnk2aHHSEkobmC2VRalFQN0ApOAtydL172zXGxTwGrRtviT5HnUB+Q+G3vtEYFtuQkYqBzYgMA==", 41 | "dev": true, 42 | "dependencies": { 43 | "http-proxy-agent": "^4.0.1", 44 | "https-proxy-agent": "^5.0.0", 45 | "jszip": "^3.10.1", 46 | "semver": "^7.5.2" 47 | }, 48 | "engines": { 49 | "node": ">=16" 50 | } 51 | }, 52 | "node_modules/agent-base": { 53 | "version": "6.0.2", 54 | "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", 55 | "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", 56 | "dev": true, 57 | "dependencies": { 58 | "debug": "4" 59 | }, 60 | "engines": { 61 | "node": ">= 6.0.0" 62 | } 63 | }, 64 | "node_modules/balanced-match": { 65 | "version": "1.0.2", 66 | "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", 67 | "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" 68 | }, 69 | "node_modules/brace-expansion": { 70 | "version": "2.0.1", 71 | "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", 72 | "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", 73 | "dependencies": { 74 | "balanced-match": "^1.0.0" 75 | } 76 | }, 77 | "node_modules/core-util-is": { 78 | "version": "1.0.3", 79 | "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", 80 | "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", 81 | "dev": true 82 | }, 83 | "node_modules/debug": { 84 | "version": "4.3.4", 85 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", 86 | "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", 87 | "dev": true, 88 | "dependencies": { 89 | "ms": "2.1.2" 90 | }, 91 | "engines": { 92 | "node": ">=6.0" 93 | }, 94 | "peerDependenciesMeta": { 95 | "supports-color": { 96 | "optional": true 97 | } 98 | } 99 | }, 100 | "node_modules/http-proxy-agent": { 101 | "version": "4.0.1", 102 | "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", 103 | "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", 104 | "dev": true, 105 | "dependencies": { 106 | "@tootallnate/once": "1", 107 | "agent-base": "6", 108 | "debug": "4" 109 | }, 110 | "engines": { 111 | "node": ">= 6" 112 | } 113 | }, 114 | "node_modules/https-proxy-agent": { 115 | "version": "5.0.1", 116 | "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", 117 | "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", 118 | "dev": true, 119 | "dependencies": { 120 | "agent-base": "6", 121 | "debug": "4" 122 | }, 123 | "engines": { 124 | "node": ">= 6" 125 | } 126 | }, 127 | "node_modules/immediate": { 128 | "version": 
"3.0.6", 129 | "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", 130 | "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", 131 | "dev": true 132 | }, 133 | "node_modules/inherits": { 134 | "version": "2.0.4", 135 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", 136 | "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", 137 | "dev": true 138 | }, 139 | "node_modules/isarray": { 140 | "version": "1.0.0", 141 | "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", 142 | "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", 143 | "dev": true 144 | }, 145 | "node_modules/jszip": { 146 | "version": "3.10.1", 147 | "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", 148 | "integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==", 149 | "dev": true, 150 | "dependencies": { 151 | "lie": "~3.3.0", 152 | "pako": "~1.0.2", 153 | "readable-stream": "~2.3.6", 154 | "setimmediate": "^1.0.5" 155 | } 156 | }, 157 | "node_modules/lie": { 158 | "version": "3.3.0", 159 | "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", 160 | "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", 161 | "dev": true, 162 | "dependencies": { 163 | "immediate": "~3.0.5" 164 | } 165 | }, 166 | "node_modules/lru-cache": { 167 | "version": "6.0.0", 168 | "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", 169 | "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", 170 | "dependencies": { 171 | "yallist": "^4.0.0" 172 | }, 173 | "engines": { 174 | "node": ">=10" 175 | } 176 | }, 177 | "node_modules/minimatch": { 178 | "version": "5.1.6", 179 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", 180 | "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", 181 | "dependencies": { 182 | "brace-expansion": "^2.0.1" 183 | }, 184 | "engines": { 185 | "node": ">=10" 186 | } 187 | }, 188 | "node_modules/ms": { 189 | "version": "2.1.2", 190 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", 191 | "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", 192 | "dev": true 193 | }, 194 | "node_modules/pako": { 195 | "version": "1.0.11", 196 | "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", 197 | "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", 198 | "dev": true 199 | }, 200 | "node_modules/process-nextick-args": { 201 | "version": "2.0.1", 202 | "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", 203 | "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", 204 | "dev": true 205 | }, 206 | "node_modules/readable-stream": { 207 | "version": "2.3.8", 208 | "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", 209 | "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", 210 | "dev": true, 211 | "dependencies": { 212 | "core-util-is": "~1.0.0", 213 | "inherits": "~2.0.3", 214 | "isarray": 
"~1.0.0", 215 | "process-nextick-args": "~2.0.0", 216 | "safe-buffer": "~5.1.1", 217 | "string_decoder": "~1.1.1", 218 | "util-deprecate": "~1.0.1" 219 | } 220 | }, 221 | "node_modules/safe-buffer": { 222 | "version": "5.1.2", 223 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", 224 | "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", 225 | "dev": true 226 | }, 227 | "node_modules/semver": { 228 | "version": "7.6.0", 229 | "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", 230 | "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", 231 | "dependencies": { 232 | "lru-cache": "^6.0.0" 233 | }, 234 | "bin": { 235 | "semver": "bin/semver.js" 236 | }, 237 | "engines": { 238 | "node": ">=10" 239 | } 240 | }, 241 | "node_modules/setimmediate": { 242 | "version": "1.0.5", 243 | "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", 244 | "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", 245 | "dev": true 246 | }, 247 | "node_modules/string_decoder": { 248 | "version": "1.1.1", 249 | "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", 250 | "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", 251 | "dev": true, 252 | "dependencies": { 253 | "safe-buffer": "~5.1.0" 254 | } 255 | }, 256 | "node_modules/util-deprecate": { 257 | "version": "1.0.2", 258 | "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", 259 | "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", 260 | "dev": true 261 | }, 262 | "node_modules/vscode-jsonrpc": { 263 | "version": "8.2.0", 264 | "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", 265 | "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==", 266 | "engines": { 267 | "node": ">=14.0.0" 268 | } 269 | }, 270 | "node_modules/vscode-languageclient": { 271 | "version": "9.0.1", 272 | "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-9.0.1.tgz", 273 | "integrity": "sha512-JZiimVdvimEuHh5olxhxkht09m3JzUGwggb5eRUkzzJhZ2KjCN0nh55VfiED9oez9DyF8/fz1g1iBV3h+0Z2EA==", 274 | "dependencies": { 275 | "minimatch": "^5.1.0", 276 | "semver": "^7.3.7", 277 | "vscode-languageserver-protocol": "3.17.5" 278 | }, 279 | "engines": { 280 | "vscode": "^1.82.0" 281 | } 282 | }, 283 | "node_modules/vscode-languageserver-protocol": { 284 | "version": "3.17.5", 285 | "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz", 286 | "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==", 287 | "dependencies": { 288 | "vscode-jsonrpc": "8.2.0", 289 | "vscode-languageserver-types": "3.17.5" 290 | } 291 | }, 292 | "node_modules/vscode-languageserver-types": { 293 | "version": "3.17.5", 294 | "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", 295 | "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==" 296 | }, 297 | "node_modules/yallist": { 298 | "version": "4.0.0", 299 | "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", 300 | "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" 301 | } 302 | } 303 | } 304 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/client/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gdshader-lsp-client", 3 | "version": "1.0.0", 4 | "description": "Client for the gdshader lsp.", 5 | "author": "godofavacyn", 6 | "license": "ISC", 7 | "engines": { 8 | "vscode": "^1.75.0" 9 | }, 10 | "dependencies": { 11 | "vscode-languageclient": "^9.0.1" 12 | }, 13 | "devDependencies": { 14 | "@types/vscode": "^1.75.1", 15 | "@vscode/test-electron": "^2.3.8" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/client/src/extension.ts: -------------------------------------------------------------------------------- 1 | import * as os from "os"; 2 | import * as path from "path"; 3 | import { workspace, ExtensionContext } from "vscode"; 4 | 5 | import { 6 | LanguageClient, 7 | LanguageClientOptions, 8 | } from "vscode-languageclient/node" 9 | 10 | let client: LanguageClient; 11 | 12 | export function activate(context: ExtensionContext) { 13 | 14 | const server = "gdshader-lsp-" + os.platform(); 15 | const serverOptions = { 16 | command: context.asAbsolutePath(path.join("server", server)) 17 | }; 18 | 19 | const clientOptions: LanguageClientOptions = { 20 | documentSelector: [{ scheme: "file", pattern: "**/*.{gdshader,gdshaderinc}" }], 21 | synchronize: { 22 | fileEvents: workspace.createFileSystemWatcher("**/.clientrc") 23 | } 24 | }; 25 | 26 | client = new LanguageClient( 27 | "languageServerExample", 28 | "Language Server Example", 29 | serverOptions, 30 | clientOptions 31 | ); 32 | 33 | client.start(); 34 | } 35 | 36 | export function deactivate(): Thenable | undefined { 37 | if (!client) { 38 | return undefined; 39 | } 40 | return client.stop(); 41 | } 42 | 43 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/client/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "target": "es2020", 5 | "lib": ["es2020"], 6 | "outDir": "out", 7 | "rootDir": "src", 8 | "sourceMap": true 9 | }, 10 | "include": ["src"], 11 | "exclude": ["node_modules", ".vscode-test"] 12 | } 13 | 14 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/gdshader.tmLanguage.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GodOfAvacyn/gdshader-lsp/f3847df8a17cd66674b2ec058c020d80ff7d4f8f/gdshader-lsp-vscode/gdshader.tmLanguage.json -------------------------------------------------------------------------------- /gdshader-lsp-vscode/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GodOfAvacyn/gdshader-lsp/f3847df8a17cd66674b2ec058c020d80ff7d4f8f/gdshader-lsp-vscode/icon.png -------------------------------------------------------------------------------- /gdshader-lsp-vscode/language-configuration.json: -------------------------------------------------------------------------------- 1 | { 2 | "comments": { 3 | "lineComment": "//", 4 | "blockComment": [ "/*", "*/" ] 5 | }, 6 | 
"brackets": [ 7 | ["{", "}"], 8 | ["[", "]"], 9 | ["(", ")"] 10 | ], 11 | "autoClosingPairs": [ 12 | ["{", "}"], 13 | ["[", "]"], 14 | ["(", ")"], 15 | ["\"", "\""], 16 | ["'", "'"] 17 | ], 18 | "surroundingPairs": [ 19 | ["{", "}"], 20 | ["[", "]"], 21 | ["(", ")"], 22 | ["\"", "\""], 23 | ["'", "'"] 24 | ] 25 | } 26 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gdshader-lsp", 3 | "version": "1.0.7", 4 | "description": "Language server for the Godot Shading language.", 5 | "icon": "icon.png", 6 | "readme": "README.md", 7 | "publisher": "godofavacyn", 8 | "categories": ["Programming Languages"], 9 | "keywords": ["godot", "gdshader", "shader"], 10 | "author": "godofavacyn", 11 | "license": "ISC", 12 | "engines": { 13 | "vscode": "^1.75.0" 14 | }, 15 | "activationEvents": [ 16 | "workspaceContains:**/project.godot" 17 | ], 18 | "main": "./client/out/extension", 19 | "contributes": { 20 | "configuration": { 21 | "type": "object", 22 | "title": "Example configuration", 23 | "properties": { 24 | "languageServerExample.maxNumberOfProblems": { 25 | "scope": "resource", 26 | "type": "number", 27 | "default": 100, 28 | "description": "Controls the max number of problems produced by the server." 29 | }, 30 | "languageServerExample.trace.server": { 31 | "scope": "window", 32 | "type": "string", 33 | "enum": [ 34 | "off", 35 | "messages", 36 | "verbose" 37 | ], 38 | "default": "off", 39 | "description": "Traces the communication between client and server." 40 | } 41 | } 42 | }, 43 | "languages": [{ 44 | "id": "gdshader", 45 | "extensions": [".gdshader",".gdshaderinc"], 46 | "configuration": "./language-configuration.json" 47 | }], 48 | "grammars": [{ 49 | "language": "gdshader", 50 | "scopeName": "source.gdshader", 51 | "path": "./syntaxes/gdshader.tmLanguage.json" 52 | }] 53 | }, 54 | "scripts": { 55 | "vscode:prepublish": "npm run compile", 56 | "compile": "tsc -b", 57 | "watch": "tsc -b -w", 58 | "lint": "eslint ./client/src --ext .ts,.tsx", 59 | "postinstall": "cd client && npm install && cd ..", 60 | "test": "sh ./scripts/e2e.sh" 61 | }, 62 | "devDependencies": { 63 | "@types/mocha": "^10.0.6", 64 | "@types/node": "^18.14.6", 65 | "@typescript-eslint/eslint-plugin": "^6.14.0", 66 | "@typescript-eslint/parser": "^6.14.0", 67 | "eslint": "^8.56.0", 68 | "js-yaml": "^4.1.0", 69 | "mocha": "^10.2.0", 70 | "typescript": "^5.3.3" 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/scripts/e2e.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | export CODE_TESTS_PATH="$(pwd)/client/out/test" 4 | export CODE_TESTS_WORKSPACE="$(pwd)/client/testFixture" 5 | 6 | node "$(pwd)/client/out/test/runTest" 7 | 8 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/server/gdshader-lsp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GodOfAvacyn/gdshader-lsp/f3847df8a17cd66674b2ec058c020d80ff7d4f8f/gdshader-lsp-vscode/server/gdshader-lsp -------------------------------------------------------------------------------- /gdshader-lsp-vscode/syntaxes/gdshader.tmLanguage.json: -------------------------------------------------------------------------------- 1 | { 2 | "scopeName": "source.gdshader", 3 | 
"patterns": [ 4 | { 5 | "include": "#expression" 6 | } 7 | ], 8 | "repository": { 9 | "expression": { 10 | "patterns": [ 11 | { 12 | "include": "#keyword" 13 | }, 14 | { 15 | "include": "#subkeyword" 16 | }, 17 | { 18 | "include": "#builtin_type" 19 | }, 20 | { 21 | "include": "#builtin_variable" 22 | }, 23 | { 24 | "include": "#function" 25 | }, 26 | { 27 | "include": "#notfunction" 28 | }, 29 | { 30 | "include": "#identifier" 31 | }, 32 | { 33 | "include": "#primitive" 34 | }, 35 | { 36 | "include": "#symbol" 37 | }, 38 | { 39 | "include": "#string" 40 | }, 41 | { 42 | "include": "#comment" 43 | } 44 | ] 45 | }, 46 | "keyword": { 47 | "match": "(?x)\n \\b(\n render_mode | shader_type |\n render_mode | shader_type |\n group_uniforms | const |\n varying | uniform |\n global | instance |\n struct | include |\n in | out |\n inout | if |\n for | while |\n switch | case |\n default | break |\n return | continue |\n flat | smooth |\n lowp | mediump |\n highp\n )\\b", 48 | "name": "keyword" 49 | }, 50 | "subkeyword": { 51 | "match": "(?x) \n \\b(\n spatial | canvas_item | particles | sky | fog |\n blend_mix | blend_add |\n blend_sub | blend_mul |\n depth_draw_opaque | depth_draw_always |\n depth_draw_never | depth_prepass_alpha |\n depth_test_disabled | sss_mode_skin |\n cull_back | cull_front |\n cull_disabled | unshaded |\n wireframe | diffuse_burley |\n diffuse_lambert | diffuse_lambert_wrap |\n diffuse_toon | specular_schlick_ggx |\n specular_toon | specular_disabled |\n skip_vertex_transform | world_vertex_coords |\n ensure_correct_normals | shadows_disabled |\n ambient_light_disabled | shadow_to_opacity |\n vertex_lighting | particle_trails |\n alpha_to_coverage | alpha_to_coverage_and_one |\n fog_disabled | blend_premul_alpha |\n blend_disabled | light_only |\n keep_data | disable_force |\n disable_velocity | collision_use_scaleuse_half_res_pass |\n use_quarter_res_pass | disable_fog\n )\\b", 52 | "name": "support.class" 53 | }, 54 | "builtin_type": { 55 | "match": "(?x) \\b( void | bool | bvec2 | bvec3 | bvec4 | int | ivec2 | ivec3 | ivec4 | uint | uvec2 | uvec3 | uvec4 | float | vec2 | vec3 | vec4 | mat2 | mat3 | mat4 | sampler2D | isampler2D | usampler2D | sampler2DArray | isampler2DArray | usampler2DArray | sampler3D | isampler3D | usampler3D | samplerCube | samplerCubeArray | const | varying )\\b", 56 | "name": "support.type" 57 | }, 58 | "builtin_variable": { 59 | "match": "(?x) \\b( VIEWPORT_SIZE | VIEW_MATRIX | INV_VIEW_MATRIX | INV_PROJECTION_MATRIX | NODE_POSITION_WORLD | NODE_POSITION_VIEW | CAMERA_POSITION_WORLD | CAMERA_DIRECTION_WORLD | OUTPUT_IS_SRGB | INSTANCE_ID | INSTANCE_CUSTOM | VIEW_INDEX | VIEW_MONO_LEFT | VIEW_RIGHT | EYE_OFFSET | VERTEX | VERTEX_ID | NORMAL | TANGENT | BINORMAL | POSITION | UV | UV2 | COLOR | ROUGHNESS | POINT_SIZE | MODELVIEW_MATRIX | MODELVIEW_NORMAL_MATRIX | MODEL_MATRIX | MODEL_NORMAL_MATRIX | PROJECTION_MATRIX | BONE_INDICES | BONE_WEIGHTSVIEWPORT_SIZE | FRAGCOORD | FRONT_FACING | VIEW | POINT_COORD | SCREEN_TEXTURE | SCREEN_UV | DEPTH_TEXTURE | DEPTH | NORMAL_MAP | NORMAL_MAP_DEPTH | ALBEDO | ALPHA | ALPHA_SCISSOR_THRESHOLD | ALPHA_HASH_SCALE | ALPHA_ANTIALIASING_EDGE | ALPHA_TEXTURE_COORDINATE | METALLIC | SPECULAR | RIM | RIM_TINT | CLEARCOAT | CLEARCOAT_GLOSS | ANISOTROPY | ANISOTROPY_FLOW | SSS_STRENGTH | SSS_TRANSMITTANCE_COLOR | SSS_TRANSMITTANCE_DEPTH | SSS_TRANSMITTANCE_BOOST | BACKLIGHT | AO | AO_LIGHT_AFFECT | EMISSION | FOG | RADIANCE | IRRADIANCE | LIGHT | LIGHT_COLOR | SPECULAR_AMOUNT | LIGHT_IS_DIRECTIONAL | ATTENUATION | 
DIFFUSE_LIGHT | SPECULAR_LIGHT | CANVAS_MATRIX | SCREEN_MATRIX | AT_LIGHT_PASS | TEXTURE_PIXEL_SIZE | SCREEN_PIXEL_SIZE | TEXTURE | SPECULAR_SHININESS_TEXTURE | SPECULAR_SHININESS | NORMAL_TEXTURE | SHADOW_VERTEX | LIGHT_VERTEX | LIGHT_ENERGY | LIGHT_POSITION | LIGHT_DIRECTION | SHADOW_MODULATE | LIFETIME | DELTA | NUMBER | INDEX | EMISSION_TRANSFORM | RANDOM_SEED | ACTIVE | VELOCITY | TRANSFORM | CUSTOM | MASS | USERDATAX | FLAG_EMIT_POSITION | FLAG_EMIT_ROT_SCALE | FLAG_EMIT_VELOCITY | FLAG_EMIT_COLOR | FLAG_EMIT_CUSTOM | EMITTER_VELOCITY | INTERPOLATE_TO_END | AMOUNT_RATIO | RESTART_POSITION | RESTART_ROT_SCALE | RESTART_VELOCITY | RESTART_COLOR | RESTART_CUSTOM | RESTART | COLLIDED | COLLISION_NORMAL | COLLISION_DEPTH | ATTRACTOR_FORCE | TIME | AT_HALF_RES_PASS | AT_QUARTER_RES_PASS | AT_CUBEMAP_PASS | LIGHTX_ENABLED | LIGHTX_ENERGY | LIGHTX_DIRECTION | LIGHTX_COLOR | LIGHTX_SIZE | PI | TAU | E | WORLD_POSITION | OBJECT_POSITION | UVW | SIZE | SDF | DENSITY )\\b", 60 | "name": "support.class" 61 | }, 62 | "function": { 63 | "match": "(?x) \\b( radians | degrees | sin | cos | tan | asin | acos | atan | sinh | cosh | tanh | asinh | acosh | atanh | pow | exp | exp2 | log | log2 | sqrt | inversesqrt | abs | sign | floor | round | roundEven | trunc | ceil | fract | mod | mod | modf | min | max | clamp | textureSize | textureQueryLod | textureQueryLevels | texture | textureProj | textureLod | textureProjLod | textureGrad | textureProjGrad | texelFetch | textureGather dFdx | dFdxCoarse | dFdxFine | dFdy | dFdyCoarse | dFdyFine | fwidth | fwidthCoarse | fwidthFine | packHalf2x16 | unpackHalf2x16 | packUnorm2x16 | unpackUnorm2x16 | packSnorm2x16 | unpackSnorm2x16 | packUnorm4x8 | unpackUnorm4x8 | packSnorm4x8 | unpackSnorm4x8 bitfieldExtract | bitfieldInsert | bitfieldReverse | bitCount | findLSB | findMSB | imulExtended | umulExtended | uaddCarry | usubBorrow | ldexp | frexp )\\b", 64 | "name": "entity.name.function" 65 | }, 66 | "primitive": { 67 | "match": "\\b((\\b(\\d+(.\\d*)?(u)?)\\b)|true|false)\\b", 68 | "name": "constant" 69 | }, 70 | "string": { 71 | "begin": "\"", 72 | "end": "\"", 73 | "name": "string" 74 | }, 75 | "notfunction": { 76 | "match": "\\w+(?=\\()", 77 | "name": "entity.name.function" 78 | }, 79 | "symbol": { 80 | "match": "(\\*|-|\\+|%|=|/(?!/)|\\&|\\^|<|>|#)", 81 | "name": "keyword.operator" 82 | }, 83 | "comment": { 84 | "match": "(\\/\\/.*$)", 85 | "name": "comment" 86 | } 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/syntaxes/gdshader.tmLanguage.yaml: -------------------------------------------------------------------------------- 1 | scopeName: source.gdshader 2 | patterns: 3 | - include: '#expression' 4 | 5 | repository: 6 | expression: 7 | patterns: 8 | - include: '#keyword' 9 | - include: '#subkeyword' 10 | - include: '#builtin_type' 11 | - include: '#builtin_variable' 12 | - include: '#function' 13 | - include: '#notfunction' 14 | - include: '#identifier' 15 | - include: '#primitive' 16 | - include: '#symbol' 17 | - include: '#string' 18 | - include: '#comment' 19 | keyword: 20 | match: >- 21 | (?x) 22 | \b( 23 | render_mode | shader_type | 24 | render_mode | shader_type | 25 | group_uniforms | const | 26 | varying | uniform | 27 | global | instance | 28 | struct | include | 29 | in | out | 30 | inout | if | 31 | for | while | 32 | switch | case | 33 | default | break | 34 | return | continue | 35 | flat | smooth | 36 | lowp | mediump | 37 | highp 38 | )\b 39 | name: keyword 40 | 
subkeyword: 41 | match: >- 42 | (?x) 43 | \b( 44 | spatial | canvas_item | particles | sky | fog | 45 | blend_mix | blend_add | 46 | blend_sub | blend_mul | 47 | depth_draw_opaque | depth_draw_always | 48 | depth_draw_never | depth_prepass_alpha | 49 | depth_test_disabled | sss_mode_skin | 50 | cull_back | cull_front | 51 | cull_disabled | unshaded | 52 | wireframe | diffuse_burley | 53 | diffuse_lambert | diffuse_lambert_wrap | 54 | diffuse_toon | specular_schlick_ggx | 55 | specular_toon | specular_disabled | 56 | skip_vertex_transform | world_vertex_coords | 57 | ensure_correct_normals | shadows_disabled | 58 | ambient_light_disabled | shadow_to_opacity | 59 | vertex_lighting | particle_trails | 60 | alpha_to_coverage | alpha_to_coverage_and_one | 61 | fog_disabled | blend_premul_alpha | 62 | blend_disabled | light_only | 63 | keep_data | disable_force | 64 | disable_velocity | collision_use_scaleuse_half_res_pass | 65 | use_quarter_res_pass | disable_fog 66 | )\b 67 | name: support.class 68 | builtin_type: 69 | match: 70 | (?x) 71 | \b( 72 | void | 73 | bool | bvec2 | 74 | bvec3 | bvec4 | 75 | int | ivec2 | 76 | ivec3 | ivec4 | 77 | uint | uvec2 | 78 | uvec3 | uvec4 | 79 | float | vec2 | 80 | vec3 | vec4 | 81 | mat2 | mat3 | 82 | mat4 | sampler2D | 83 | isampler2D | usampler2D | 84 | sampler2DArray | 85 | isampler2DArray | usampler2DArray | 86 | sampler3D | isampler3D | 87 | usampler3D | samplerCube | 88 | samplerCubeArray | const | 89 | varying 90 | )\b 91 | name: support.type 92 | builtin_variable: 93 | match: 94 | (?x) 95 | \b( 96 | VIEWPORT_SIZE | VIEW_MATRIX | 97 | INV_VIEW_MATRIX | INV_PROJECTION_MATRIX | 98 | NODE_POSITION_WORLD | NODE_POSITION_VIEW | 99 | CAMERA_POSITION_WORLD | CAMERA_DIRECTION_WORLD | 100 | OUTPUT_IS_SRGB | INSTANCE_ID | 101 | INSTANCE_CUSTOM | VIEW_INDEX | 102 | VIEW_MONO_LEFT | VIEW_RIGHT | 103 | EYE_OFFSET | VERTEX | 104 | VERTEX_ID | NORMAL | 105 | TANGENT | BINORMAL | 106 | POSITION | UV | 107 | UV2 | COLOR | 108 | ROUGHNESS | POINT_SIZE | 109 | MODELVIEW_MATRIX | MODELVIEW_NORMAL_MATRIX | 110 | MODEL_MATRIX | MODEL_NORMAL_MATRIX | 111 | PROJECTION_MATRIX | BONE_INDICES | 112 | BONE_WEIGHTSVIEWPORT_SIZE | FRAGCOORD | 113 | FRONT_FACING | VIEW | 114 | POINT_COORD | SCREEN_TEXTURE | 115 | SCREEN_UV | DEPTH_TEXTURE | 116 | DEPTH | NORMAL_MAP | 117 | NORMAL_MAP_DEPTH | ALBEDO | 118 | ALPHA | ALPHA_SCISSOR_THRESHOLD | 119 | ALPHA_HASH_SCALE | ALPHA_ANTIALIASING_EDGE | 120 | ALPHA_TEXTURE_COORDINATE | METALLIC | 121 | SPECULAR | RIM | 122 | RIM_TINT | CLEARCOAT | 123 | CLEARCOAT_GLOSS | ANISOTROPY | 124 | ANISOTROPY_FLOW | SSS_STRENGTH | 125 | SSS_TRANSMITTANCE_COLOR | SSS_TRANSMITTANCE_DEPTH | 126 | SSS_TRANSMITTANCE_BOOST | BACKLIGHT | 127 | AO | AO_LIGHT_AFFECT | 128 | EMISSION | FOG | 129 | RADIANCE | IRRADIANCE | 130 | LIGHT | LIGHT_COLOR | 131 | SPECULAR_AMOUNT | LIGHT_IS_DIRECTIONAL | 132 | ATTENUATION | DIFFUSE_LIGHT | 133 | SPECULAR_LIGHT | CANVAS_MATRIX | 134 | SCREEN_MATRIX | AT_LIGHT_PASS | 135 | TEXTURE_PIXEL_SIZE | SCREEN_PIXEL_SIZE | 136 | TEXTURE | SPECULAR_SHININESS_TEXTURE | 137 | SPECULAR_SHININESS | NORMAL_TEXTURE | 138 | SHADOW_VERTEX | LIGHT_VERTEX | 139 | LIGHT_ENERGY | LIGHT_POSITION | 140 | LIGHT_DIRECTION | SHADOW_MODULATE | 141 | LIFETIME | DELTA | 142 | NUMBER | INDEX | 143 | EMISSION_TRANSFORM | RANDOM_SEED | 144 | ACTIVE | VELOCITY | 145 | TRANSFORM | CUSTOM | 146 | MASS | USERDATAX | 147 | FLAG_EMIT_POSITION | FLAG_EMIT_ROT_SCALE | 148 | FLAG_EMIT_VELOCITY | FLAG_EMIT_COLOR | 149 | FLAG_EMIT_CUSTOM | EMITTER_VELOCITY | 150 | 
INTERPOLATE_TO_END | AMOUNT_RATIO | 151 | RESTART_POSITION | RESTART_ROT_SCALE | 152 | RESTART_VELOCITY | RESTART_COLOR | 153 | RESTART_CUSTOM | RESTART | 154 | COLLIDED | COLLISION_NORMAL | 155 | COLLISION_DEPTH | ATTRACTOR_FORCE | 156 | TIME | AT_HALF_RES_PASS | 157 | AT_QUARTER_RES_PASS | AT_CUBEMAP_PASS | 158 | LIGHTX_ENABLED | LIGHTX_ENERGY | 159 | LIGHTX_DIRECTION | LIGHTX_COLOR | 160 | LIGHTX_SIZE | PI | 161 | TAU | E | 162 | WORLD_POSITION | OBJECT_POSITION | 163 | UVW | SIZE | 164 | SDF | DENSITY 165 | )\b 166 | name: support.class 167 | function: 168 | match: 169 | (?x) 170 | \b( 171 | radians | degrees | 172 | sin | cos | 173 | tan | asin | 174 | acos | atan | 175 | sinh | cosh | 176 | tanh | asinh | 177 | acosh | atanh | 178 | pow | exp | 179 | exp2 | log | 180 | log2 | sqrt | 181 | inversesqrt | abs | 182 | sign | floor | 183 | round | roundEven | 184 | trunc | ceil | 185 | fract | mod | 186 | mod | modf | 187 | min | max | 188 | clamp | textureSize | 189 | textureQueryLod | textureQueryLevels | 190 | texture | textureProj | 191 | textureLod | textureProjLod | 192 | textureGrad | textureProjGrad | 193 | texelFetch | textureGather 194 | dFdx | dFdxCoarse | 195 | dFdxFine | dFdy | 196 | dFdyCoarse | dFdyFine | 197 | fwidth | fwidthCoarse | 198 | fwidthFine | packHalf2x16 | 199 | unpackHalf2x16 | packUnorm2x16 | 200 | unpackUnorm2x16 | packSnorm2x16 | 201 | unpackSnorm2x16 | packUnorm4x8 | 202 | unpackUnorm4x8 | packSnorm4x8 | 203 | unpackSnorm4x8 bitfieldExtract | 204 | bitfieldInsert | bitfieldReverse | 205 | bitCount | findLSB | 206 | findMSB | imulExtended | 207 | umulExtended | uaddCarry | 208 | usubBorrow | ldexp | 209 | frexp 210 | )\b 211 | name: entity.name.function 212 | primitive: 213 | match: \b((\b(\d+(.\d*)?(u)?)\b)|true|false)\b 214 | name: constant 215 | string: 216 | begin: "\"" 217 | end: "\"" 218 | name: string 219 | notfunction: 220 | match: \w+(?=\() 221 | name: entity.name.function 222 | symbol: 223 | match: (\*|-|\+|%|=|/(?!/)|\&|\^|<|>|#) 224 | name: keyword.operator 225 | comment: 226 | match: (\/\/.*$) 227 | name: comment 228 | 229 | 230 | -------------------------------------------------------------------------------- /gdshader-lsp-vscode/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "target": "es2020", 5 | "lib": ["es2020"], 6 | "outDir": "out", 7 | "rootDir": "src", 8 | "sourceMap": true 9 | }, 10 | "include": [ 11 | "src" 12 | ], 13 | "exclude": [ 14 | "node_modules", 15 | ".vscode-test" 16 | ], 17 | "references": [ 18 | { "path": "./client" }, 19 | ] 20 | } 21 | 22 | -------------------------------------------------------------------------------- /project.godot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GodOfAvacyn/gdshader-lsp/f3847df8a17cd66674b2ec058c020d80ff7d4f8f/project.godot -------------------------------------------------------------------------------- /src/completion.rs: -------------------------------------------------------------------------------- 1 | use lsp_types::*; 2 | 3 | use crate::{interpreter::evaluate_expression, memory::*, nodes::{ExpressionNode, TypeNode}}; 4 | 5 | fn cast_types() -> Vec { 6 | [ 7 | "float()", 8 | "int()", 9 | "uint()", 10 | "bool()", 11 | "vec2()", 12 | "vec3()", 13 | "vec4()", 14 | "ivec2()", 15 | "ivec3()", 16 | "ivec4()", 17 | "uvec2()", 18 | "uvec3()", 19 | "uvec4()", 20 | "bvec2()", 21 | "bvec3()", 22 | "bvec4()", 23 | 
"mat2()", 24 | "mat3()", 25 | "mat4()", 26 | ].map(|x| x.to_string()).to_vec() 27 | } 28 | 29 | #[derive(Clone, Debug)] 30 | pub enum CompletionElement { 31 | TopLevelKeyword, 32 | Include, 33 | IncludeString, 34 | ShaderType, 35 | RenderMode, 36 | Uniform, 37 | Hint(TypeInfo), 38 | Precision, 39 | Interpolation, 40 | FunctionName, 41 | FunctionQualifier, 42 | Statement, 43 | SwitchCase, 44 | Type, 45 | Identifier(bool), 46 | Member(Box), 47 | None 48 | } 49 | pub fn get_completion_items( 50 | memory: &mut Memory, 51 | cursor: Position, 52 | element: &CompletionElement 53 | ) -> Vec{ 54 | let scope = memory.scopes.find_scope_from_position(cursor); 55 | match element { 56 | CompletionElement::TopLevelKeyword => { 57 | [ 58 | "shader_type", 59 | "render_mode", 60 | "const", 61 | "varying", 62 | "uniform", 63 | "global", 64 | "instance", 65 | "group_uniforms", 66 | "struct", 67 | "void", 68 | ] 69 | .iter() 70 | .map(|x| CompletionItem { 71 | label: x.to_string(), 72 | kind: Some(CompletionItemKind::KEYWORD), 73 | ..Default::default() 74 | }) 75 | .chain(memory.get_builtin_types(1)) 76 | .chain([CompletionItem{ 77 | label: "#include".to_string(), 78 | kind: Some(CompletionItemKind::VALUE), 79 | insert_text: Some("include".to_string()), 80 | ..Default::default() 81 | }].into_iter()) 82 | .collect() 83 | }, 84 | CompletionElement::ShaderType => { 85 | [ "spatial", "canvas_item", "particles", "sky", "fog" ] 86 | .iter() 87 | .map(|x| CompletionItem { 88 | label: x.to_string(), 89 | kind: Some(CompletionItemKind::VALUE), 90 | ..Default::default() 91 | }) 92 | .collect() 93 | }, 94 | CompletionElement::SwitchCase=> { 95 | [ "case", "default" ] 96 | .iter() 97 | .map(|x| CompletionItem { 98 | label: x.to_string(), 99 | kind: Some(CompletionItemKind::VALUE), 100 | ..Default::default() 101 | }) 102 | .collect() 103 | }, 104 | CompletionElement::RenderMode => { 105 | memory.valid_render_modes 106 | .iter() 107 | .map(|(x,_)| CompletionItem { 108 | label: x.to_string(), 109 | kind: Some(CompletionItemKind::VALUE), 110 | ..Default::default() 111 | }) 112 | .collect() 113 | }, 114 | CompletionElement::Uniform => { 115 | vec![CompletionItem { 116 | label: "uniform".to_string(), 117 | kind: Some(CompletionItemKind::KEYWORD), 118 | ..Default::default() 119 | }] 120 | }, 121 | CompletionElement::Hint(ty) => { 122 | memory.get_hints(ty.clone()) 123 | }, 124 | CompletionElement::Interpolation => { 125 | [ "smooth", "float", "lowp", "mediump", "highp" ] 126 | .iter().map(|x| CompletionItem { 127 | label: x.to_string(), 128 | kind: Some(CompletionItemKind::KEYWORD), 129 | ..Default::default() 130 | }) 131 | .chain(memory.get_builtin_types(0)) 132 | .chain(memory.get_structs()).collect() 133 | }, 134 | CompletionElement::Precision => { 135 | [ "lowp", "mediump", "highp" ] 136 | .iter().map(|x| CompletionItem { 137 | label: x.to_string(), 138 | kind: Some(CompletionItemKind::KEYWORD), 139 | ..Default::default() 140 | }) 141 | .chain(memory.get_builtin_types(0)) 142 | .chain(memory.get_structs()) 143 | .collect() 144 | }, 145 | CompletionElement::FunctionQualifier => { 146 | [ "in", "out", "inout" ] 147 | .iter().map(|x| CompletionItem { 148 | label: x.to_string(), 149 | kind: Some(CompletionItemKind::KEYWORD), 150 | ..Default::default() 151 | }) 152 | .chain(memory.get_builtin_types(0)) 153 | .chain(memory.get_structs()) 154 | .collect() 155 | }, 156 | CompletionElement::Statement => { 157 | [ 158 | "for", 159 | "while", 160 | "if", 161 | "switch", 162 | "continue", 163 | "break", 164 | "return", 165 | "const", 
166 | "true", 167 | "false" 168 | ] 169 | .iter().map(|x| CompletionItem { 170 | label: x.to_string(), 171 | kind: Some(CompletionItemKind::KEYWORD), 172 | ..Default::default() 173 | }) 174 | .chain(memory.get_builtin_types(scope)) 175 | .chain(memory.get_structs()) 176 | .chain( 177 | memory.get_functions(cursor, false) 178 | .into_iter() 179 | .filter(|x| !cast_types().contains(&x.label)) 180 | .collect::>()) 181 | .chain(memory.get_variables(scope, false)) 182 | .collect() 183 | } 184 | CompletionElement::Type => { 185 | memory.get_builtin_types(scope) 186 | .into_iter() 187 | .chain(memory.get_structs()) 188 | .collect() 189 | }, 190 | CompletionElement::Identifier(is_const) => { 191 | [ 192 | "true", 193 | "false" 194 | ] 195 | .iter().map(|x| CompletionItem { 196 | label: x.to_string(), 197 | kind: Some(CompletionItemKind::KEYWORD), 198 | ..Default::default() 199 | }) 200 | .chain(memory.get_functions(cursor, *is_const)) 201 | .chain(memory.get_variables(scope, *is_const)) 202 | .chain(memory.get_structs()) 203 | .collect() 204 | }, 205 | CompletionElement::Member(member) => { 206 | memory.scopes.force_scope(scope); 207 | match evaluate_expression(memory, *member.clone()) { 208 | Ok(result) => { 209 | let type_info = result.type_info; 210 | if type_info.size != 0 { 211 | return vec![]; 212 | } 213 | match type_info { 214 | _ if type_info == TypeInfo::from_str("gvec2_type") => vec!["x","y"], 215 | _ if type_info == TypeInfo::from_str("gvec3_type") => vec!["x","y","z"], 216 | _ if type_info == TypeInfo::from_str("gvec4_type") => vec!["x","y","z","w"], 217 | _ => if let Some(struct_info) = memory.structs.get(&type_info.base) { 218 | struct_info.fields.iter().map(|x| x.name.as_str()).collect() 219 | } else { 220 | vec![] 221 | } 222 | }.iter().map(|x| CompletionItem { 223 | label: x.to_string(), 224 | kind: Some(CompletionItemKind::FIELD), 225 | ..Default::default() 226 | }).collect() 227 | } 228 | _ => { 229 | vec![] 230 | } 231 | } 232 | }, 233 | CompletionElement::FunctionName => { 234 | match memory.shader_type { 235 | ShaderType::Spatial => ["fragment", "vertex", "light"].iter(), 236 | ShaderType::CanvasItem => ["fragment", "vertex", "light"].iter(), 237 | ShaderType::Particles => ["start", "process"].iter(), 238 | ShaderType::Fog => ["sky"].iter(), 239 | ShaderType::Sky => ["fog"].iter(), 240 | } 241 | .map(|x| CompletionItem { 242 | label: x.to_string(), 243 | kind: Some(CompletionItemKind::KEYWORD), 244 | ..Default::default() 245 | }) 246 | .collect() 247 | } 248 | CompletionElement::None => vec![], 249 | CompletionElement::Include => { 250 | vec![CompletionItem { 251 | label: "#include".to_string(), 252 | kind: Some(CompletionItemKind::KEYWORD), 253 | ..Default::default() 254 | }] 255 | }, 256 | CompletionElement::IncludeString => { 257 | let root = memory.root_dir.clone().map_or("".to_string(), |x| x); 258 | memory.fetch_gdshaderinc_files(&root) 259 | .iter() 260 | .map(|x| CompletionItem { 261 | label: format!("\"{}\"",x.to_string()), 262 | kind: Some(CompletionItemKind::TEXT), 263 | insert_text: Some(x.to_string()), 264 | ..Default::default() 265 | }) 266 | .collect() 267 | }, 268 | } 269 | } 270 | 271 | pub fn get_hover_description( 272 | memory: &mut Memory, 273 | cursor: Position, 274 | text: &String, 275 | ) -> Option { 276 | let scope = memory.scopes.find_scope_from_position(cursor); 277 | if let Some(info) = memory.builtin_types.get(text) { 278 | Some(HoverContents::Markup(MarkupContent { 279 | kind: MarkupKind::PlainText, 280 | value: format!("{}\n\n{}", text.clone(), 
info.description) 281 | })) 282 | } else if let Some(variable) = memory.scopes.collect_scopes_from(scope) 283 | .iter().find_map(|x| x.get(text)) { 284 | let description = variable.description.clone(); 285 | let description = description.map_or("".to_string(), |x| format!("\n\n{}", x)); 286 | Some(HoverContents::Markup(MarkupContent { 287 | kind: MarkupKind::Markdown, 288 | value: format!( 289 | "{} {}{}", 290 | variable.ty.to_string(), 291 | text, 292 | description 293 | ) 294 | })) 295 | } else if let Some(struct_info) = memory.structs.get(text) { 296 | let fields = struct_info.fields.iter().map(|x| { 297 | format!(" {} {};", x.ty.to_string(), x.name) 298 | }).collect::>().join("\n"); 299 | Some(HoverContents::Markup(MarkupContent { 300 | kind: MarkupKind::Markdown, 301 | value: format!("struct {} {{\n{}\n}}", text, fields) 302 | })) 303 | } else if let Some(function) = memory.functions.get(text) { 304 | let signatures = function.signatures.iter().map(|x| { 305 | let params = x.params.iter().map(|y| { 306 | let qualifier = match y.qualifier { 307 | Some(FunctionParamQualifier::In) => "in ", 308 | Some(FunctionParamQualifier::Out) => "out ", 309 | Some(FunctionParamQualifier::InOut) => "inout ", 310 | None => "" 311 | }.to_string(); 312 | format!("{}{} {}", qualifier, y.ty.to_string(), y.name) 313 | }).collect::>().join(", "); 314 | format!("{} {} ({})", x.return_type.to_string(), text, params) 315 | }).collect::>().join("\n"); 316 | let description = function.description.clone().map_or("".to_string(), |x| x); 317 | Some(HoverContents::Markup(MarkupContent { 318 | kind: MarkupKind::Markdown, 319 | value: format!("{}\n\n{}", signatures, description) 320 | })) 321 | } else if let Some(hint) = memory.hints.get(text) { 322 | let types = hint.type_info 323 | .iter() 324 | .map(|x| format!("{}", x.to_string())) 325 | .collect::>() 326 | .join(","); 327 | let args = if hint.num_arguments.iter().any(|&x| x > 0){ "(...)" } 328 | else { "" }; 329 | let description = hint.description.clone(); 330 | Some(HoverContents::Markup(MarkupContent { 331 | kind: MarkupKind::Markdown, 332 | value: format!("({}) {}{}\n\n{}", types, text, args, description) 333 | })) 334 | } else { 335 | None 336 | } 337 | } 338 | 339 | 340 | 341 | 342 | 343 | -------------------------------------------------------------------------------- /src/interpreter/expression_interpreter.rs: -------------------------------------------------------------------------------- 1 | use lsp_types::Range; 2 | use crate::{ 3 | 4 | lexer::{MaybeOperator, OperationType, Token, TokenKind}, 5 | memory::{Memory, Primitive, TypeInfo}, 6 | nodes::* 7 | }; 8 | 9 | use super::{eval_operation, eval_swizzle, EvaluateError}; 10 | 11 | pub struct ExpressionEvaluation { 12 | pub type_info: TypeInfo, 13 | pub is_const: bool, 14 | pub is_assignable: bool 15 | } 16 | impl ExpressionEvaluation { 17 | pub fn new(type_info: TypeInfo, is_const: bool, is_assignable: bool) -> Self { 18 | Self { 19 | type_info, 20 | is_const, 21 | is_assignable 22 | } 23 | } 24 | } 25 | 26 | pub type ExprEvalResult = Result; 27 | 28 | pub fn evaluate_expression( 29 | memory: &mut Memory, 30 | expr: ExpressionNode 31 | ) -> ExprEvalResult { 32 | let range = expr.range(); 33 | match expr { 34 | ExpressionNode::Identifier(x) => eval_identifier_expr(memory, x, range), 35 | ExpressionNode::Unary(x) => eval_unary_expr(memory, x, range), 36 | ExpressionNode::Binary(x) => eval_binary_expr(memory, x, range), 37 | ExpressionNode::Paren(x) => evaluate_expression(memory, *x), 38 | 
ExpressionNode::Conditional(x) => eval_conditional(memory, x, range), 39 | ExpressionNode::Call(x) => eval_call_expr(memory, x, range), 40 | ExpressionNode::ArrayAccess(x) => eval_array_access(memory, x, range), 41 | ExpressionNode::MemberAccess(x) => eval_member_access(memory, x, range), 42 | ExpressionNode::ArrayLiteral(x) => eval_array_literal(memory, x, range), 43 | ExpressionNode::Assignment(x) => eval_assignment_expr(memory, x, range), 44 | ExpressionNode::Increment(x) => eval_increment_expr(memory, x, range), 45 | ExpressionNode::Primitive(p) => 46 | Ok(ExpressionEvaluation::new(TypeInfo::from_primitive(p), true, false)), 47 | } 48 | } 49 | 50 | fn eval_increment_expr( 51 | memory: &mut Memory, 52 | increment: IncrementNode, 53 | range: lsp_types::Range 54 | ) -> ExprEvalResult { 55 | let result = evaluate_expression(memory, *increment.arg)?; 56 | if !result.is_assignable || result.is_const { 57 | let message = "Cannot change a constant value."; 58 | return Err(memory.alert_error(message, range)) 59 | } 60 | eval_operation(memory, OperationType::Number, result.type_info.clone(), range)?; 61 | Ok(ExpressionEvaluation::new(result.type_info, result.is_const, false)) 62 | } 63 | 64 | fn eval_assignment_expr( 65 | memory: &mut Memory, 66 | assignment: AssignmentNode, 67 | range: lsp_types::Range 68 | ) -> ExprEvalResult { 69 | let left_range = assignment.left.range(); 70 | let right_range = assignment.right.range(); 71 | let left = evaluate_expression(memory, *assignment.left)?; 72 | let right = evaluate_expression(memory, *assignment.right)?; 73 | if !left.is_assignable || left.is_const { 74 | let message = "Cannot assign to a constant value."; 75 | return Err(memory.alert_error(message, left_range)) 76 | } 77 | 78 | let mut type_mismatch = || { 79 | let message = format!( 80 | "Type mismatch: '{}' and '{}'.", 81 | left.type_info.to_string(), 82 | right.type_info.to_string() 83 | ); 84 | return Err(memory.alert_error(&message, range)); 85 | }; 86 | let matmul_check = if assignment.op.kind == TokenKind::Star { 87 | match (left.type_info.to_string().as_str(), right.type_info.to_string().as_str()) { 88 | ("mat2", "vec2") => Some(left.type_info.clone()), 89 | ("mat3", "vec3") => Some(left.type_info.clone()), 90 | ("mat4", "vec4") => Some(left.type_info.clone()), 91 | _ => None 92 | } 93 | } else { None }; 94 | 95 | if let Some(ty) = matmul_check { 96 | Ok(ExpressionEvaluation::new(ty, left.is_const, false)) 97 | } else if let Some(ty) = matmul_check { 98 | Ok(ExpressionEvaluation::new(ty, left.is_const, false)) 99 | } else { 100 | let correct_type = if assignment.op.kind == TokenKind::Equal { 101 | if left.type_info != right.type_info { 102 | return type_mismatch(); 103 | } else { 104 | left.type_info.clone() 105 | } 106 | } else { 107 | let left_size = if let Some(x) = 108 | left.type_info.get_generic_size().map_or(None, |x| x.as_size()) { x } 109 | else { return type_mismatch() }; 110 | 111 | let right_size = if let Some(x) = 112 | right.type_info.get_generic_size().map_or(None, |x| x.as_size()) { x } 113 | else { return type_mismatch() }; 114 | 115 | // let left_generic = if let Some(x) = left.type_info.get_generic_type() { x } 116 | // else { return type_mismatch() }; 117 | // 118 | // let right_generic = if let Some(x) = right.type_info.get_generic_type() { x } 119 | // else { return type_mismatch() }; 120 | 121 | if left_size == right_size { 122 | left.type_info.clone() 123 | } else { 124 | if left_size == 1 { 125 | right.type_info.clone() 126 | } else if right_size == 1 { 127 | 
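// the right-hand operand is a scalar (generic size 1), so it broadcasts and
// the compound assignment keeps the left operand's type.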
left.type_info.clone() 128 | } else { 129 | return type_mismatch(); 130 | } 131 | } 132 | }; 133 | 134 | eval_operation(memory, assignment.op.to_assignment_op().unwrap(), correct_type, left_range)?; 135 | Ok(ExpressionEvaluation::new(left.type_info, left.is_const, false)) 136 | } 137 | 138 | } 139 | 140 | fn eval_array_literal( 141 | memory: &mut Memory, 142 | mut array_literal: Vec, 143 | range: Range 144 | ) -> ExprEvalResult { 145 | let len = array_literal.len(); 146 | let mut is_const = true; 147 | if len == 0 { 148 | let message = "Empty array literal."; 149 | return Err(memory.alert_error(message, range)) 150 | } 151 | 152 | let first = array_literal.swap_remove(0); 153 | let first_range = first.range(); 154 | let first_result = evaluate_expression(memory, first)?; 155 | if first_result.type_info.size != 0 { 156 | let message = "Nested array types are not allowed."; 157 | return Err(memory.alert_error(message, first_range)) 158 | } 159 | if !first_result.is_const { 160 | is_const = false; 161 | } 162 | 163 | for expr in array_literal { 164 | let result = evaluate_expression(memory, expr)?; 165 | if result.type_info != first_result.type_info{ 166 | let message = format!( 167 | "Mismatched types: {} and {}", 168 | first_result.type_info.to_string(), 169 | result.type_info.to_string() 170 | ); 171 | return Err(memory.alert_error(&message, range)) 172 | } 173 | if !result.is_const { 174 | is_const = false; 175 | } 176 | }; 177 | Ok(ExpressionEvaluation::new( 178 | TypeInfo { base: first_result.type_info.base, size: len as u32 }, 179 | is_const, 180 | false 181 | )) 182 | } 183 | 184 | fn eval_member_access( 185 | memory: &mut Memory, 186 | member_access: MemberAccessNode, 187 | range: lsp_types::Range 188 | ) -> ExprEvalResult { 189 | let member_access_str = memory.get_token_text(member_access.member); 190 | let argument_result = evaluate_expression(memory, *member_access.argument)?; 191 | if argument_result.type_info.size != 0 { 192 | let message = format!( 193 | "Cannot access member of array type {}", 194 | argument_result.type_info.to_string() 195 | ); 196 | return Err(memory.alert_error(&message, range)) 197 | } 198 | let message = format!( 199 | "Type {} has no member {}", 200 | argument_result.type_info.to_string(), 201 | member_access_str 202 | ); 203 | let member = &member_access_str; 204 | 205 | if let Some(swizzle) = match argument_result.type_info.base.as_str() { 206 | "vec2" => Some(eval_swizzle(member, Primitive::Float, 2)), 207 | "vec3" => Some(eval_swizzle(member, Primitive::Float, 3)), 208 | "vec4" => Some(eval_swizzle(member, Primitive::Float, 4)), 209 | "ivec2" => Some(eval_swizzle(member, Primitive::Int, 2)), 210 | "ivec3" => Some(eval_swizzle(member, Primitive::Int, 3)), 211 | "ivec4" => Some(eval_swizzle(member, Primitive::Int, 4)), 212 | "uvec2" => Some(eval_swizzle(member, Primitive::Uint, 2)), 213 | "uvec3" => Some(eval_swizzle(member, Primitive::Uint, 3)), 214 | "uvec4" => Some(eval_swizzle(member, Primitive::Uint, 4)), 215 | "bvec2" => Some(eval_swizzle(member, Primitive::Bool, 2)), 216 | "bvec3" => Some(eval_swizzle(member, Primitive::Bool, 3)), 217 | "bvec4" => Some(eval_swizzle(member, Primitive::Bool, 4)), 218 | _ => None 219 | } { 220 | if let Some(ty) = swizzle { 221 | return Ok(ExpressionEvaluation::new( 222 | ty, 223 | argument_result.is_const, 224 | argument_result.is_assignable 225 | )) 226 | } else { 227 | return Err(memory.alert_error(&message, range)) 228 | } 229 | } 230 | memory.structs 231 | .get(&argument_result.type_info.base) 232 | 
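// not a built-in vector swizzle, so fall back to looking the member up among
// the fields of a user-defined struct; otherwise report the "no member" error.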
.and_then(|x| x.fields.iter().find(|x| x.name.as_str() == member).map(|x| x.ty.clone())) 233 | .ok_or_else(|| memory.alert_error(&message, range)) 234 | .map(|ty| ExpressionEvaluation::new( 235 | ty, 236 | argument_result.is_const, 237 | argument_result.is_assignable 238 | )) 239 | } 240 | 241 | fn eval_array_access( 242 | memory: &mut Memory, 243 | array_access: ArrayAccessNode, 244 | range: lsp_types::Range 245 | ) -> ExprEvalResult { 246 | let argument_result = evaluate_expression(memory, *array_access.argument)?; 247 | let index_result = evaluate_expression(memory, *array_access.index)?; 248 | let is_const = argument_result.is_const && index_result.is_const; 249 | let is_assignable = argument_result.is_assignable; 250 | match index_result.type_info.base.as_str() { 251 | "int" | "uint" => {}, 252 | _ => { 253 | let message = format!( 254 | "Cannot index with type {}", 255 | index_result.type_info.to_string() 256 | ); 257 | return Err(memory.alert_error(&message, range)) 258 | } 259 | } 260 | let new_eval = ExpressionEvaluation::new; 261 | match argument_result.type_info.base.as_str() { 262 | "vec2" => Ok(new_eval(TypeInfo::from_str("float"), is_const, is_assignable)), 263 | "vec3" => Ok(new_eval(TypeInfo::from_str("float"), is_const, is_assignable)), 264 | "vec4" => Ok(new_eval(TypeInfo::from_str("float"), is_const, is_assignable)), 265 | "mat2" => Ok(new_eval(TypeInfo::from_str("vec2"), is_const, is_assignable)), 266 | "mat3" => Ok(new_eval(TypeInfo::from_str("vec3"), is_const, is_assignable)), 267 | "mat4" => Ok(new_eval(TypeInfo::from_str("vec4"), is_const, is_assignable)), 268 | _ if argument_result.type_info.size != 0 => { 269 | Ok(new_eval( 270 | TypeInfo { base: argument_result.type_info.base, size: 0 }, 271 | is_const, 272 | is_assignable 273 | )) 274 | }, 275 | _ => { 276 | let message = format!( 277 | "Type {} cannot be indexed.", 278 | argument_result.type_info.to_string() 279 | ); 280 | Err(memory.alert_error(&message, range)) 281 | } 282 | } 283 | } 284 | 285 | fn eval_call_expr( 286 | memory: &mut Memory, 287 | call: CallNode, 288 | range: lsp_types::Range 289 | ) -> ExprEvalResult { 290 | let call_name = memory.get_token_text(call.identifier); 291 | let mut is_const = true; 292 | let mut arg_types = vec![]; 293 | 294 | for arg in call.args { 295 | let result = evaluate_expression(memory, arg.expression)?; 296 | if !result.is_const{ 297 | is_const = false 298 | } 299 | arg_types.push(result.type_info); 300 | } 301 | 302 | let maybe_struct = memory.structs.get(&call_name); 303 | if let Some(struct_info) = maybe_struct { 304 | let correct_types = struct_info.fields 305 | .iter() 306 | .map(|x| &x.ty) 307 | .zip(arg_types.iter()) 308 | .collect::>(); 309 | for (info, arg_info) in correct_types { 310 | if *info != *arg_info { 311 | let message = format!("Invalid arguments for function '{}'", call_name); 312 | return Err(memory.alert_error(&message, range)); 313 | } 314 | } 315 | return Ok(ExpressionEvaluation::new( 316 | TypeInfo::from_str(&call_name), 317 | is_const, 318 | false 319 | )); 320 | } 321 | 322 | let maybe_function = memory.functions.get(&call_name); 323 | if let Some(function) = maybe_function { 324 | if !function.is_const { 325 | is_const = false; 326 | } 327 | for signature in &function.signatures { 328 | let correct_types = signature.params 329 | .iter() 330 | .map(|x| x.ty.clone()) 331 | .collect::>(); 332 | if arg_types != correct_types { 333 | continue; 334 | } 335 | let old_return_type = &signature.return_type; 336 | 337 | if let Some(primitive_type) = 
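// the signature's return type is generically sized: infer the concrete size
// from the generically sized arguments, which must all agree.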
old_return_type.is_generically_sized() { 338 | let generic_args: Vec<_> = correct_types 339 | .iter() 340 | .zip(arg_types.iter()) 341 | .filter_map(|(generic, arg)| { 342 | if generic.is_generically_sized().is_some() { Some(arg) } 343 | else { None } 344 | }) 345 | .collect(); 346 | 347 | let generic_size = generic_args[0].get_generic_size(); 348 | if generic_args.iter().all(|x| x.get_generic_size() == generic_size) { 349 | let ty = TypeInfo::from_pieces(primitive_type, generic_size.unwrap()); 350 | return Ok(ExpressionEvaluation::new(ty, is_const, false)); 351 | } 352 | } else if let Some(generic_size) = old_return_type.is_generically_typed() { 353 | let generic_args: Vec<_> = correct_types 354 | .iter() 355 | .zip(arg_types.iter()) 356 | .filter_map(|(generic, arg)| { 357 | if generic.is_generically_typed().is_some() { Some(arg) } 358 | else { None } 359 | }) 360 | .collect(); 361 | 362 | let generic_type = generic_args[0].get_generic_type(); 363 | if generic_args.iter().all(|x| x.get_generic_type() == generic_type) { 364 | let ty = TypeInfo::from_pieces(generic_type.unwrap(), generic_size); 365 | return Ok(ExpressionEvaluation::new(ty, is_const, false)); 366 | } 367 | } else { 368 | return Ok(ExpressionEvaluation::new(old_return_type.clone(), is_const, false)); 369 | } 370 | } 371 | let message = format!("Invalid arguments for function '{}'", call_name); 372 | Err(memory.alert_error(&message, range)) 373 | } else { 374 | let message = format!("Function '{}' does not exist.", call_name); 375 | Err(memory.alert_error(&message, range)) 376 | } 377 | } 378 | 379 | fn eval_conditional( 380 | memory: &mut Memory, 381 | conditional: ConditionalNode, 382 | range: lsp_types::Range 383 | ) -> ExprEvalResult { 384 | let condition_result = evaluate_expression(memory, *conditional.condition)?; 385 | if condition_result.type_info.base.as_str() != "bool" { 386 | let message = format!( 387 | "Condition type must be bool, not {}", 388 | condition_result.type_info.to_string() 389 | ); 390 | return Err(memory.alert_error(&message, range)); 391 | } 392 | let left_result = evaluate_expression(memory, *conditional.action)?; 393 | let right_result = evaluate_expression(memory, *conditional.alternate)?; 394 | let is_const = left_result.is_const && right_result.is_const && condition_result.is_const; 395 | 396 | if left_result.type_info!= right_result.type_info{ 397 | let message = format!( 398 | "Type mismatch: {} & {}", 399 | left_result.type_info.to_string(), 400 | right_result.type_info.to_string() 401 | ); 402 | Err(memory.alert_error(&message, range)) 403 | } else { 404 | Ok(ExpressionEvaluation::new(left_result.type_info, is_const, false)) 405 | } 406 | } 407 | 408 | fn eval_binary_expr( 409 | memory: &mut Memory, 410 | binary: BinaryNode, 411 | range: lsp_types::Range 412 | ) -> ExprEvalResult { 413 | let left = evaluate_expression(memory, *binary.left)?; 414 | let right = evaluate_expression(memory, *binary.right)?; 415 | let mut type_mismatch = || { 416 | let message = format!( 417 | "Type mismatch: {} & {}", 418 | left.type_info.to_string(), 419 | right.type_info.to_string() 420 | ); 421 | Err(memory.alert_error(&message, range)) 422 | }; 423 | let is_const = left.is_const && right.is_const; 424 | 425 | let matmul_check = if binary.op.kind == TokenKind::Star { 426 | match (left.type_info.to_string().as_str(), right.type_info.to_string().as_str()) { 427 | ("mat2", "vec2") => Some(right.type_info.clone()), 428 | ("mat3", "vec3") => Some(right.type_info.clone()), 429 | ("mat4", "vec4") => 
Some(right.type_info.clone()), 430 | ("mat2", "mat2") => Some(right.type_info.clone()), 431 | ("mat3", "mat3") => Some(right.type_info.clone()), 432 | ("mat4", "mat4") => Some(right.type_info.clone()), 433 | _ => None 434 | } 435 | } else { None }; 436 | let boolean_check = match binary.op.kind { 437 | TokenKind::And | TokenKind::Or => 438 | match (left.type_info.to_string().as_str(), right.type_info.to_string().as_str()) { 439 | ("bool", "bool") => Some(left.type_info.clone()), 440 | ("bvec2", "bvec2") => Some(left.type_info.clone()), 441 | ("bvec3", "bvec3") => Some(left.type_info.clone()), 442 | ("bvec4", "bvec4") => Some(left.type_info.clone()), 443 | _ => None 444 | }, 445 | _ => None, 446 | }; 447 | 448 | if let Some(ty) = matmul_check { 449 | Ok(ExpressionEvaluation::new(ty, is_const, false)) 450 | } else if let Some(ty) = boolean_check { 451 | Ok(ExpressionEvaluation::new(ty, is_const, false)) 452 | } else { 453 | let left_size = if let Some(x) = 454 | left.type_info.get_generic_size().map_or(None, |x| x.as_size()) { x } 455 | else { return type_mismatch() }; 456 | 457 | let right_size = if let Some(x) = 458 | right.type_info.get_generic_size().map_or(None, |x| x.as_size()) { x } 459 | else { return type_mismatch() }; 460 | 461 | let left_generic = if let Some(x) = left.type_info.get_generic_type() { x } 462 | else { return type_mismatch() }; 463 | 464 | let right_generic = if let Some(x) = right.type_info.get_generic_type() { x } 465 | else { return type_mismatch() }; 466 | 467 | let correct_type = if left_size == right_size { 468 | left.type_info.clone() 469 | } else { 470 | if left_size == 1 { 471 | right.type_info.clone() 472 | } else if right_size == 1 { 473 | left.type_info.clone() 474 | } else { 475 | return type_mismatch(); 476 | } 477 | }; 478 | if left_generic != right_generic { 479 | type_mismatch() 480 | } else { 481 | eval_operation(memory, binary.op.to_binary_op().unwrap(), correct_type, range) 482 | .map(|ty| ExpressionEvaluation::new(ty, is_const, false)) 483 | } 484 | } 485 | } 486 | 487 | fn eval_unary_expr( 488 | memory: &mut Memory, 489 | unary: UnaryNode, 490 | range: lsp_types::Range 491 | ) -> ExprEvalResult { 492 | let result = evaluate_expression(memory, *unary.arg)?; 493 | eval_operation(memory, unary.op.to_unary_op().unwrap(), result.type_info, range) 494 | .map(|ty| ExpressionEvaluation::new(ty, result.is_const, false)) 495 | } 496 | 497 | fn eval_identifier_expr( 498 | memory: &mut Memory, 499 | value: Token, 500 | range: lsp_types::Range 501 | ) -> ExprEvalResult { 502 | let identifier = memory.get_token_text(value); 503 | for scope in memory.scopes.collect_scopes() { 504 | if let Some(value) = scope.get(&identifier) { 505 | return Ok(ExpressionEvaluation::new( 506 | value.ty.clone(), 507 | value.is_const, 508 | true 509 | )) 510 | } 511 | } 512 | let message = format!("Identifier '{}' is undefined.", identifier); 513 | Err(memory.alert_error(&message, range)) 514 | } 515 | 516 | 517 | 518 | 519 | 520 | -------------------------------------------------------------------------------- /src/interpreter/mod.rs: -------------------------------------------------------------------------------- 1 | use lsp_types::Range; 2 | use regex::Regex; 3 | 4 | use crate::{ 5 | lexer::{OperationType, Token}, 6 | memory::{Memory, Primitive, TypeInfo}, 7 | nodes::{ExpressionNode, TypeNode, ValueNode} 8 | }; 9 | 10 | mod top_level_interpreter; 11 | mod expression_interpreter; 12 | mod statement_interpreter; 13 | pub use top_level_interpreter::*; 14 | pub use 
expression_interpreter::*; 15 | pub use statement_interpreter::*; 16 | 17 | 18 | #[derive(Debug)] 19 | pub enum EvaluateError { 20 | Warning, 21 | SemanticsError 22 | } 23 | 24 | pub type EvaluateResult = Result<(), EvaluateError>; 25 | 26 | pub fn ensure_valid_id( memory: &mut Memory, id: Token,) -> Result<(), EvaluateError> { 27 | let id_text = memory.get_token_text(id); 28 | if memory.is_id_in_use(&id_text) { 29 | let message = format!("Identifier '{}' is in use.", id_text); 30 | Err(memory.alert_error(&message, id.range)) 31 | } else { 32 | Ok(()) 33 | } 34 | } 35 | 36 | pub fn ensure_valid_type( 37 | memory: &mut Memory, 38 | ty: &TypeNode 39 | ) -> Result<(), EvaluateError> { 40 | if !memory.is_id_valid_type(&ty.info.base) { 41 | let message = format!("Invalid type '{}'.", ty.info.to_string()); 42 | Err(memory.alert_error(&message, ty.range)) 43 | } else { 44 | Ok(()) 45 | } 46 | } 47 | 48 | 49 | pub fn ensure_valid_value( 50 | memory: &mut Memory, 51 | value: &ValueNode, 52 | expression: Option, 53 | is_const: bool, 54 | ) -> EvaluateResult { 55 | ensure_valid_id(memory, value.identifier)?; 56 | ensure_valid_type(memory, &value.type_node)?; 57 | if let Some(expr) = expression { 58 | let expr_range = expr.range(); 59 | if let Ok(result) = evaluate_expression(memory, expr) { 60 | if result.type_info != value.type_node.info { 61 | let message = format!( 62 | "Type mismatch: '{}' and '{}'.", 63 | value.type_node.info.to_string(), 64 | result.type_info.to_string() 65 | ); 66 | memory.alert_error(&message, Range::new(value.range.start, expr_range.end)); 67 | } else if is_const && !result.is_const { 68 | let message = "Invalid constant expression."; 69 | memory.alert_error(&message, expr_range); 70 | } 71 | } 72 | } 73 | Ok(()) 74 | } 75 | 76 | pub fn eval_swizzle( 77 | swizzle: &str, 78 | base_type: Primitive, 79 | base_length: usize, 80 | ) -> Option { 81 | let vec2_regex: Regex = Regex::new(r"^(?:[xy]{1,4}|[rg]{1,4}|[st]{1,4})$").unwrap(); 82 | let vec3_regex: Regex = Regex::new(r"^(?:[xyz]{1,4}|[rgb]{1,4}|[stp]{1,4})$").unwrap(); 83 | let vec4_regex: Regex = Regex::new(r"^(?:[xyzw]{1,4}|[rgba]{1,4}|[stpq]{1,4})$").unwrap(); 84 | 85 | if !match base_length { 86 | 2 => vec2_regex.is_match(swizzle), 87 | 3 => vec3_regex.is_match(swizzle), 88 | 4 => vec4_regex.is_match(swizzle), 89 | _ => unreachable!() 90 | } { 91 | return None 92 | } 93 | 94 | let length = swizzle.len(); 95 | Some(match base_type { 96 | Primitive::Bool=> match length { 97 | 1 => TypeInfo::from_str("bool"), 98 | 2 => TypeInfo::from_str("bvec2"), 99 | 3 => TypeInfo::from_str("bvec3"), 100 | 4 => TypeInfo::from_str("bvec4"), 101 | _ => unreachable!() 102 | } 103 | Primitive::Uint => match length { 104 | 1 => TypeInfo::from_str("uint"), 105 | 2 => TypeInfo::from_str("uvec2"), 106 | 3 => TypeInfo::from_str("uvec3"), 107 | 4 => TypeInfo::from_str("uvec4"), 108 | _ => unreachable!() 109 | } 110 | Primitive::Int => match length { 111 | 1 => TypeInfo::from_str("int"), 112 | 2 => TypeInfo::from_str("ivec2"), 113 | 3 => TypeInfo::from_str("ivec3"), 114 | 4 => TypeInfo::from_str("ivec4"), 115 | _ => unreachable!() 116 | } 117 | Primitive::Float => match length { 118 | 1 => TypeInfo::from_str("float"), 119 | 2 => TypeInfo::from_str("vec2"), 120 | 3 => TypeInfo::from_str("vec3"), 121 | 4 => TypeInfo::from_str("vec4"), 122 | _ => unreachable!() 123 | } 124 | }) 125 | } 126 | 127 | 128 | pub fn eval_operation( 129 | memory: &mut Memory, 130 | op: OperationType, 131 | ty: TypeInfo, 132 | range: Range 133 | ) -> Result { 134 | match op { 
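// each operator class accepts only a limited family of operand types; any
// other combination is reported as an error at the operator's range.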
135 | OperationType::Number => { 136 | if ty == TypeInfo::from_str("vec_type") 137 | || ty == TypeInfo::from_str("ivec_type") 138 | || ty == TypeInfo::from_str("uvec_type") 139 | || ty == TypeInfo::from_str("bvec_type") 140 | || ty == TypeInfo::from_str("mat_type") { 141 | Ok(ty) 142 | } else { 143 | let message = format!("Invalid type for operation: {}", ty.to_string()); 144 | Err(memory.alert_error(&message, range)) 145 | } 146 | } 147 | OperationType::Int => { 148 | if ty == TypeInfo::from_str("ivec_type") 149 | || ty == TypeInfo::from_str("uvec_type") { 150 | Ok(ty) 151 | } else { 152 | let message = format!("Invalid type for operation: {}", ty.to_string()); 153 | Err(memory.alert_error(&message, range)) 154 | } 155 | } 156 | OperationType::Bool => { 157 | match ty.base.as_str() { 158 | "bool" => Ok(TypeInfo::from_str("bool")), 159 | _ => { 160 | let message = format!("Invalid type for operation: {}", ty.to_string()); 161 | Err(memory.alert_error(&message, range)) 162 | } 163 | } 164 | } 165 | OperationType::Comparison => { 166 | match ty.base.as_str() { 167 | "int" => Ok(TypeInfo::from_str("bool")), 168 | "uint" => Ok(TypeInfo::from_str("bool")), 169 | "float" => Ok(TypeInfo::from_str("bool")), 170 | _ => { 171 | let message = format!("Invalid type for operation: {}", ty.to_string()); 172 | Err(memory.alert_error(&message, range)) 173 | } 174 | } 175 | } 176 | OperationType::Equal => { 177 | Ok(TypeInfo::from_str("bool")) 178 | } 179 | } 180 | } 181 | 182 | -------------------------------------------------------------------------------- /src/interpreter/statement_interpreter.rs: -------------------------------------------------------------------------------- 1 | use crate::{lexer::Token, memory::*, nodes::*}; 2 | 3 | use super::{ensure_valid_value, evaluate_expression, EvaluateError}; 4 | 5 | pub fn eval_block( 6 | memory: &mut Memory, 7 | block: BlockNode, 8 | ) { 9 | for statement in block.statements { 10 | _ = evaluate_statement(memory, statement); 11 | } 12 | } 13 | 14 | pub fn evaluate_statement( 15 | memory: &mut Memory, 16 | statement: StatementNode, 17 | ) -> Result<(), EvaluateError> { 18 | match statement { 19 | StatementNode::VarDeclaration(x) => evaluate_var_declaration(memory, x), 20 | StatementNode::If(x) => evaluate_if_statement(x, memory), 21 | StatementNode::While(x) => evaluate_while_statement(x, memory), 22 | StatementNode::For(x) => evaluate_for_statement(x, memory), 23 | StatementNode::Switch(x) => evaluate_switch_statement(x, memory), 24 | StatementNode::Expression(x) => evaluate_expression(memory, *x).map(|_| ()), 25 | StatementNode::Continue(x) => evaluate_continue_statement(memory, x), 26 | StatementNode::Break(x) => evaluate_break_statement(memory, x), 27 | StatementNode::Return(x) => evaluate_return_statement(memory, x), 28 | StatementNode::Block(x) => { 29 | eval_block(memory, x); 30 | Ok(()) 31 | } 32 | } 33 | } 34 | 35 | fn evaluate_return_statement( 36 | memory: &mut Memory, 37 | node: ReturnNode 38 | ) -> Result<(), EvaluateError> { 39 | memory.scopes.set_actual_return_type(None); 40 | if let Some(expr) = node.expression { 41 | let result_range = expr.range(); 42 | let result = evaluate_expression(memory, *expr)?; 43 | memory.scopes.set_actual_return_type(Some(result.type_info.clone())); 44 | if let Some(return_type) = memory.scopes.get_expected_return_type() { 45 | if return_type != result.type_info { 46 | let message = format!( 47 | "Invalid return type, expected '{}'", 48 | return_type.to_string() 49 | ); 50 | return Err(memory.alert_error(&message, 
result_range)) 51 | } 52 | } 53 | } 54 | Ok(()) 55 | } 56 | 57 | fn evaluate_break_statement( 58 | memory: &mut Memory, 59 | node: Token 60 | ) -> Result<(), EvaluateError> { 61 | if memory.scopes.scope_type() != ScopeType::Loop { 62 | let message = "Cannot use 'break' outside a loop."; 63 | Err(memory.alert_error(message, node.range)) 64 | } else { 65 | Ok(()) 66 | } 67 | } 68 | 69 | fn evaluate_continue_statement( 70 | memory: &mut Memory, 71 | node: Token 72 | ) -> Result<(), EvaluateError> { 73 | if memory.scopes.scope_type() != ScopeType::Loop { 74 | let message = "Cannot use 'continue' outside a loop."; 75 | Err(memory.alert_error(message, node.range)) 76 | } else { 77 | Ok(()) 78 | } 79 | } 80 | 81 | fn evaluate_switch_statement( 82 | node: SwitchNode, 83 | memory: &mut Memory 84 | ) -> Result<(), EvaluateError> { 85 | let range = node.condition.range(); 86 | if let Ok(expr) = evaluate_expression(memory, *node.condition) { 87 | if expr.type_info != TypeInfo::from_str("int") && 88 | expr.type_info != TypeInfo::from_str("uint") { 89 | memory.alert_error("Switch condition must be an integer.", range); 90 | } 91 | } 92 | 93 | 94 | for case in node.cases { 95 | for statement in case.statements { 96 | _ = evaluate_statement(memory, statement); 97 | } 98 | } 99 | Ok(()) 100 | } 101 | 102 | fn evaluate_for_statement( 103 | node: ForNode, 104 | memory: &mut Memory 105 | ) -> Result<(), EvaluateError> { 106 | let block_range = match *node.action { 107 | StatementNode::Block(ref b) => Some(b.range), 108 | _ => None 109 | }; 110 | 111 | if let StatementNode::VarDeclaration(initializer) = *node.initializer { 112 | if let Some(range) = block_range { 113 | memory.scopes.enter_scope(ScopeType::Loop, range) 114 | } 115 | evaluate_var_declaration(memory, initializer)?; 116 | } else { 117 | let message = "Left expression of a for loop must be a variable declaration."; 118 | return Err(memory.alert_error(message, node.keyword.range)); 119 | } 120 | 121 | let condition_range = node.condition.range(); 122 | if let Ok(condition_result) = evaluate_expression(memory, *node.condition) { 123 | if condition_result.type_info != TypeInfo::from_str("bool") { 124 | let message = "For loop condition must be a boolean expression."; 125 | memory.alert_error(message, condition_range); 126 | } 127 | } 128 | _ = evaluate_expression(memory, *node.update)?; 129 | _ = evaluate_statement(memory, *node.action); 130 | if block_range.is_some() { 131 | memory.scopes.leave_scope(); 132 | } 133 | 134 | Ok(()) 135 | } 136 | 137 | fn evaluate_while_statement( 138 | node: WhileNode, 139 | memory: &mut Memory 140 | ) -> Result<(), EvaluateError> { 141 | let condition_range = node.condition.range(); 142 | let condition_result = evaluate_expression(memory, *node.condition)?; 143 | if condition_result.type_info != TypeInfo::from_str("bool") { 144 | let message = "While loop condition must be a boolean expression."; 145 | memory.alert_error(message, condition_range); 146 | } 147 | let block_range = match *node.action { 148 | StatementNode::Block(ref b) => Some(b.range), 149 | _ => None 150 | }; 151 | if let Some(range) = block_range { 152 | memory.scopes.enter_scope(ScopeType::Loop, range) 153 | } 154 | evaluate_statement(memory, *node.action)?; 155 | if block_range.is_some() { 156 | memory.scopes.leave_scope(); 157 | } 158 | Ok(()) 159 | } 160 | 161 | fn evaluate_if_statement( 162 | node: IfNode, 163 | memory: &mut Memory 164 | ) -> Result<(), EvaluateError> { 165 | let condition_range = node.condition.range(); 166 | let 
condition_result = evaluate_expression(memory, *node.condition)?; 167 | if condition_result.type_info != TypeInfo::from_str("bool") { 168 | eprintln!("this is what it was {:?}", condition_result.type_info); 169 | let message = "If statement condition must be a boolean expression."; 170 | memory.alert_error(message, condition_range); 171 | } 172 | let mut block_range = match *node.action { 173 | StatementNode::Block(ref b) => Some(b.range), 174 | _ => None 175 | }; 176 | if let Some(range) = block_range { 177 | memory.scopes.enter_scope(ScopeType::Loop, range) 178 | } 179 | _ = evaluate_statement(memory, *node.action); 180 | if block_range.is_some() { 181 | memory.scopes.leave_scope(); 182 | } 183 | 184 | if let Some(x) = node.alternate { 185 | block_range = match *x.action { 186 | StatementNode::Block(ref b) => Some(b.range), 187 | _ => None 188 | }; 189 | if let Some(range) = block_range { 190 | memory.scopes.enter_scope(ScopeType::Loop, range) 191 | } 192 | _ = evaluate_statement(memory, *x.action); 193 | if block_range.is_some() { 194 | memory.scopes.leave_scope(); 195 | } 196 | } 197 | Ok(()) 198 | } 199 | 200 | fn evaluate_var_declaration( 201 | memory: &mut Memory, 202 | node: VarDeclarationNode, 203 | ) -> Result<(), EvaluateError> { 204 | let value = &node.value; 205 | 206 | let expression = node.expression; 207 | ensure_valid_value(memory, value, expression.map(|x| *x), node.is_const)?; 208 | 209 | let name = memory.get_token_text(value.identifier); 210 | let ty = value.type_node.info.clone(); 211 | let range = value.range; 212 | let (is_const, editable) = (node.is_const, !node.is_const); 213 | memory.scopes.insert( name, ValueInfo { 214 | ty, 215 | is_const, 216 | editable, 217 | range: Some(range), 218 | description: None 219 | }); 220 | Ok(()) 221 | } 222 | 223 | 224 | -------------------------------------------------------------------------------- /src/interpreter/top_level_interpreter.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashSet, fs, mem}; 2 | 3 | use crate::{evaluate_tree, lexer::TokenStream, memory::*, nodes::*, parse_tokens, parser::parse_int}; 4 | 5 | use super::*; 6 | 7 | pub fn evaluate_top_level_node( 8 | node: TopLevelNode, 9 | memory: &mut Memory, 10 | ) -> Result<(), EvaluateError> { 11 | match node { 12 | TopLevelNode::ShaderType(x) => evaluate_shader_type(memory, x), 13 | TopLevelNode::RenderMode(x) => evaluate_render_mode(memory, x), 14 | TopLevelNode::GroupUniforms(_) => Ok(()), 15 | TopLevelNode::Uniform(x) => evaluate_uniform(memory, x), 16 | TopLevelNode::Const(x) => evaluate_const(memory, x), 17 | TopLevelNode::Varying(x) => evaluate_varying(memory, x), 18 | TopLevelNode::Struct(x) => evaluate_struct(memory, x), 19 | TopLevelNode::Function(x) => evaluate_function(memory, x), 20 | TopLevelNode::Include(x) => evaluate_include(memory, x), 21 | } 22 | } 23 | 24 | pub fn evaluate_shader_type(memory: &mut Memory, node: ShaderTypeNode) -> EvaluateResult { 25 | let shader_type_slice = memory.get_token_text(node.shader_type); 26 | match shader_type_slice.as_str() { 27 | "spatial" => { 28 | memory.shader_type = ShaderType::Spatial; 29 | memory.valid_render_modes = spatial_render_modes(); 30 | } 31 | "canvas_item" => { 32 | memory.shader_type = ShaderType::CanvasItem; 33 | memory.valid_render_modes = canvas_item_render_modes(); 34 | } 35 | "particles" => { 36 | memory.shader_type = ShaderType::Particles; 37 | memory.valid_render_modes = particle_render_modes(); 38 | } 39 | "sky" => { 40 | 
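// sky shaders get sky-specific render modes plus their own built-in variables.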
memory.shader_type = ShaderType::Sky; 41 | memory.valid_render_modes = sky_render_modes(); 42 | memory.scopes.extend(sky_builtins()); 43 | } 44 | "fog" => { 45 | memory.shader_type = ShaderType::Fog; 46 | } 47 | _ => return Err(memory.alert_error("Invalid Shader Type.", node.shader_type.range)) 48 | } 49 | Ok(()) 50 | } 51 | 52 | pub fn evaluate_render_mode(memory: &mut Memory, node: RenderModeNode) -> EvaluateResult { 53 | for render_mode in node.render_modes { 54 | let string = memory.get_token_text(render_mode); 55 | if memory.valid_render_modes.get(&string).is_none() { 56 | return Err(memory.alert_error("Invalid Render Mode.", render_mode.range)); 57 | } 58 | } 59 | Ok(()) 60 | } 61 | 62 | pub fn evaluate_const(memory: &mut Memory, node: ConstNode) -> EvaluateResult { 63 | let value = &node.value; 64 | let expression = Some(node.expression); 65 | ensure_valid_value(memory, value, expression.map(|x| *x), true)?; 66 | 67 | let name = memory.get_token_text(value.identifier); 68 | let ty = value.type_node.info.clone(); 69 | let range = value.range; 70 | let (is_const, editable) = (true, false); 71 | memory.scopes.insert( name, ValueInfo { 72 | ty, 73 | is_const, 74 | editable, 75 | range: Some(range), 76 | description: None 77 | }); 78 | Ok(()) 79 | } 80 | 81 | pub fn evaluate_varying(memory: &mut Memory, node: VaryingNode) -> EvaluateResult { 82 | let value = &node.value; 83 | ensure_valid_value(memory, value, None, true)?; 84 | 85 | let name = memory.get_token_text(value.identifier); 86 | let ty = value.type_node.info.clone(); 87 | let range = value.range; 88 | let (is_const, editable) = (false, true); 89 | memory.scopes.insert( name, ValueInfo { 90 | ty, 91 | is_const, 92 | editable, 93 | range: Some(range), 94 | description: None 95 | }); 96 | Ok(()) 97 | } 98 | 99 | pub fn evaluate_uniform(memory: &mut Memory, node: UniformNode) -> EvaluateResult { 100 | let value = &node.value; 101 | ensure_valid_value(memory, value, node.expression.map(|x| *x), true)?; 102 | 103 | let name = memory.get_token_text(value.identifier); 104 | let ty = value.type_node.info.clone(); 105 | let range = value.range; 106 | 107 | if let Some(hint) = node.hint { 108 | let name = memory.get_token_text(hint.identifier); 109 | let maybe_hint_info = memory.hints.get(&name).map(|x| x.clone()); 110 | if let Some(hint_info) = maybe_hint_info { 111 | if !hint_info.num_arguments.contains(&hint.params.map_or(0, |x| x.len())) { 112 | memory.alert_error("Invalid number of hint args.", hint.identifier.range); 113 | } 114 | if !(hint_info.type_info.contains(&ty)) { 115 | let valid_hints = hint_info.type_info.iter() 116 | .map(|x| format!("'{}'", x.to_string())) 117 | .collect::>() 118 | .join(" or "); 119 | let message = format!("Hint expects type {}.", valid_hints); 120 | memory.alert_error(&message, hint.identifier.range); 121 | } 122 | } else { 123 | memory.alert_error("Invalid hint.", hint.identifier.range); 124 | } 125 | } 126 | 127 | let (is_const, editable) = (false, false); 128 | memory.scopes.insert( name, ValueInfo { 129 | ty, 130 | is_const, 131 | editable, 132 | range: Some(range), 133 | description: None 134 | }); 135 | Ok(()) 136 | } 137 | 138 | pub fn evaluate_struct(memory: &mut Memory, node: StructNode) -> EvaluateResult { 139 | ensure_valid_id(memory, node.identifier)?; 140 | 141 | let mut fields: Vec = vec![]; 142 | for field in &node.fields { 143 | let field_name = memory.get_token_text(field.identifier); 144 | if fields.iter().any(|x| x.name == field_name) { 145 | let message = format!("Duplicate field: 
'{}'", field_name); 146 | return Err(memory.alert_error(&message, field.range)); 147 | } 148 | ensure_valid_type(memory, &field.type_node)?; 149 | fields.push(StructField{ 150 | name: field_name, 151 | ty: field.type_node.info.clone(), 152 | range: field.range 153 | }) 154 | } 155 | memory.structs.insert( 156 | memory.get_token_text(node.identifier), StructInfo { 157 | fields, 158 | range: node.identifier.range 159 | } 160 | ); 161 | Ok(()) 162 | } 163 | 164 | pub fn evaluate_function( 165 | memory: &mut Memory, 166 | node: FunctionNode 167 | ) -> Result<(), EvaluateError> { 168 | let function_name = memory.get_token_text(node.identifier); 169 | ensure_valid_id(memory, node.identifier)?; 170 | let mut params: Vec<(FunctionParam, Range)> = vec![]; 171 | for param in &node.params { 172 | let param_name = memory.get_token_text(param.value_node.identifier); 173 | if params.iter().any(|x| x.0.name == param_name) { 174 | let message = format!("Duplicate param: '{}'", param_name); 175 | return Err(memory.alert_error(&message, param.value_node.range)); 176 | } 177 | ensure_valid_type(memory, ¶m.value_node.type_node)?; 178 | params.push((FunctionParam { 179 | name: param_name, 180 | ty: param.value_node.type_node.info.clone(), 181 | qualifier: param.qualifier.clone().map(|x| FunctionParamQualifier::from(x)) 182 | }, param.value_node.range)) 183 | } 184 | 185 | let expected = match node.type_node.info.clone() { 186 | ty if ty.base == "void" => None, 187 | ty => Some(ty) 188 | }; 189 | let function_return = FunctionReturn { 190 | expected, 191 | returned: None 192 | }; 193 | let scope_type = ScopeType::Function(Box::new(function_return)); 194 | 195 | memory.scopes.enter_scope(scope_type, node.block.range); 196 | for param in ¶ms { 197 | memory.scopes.insert( 198 | param.0.name.clone(), 199 | ValueInfo { 200 | ty: param.0.ty.clone(), 201 | is_const: false, 202 | editable: true, 203 | range: Some(param.1), 204 | description: None 205 | } 206 | ); 207 | } 208 | match memory.shader_type { 209 | ShaderType::Spatial => match function_name.as_str() { 210 | "vertex" => memory.scopes.extend(spatial_vertex_vars()), 211 | "fragment" => memory.scopes.extend(spatial_fragment_vars()), 212 | "light" => memory.scopes.extend(spatial_light_vars()), 213 | _ => {} 214 | } 215 | ShaderType::CanvasItem => match function_name.as_str() { 216 | "vertex" => memory.scopes.extend(canvas_item_vertex_vars()), 217 | "fragment" => memory.scopes.extend(canvas_item_fragment_vars()), 218 | "light" => memory.scopes.extend(canvas_item_light_vars()), 219 | _ => {} 220 | } 221 | ShaderType::Particles => match function_name.as_str() { 222 | "start" => { 223 | memory.scopes.extend(particle_start_process()); 224 | memory.scopes.extend(particle_start()); 225 | } 226 | "process" => { 227 | memory.scopes.extend(particle_start_process()); 228 | memory.scopes.extend(particle_process()); 229 | } 230 | _ => {} 231 | } 232 | ShaderType::Sky => if function_name == "fog" { 233 | memory.scopes.extend(sky_stuff()) 234 | } 235 | ShaderType::Fog => if function_name == "sky" { 236 | memory.scopes.extend(fog_stuff()) 237 | }, 238 | } 239 | eval_block(memory, node.block); 240 | if !memory.scopes.assert_returned() { 241 | let message = format!("Expected return type '{}'", node.type_node.info.to_string()); 242 | _ = memory.alert_error(&message, node.identifier.range); 243 | } 244 | memory.scopes.leave_scope(); 245 | 246 | memory.functions.insert( 247 | function_name, FunctionInfo { 248 | signatures: vec![ 249 | FunctionSignature { 250 | return_type: 
node.type_node.info, 251 | params: params.into_iter().map(|(x, _)| x).collect() 252 | } 253 | ], 254 | range: Some(node.identifier.range), 255 | description: None, 256 | is_const: false 257 | } 258 | ); 259 | Ok(()) 260 | } 261 | 262 | fn evaluate_include(memory: &mut Memory, node: IncludeNode) -> Result<(), EvaluateError> { 263 | let root = if let Some(root) = memory.root_dir.clone() { 264 | root 265 | } else { 266 | return Ok(()); 267 | }; 268 | let include_path = memory.get_token_text(node.path) 269 | .replace("res://", &root) 270 | .trim_matches(|x| x == '"').to_string(); 271 | 272 | let include_text = if let Ok(text) = fs::read_to_string(include_path.clone()) { 273 | text 274 | } else { 275 | let message = "invalid shader include directory"; 276 | return Err(memory.alert_error(message, node.path.range)); 277 | }; 278 | 279 | let mut include_stream = TokenStream::new(&include_text, None); 280 | let mut include_memory = Memory::new(&include_text, None); 281 | let include_tree = parse_tokens(&mut include_stream); 282 | evaluate_tree(&mut include_memory, include_tree); 283 | 284 | for (key, value) in &include_memory.scopes.scopes[0].values { 285 | memory.scopes.insert(key.clone(), value.clone()); 286 | } 287 | for (key, value) in &include_memory.functions { 288 | memory.functions.insert(key.clone(), value.clone()); 289 | } 290 | 291 | Ok(()) 292 | } 293 | 294 | 295 | 296 | 297 | 298 | 299 | -------------------------------------------------------------------------------- /src/lexer/mod.rs: -------------------------------------------------------------------------------- 1 | mod token; 2 | mod token_stream; 3 | mod operations; 4 | 5 | pub use token::*; 6 | pub use token_stream::*; 7 | pub use operations::*; 8 | -------------------------------------------------------------------------------- /src/lexer/operations.rs: -------------------------------------------------------------------------------- 1 | use crate::lexer::{Token, TokenKind}; 2 | use TokenKind::*; 3 | 4 | pub enum OperationType { 5 | Comparison, 6 | Equal, 7 | Number, 8 | Int, 9 | Bool, 10 | } 11 | 12 | pub trait MaybeOperator { 13 | fn to_unary_op(&self) -> Option; 14 | fn to_binary_op(&self) -> Option; 15 | fn to_assignment_op(&self) -> Option; 16 | } 17 | impl MaybeOperator for Token { 18 | fn to_unary_op(&self) -> Option { 19 | match self.kind { 20 | Plus | Dash => Some(OperationType::Number), 21 | Bang => Some(OperationType::Bool), 22 | Tilde => Some(OperationType::Int), 23 | _ => None 24 | } 25 | } 26 | 27 | fn to_binary_op(&self) -> Option { 28 | match self.kind { 29 | LeftOp | RightOp | Percent | VerticalBar | Caret | Ampersand => 30 | Some(OperationType::Int), 31 | LeqOp | GeqOp | LeftAngle | RightAngle | And | Or => 32 | Some(OperationType::Comparison), 33 | Plus | Dash | Star | Slash => 34 | Some(OperationType::Number), 35 | EqOp | NeqOp => 36 | Some(OperationType::Equal), 37 | _ => None 38 | } 39 | } 40 | 41 | fn to_assignment_op(&self) -> Option { 42 | match self.kind { 43 | AddAssign | SubAssign | MulAssign | DivAssign | ModAssign => 44 | Some(OperationType::Number), 45 | Equal => Some(OperationType::Equal), 46 | _ => None 47 | } 48 | } 49 | } 50 | 51 | 52 | -------------------------------------------------------------------------------- /src/lexer/token.rs: -------------------------------------------------------------------------------- 1 | use std::usize; 2 | 3 | use lsp_types::{Position, Range}; 4 | use strum_macros::AsRefStr; 5 | use logos::{skip, Logos, Skip, Source}; 6 | 7 | #[derive(Clone, Copy, Debug)] 8 | 
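// a token stores only its kind and the LSP range it covers; the underlying
// text is re-sliced from the source on demand via Token::text.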
pub struct Token { 9 | pub kind: TokenKind, 10 | pub range: Range 11 | } 12 | 13 | impl Token { 14 | pub fn as_ref(&self) -> &str { 15 | self.kind.as_ref() 16 | } 17 | 18 | pub fn text(&self, source: &String) -> String { 19 | let line = &source.lines().nth(self.range.start.line as usize); 20 | if let Some(line) = line { 21 | let start = self.range.start.character as usize; 22 | let end = self.range.end.character as usize; 23 | let span = start .. end; 24 | 25 | line.slice(span).map_or("".to_string(), |x| x.to_string()) 26 | } else { 27 | "".to_string() 28 | } 29 | } 30 | } 31 | 32 | pub trait ExtraRange { 33 | fn contains_position(&self, position: Position) -> bool; 34 | fn preceeds_position(&self, position: Position) -> bool; 35 | } 36 | impl ExtraRange for Range { 37 | fn contains_position(&self, position: Position) -> bool { 38 | if position.line > self.start.line && 39 | position.line < self.end.line { return true } 40 | 41 | let after_start = position.character >= self.start.character; 42 | let before_end = position.character <= self.end.character; 43 | let on_start_line = position.line == self.start.line; 44 | let on_end_line = position.line == self.end.line; 45 | 46 | if on_start_line && on_end_line { 47 | return after_start && before_end 48 | } 49 | 50 | if on_start_line { 51 | return after_start; 52 | } 53 | if on_end_line { 54 | return before_end; 55 | } 56 | 57 | false 58 | } 59 | 60 | fn preceeds_position(&self, position: Position) -> bool { 61 | if position.character == 0 { return false } 62 | position.line > self.end.line || 63 | (position.line == self.end.line && 64 | position.character - 1 >= self.end.character) 65 | } 66 | } 67 | 68 | #[derive(AsRefStr, Copy, Clone, Debug, Eq, Logos, PartialEq)] 69 | #[logos(extras=Vec, skip r"[ \t\f]+")] 70 | pub enum TokenKind { 71 | // Preprocessors 72 | #[token("#include")] Include, 73 | // Top Level Keywords 74 | #[token("shader_type")] ShaderType, 75 | #[token("render_mode")] RenderMode, 76 | #[token("group_uniforms")] GroupUniforms, 77 | #[token("global")] Global, 78 | #[token("instance")] Instance, 79 | #[token("const")] Const, 80 | #[token("uniform")] Uniform, 81 | #[token("varying")] Varying, 82 | #[token("struct")] Struct, 83 | #[token("void")] Void, 84 | // Keywords 85 | #[token("break")] Break, 86 | #[token("continue")] Continue, 87 | #[token("while")] While, 88 | #[token("do")] Do, 89 | #[token("else")] Else, 90 | #[token("for")] For, 91 | #[token("if")] If, 92 | #[token("discard")] Discard, 93 | #[token("return")] Return, 94 | #[token("switch")] Switch, 95 | #[token("case")] Case, 96 | #[token("default")] Default, 97 | #[token("in")] In, 98 | #[token("out")] Out, 99 | #[token("inout")] InOut, 100 | #[regex("flat|smooth")] Interpolation, 101 | #[regex("lowp|mediump|highp")] Precision, 102 | // Double Operations 103 | #[token("+=")] AddAssign, 104 | #[token("-=")] SubAssign, 105 | #[token("*=")] MulAssign, 106 | #[token("/=")] DivAssign, 107 | #[token("%=")] ModAssign, 108 | #[token("++")] Increment, 109 | #[token("--")] Decrement, 110 | #[token("&&")] And, 111 | #[token("||")] Or, 112 | #[token("<<")] LeftOp, 113 | #[token(">>")] RightOp, 114 | #[token("<=")] LeqOp, 115 | #[token(">=")] GeqOp, 116 | #[token("==")] EqOp, 117 | #[token("!=")] NeqOp, 118 | // Single Operations 119 | #[token("=")] Equal, 120 | #[token("!")] Bang, 121 | #[token("-")] Dash, 122 | #[token("~")] Tilde, 123 | #[token("+")] Plus, 124 | #[token("*")] Star, 125 | #[token("/")] Slash, 126 | #[token("%")] Percent, 127 | #[token("<")] LeftAngle, 128 | 
#[token(">")] RightAngle, 129 | #[token("|")] VerticalBar, 130 | #[token("^")] Caret, 131 | #[token("&")] Ampersand, 132 | #[token("?")] Question, 133 | // Punctiation 134 | #[token("(")] LeftParen, 135 | #[token(")")] RightParen, 136 | #[token("[")] LeftBracket, 137 | #[token("]")] RightBracket, 138 | #[token("{")] LeftBrace, 139 | #[token("}")] RightBrace, 140 | #[token(".")] Dot, 141 | #[token(",")] Comma, 142 | #[token(":")] Colon, 143 | #[token(";")] Semicolon, 144 | // Other 145 | #[regex("[a-zA-Z_][a-zA-Z0-9_]*")] 146 | Identifier, 147 | #[regex("[0-9]+")] 148 | IntConstant, 149 | #[regex("[0-9]+u")] 150 | UintConstant, 151 | #[regex("[0-9]*\\.[0-9]+|[0-9]+\\.[0-9]*(f)?")] 152 | FloatConstant, 153 | #[regex("true|false")] 154 | BoolConstant, 155 | #[regex("\"(?:[^\"]|\\\\\")*\"")] 156 | String, 157 | #[regex("//[^\n]*", skip)] 158 | #[regex("/\\*(?:[^*]|\\*[^/])*\\*/", skip)] 159 | Comment, 160 | #[regex(r"\n", newline_callback)] 161 | Newline, 162 | Error 163 | } 164 | 165 | fn newline_callback(lex: &mut logos::Lexer) -> Skip { 166 | lex.extras.push(lex.span().end); 167 | Skip 168 | } 169 | 170 | -------------------------------------------------------------------------------- /src/lexer/token_stream.rs: -------------------------------------------------------------------------------- 1 | use logos::{Logos, Lexer}; 2 | use lsp_types::{Position, Range}; 3 | use crate::{completion::CompletionElement, source_code::SourceDocument}; 4 | use super::{ExtraRange, Token, TokenKind}; 5 | 6 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] 7 | pub enum TokenError { 8 | EofError, 9 | SyntaxError 10 | } 11 | 12 | pub type TokenResult = Result; 13 | 14 | pub struct TokenStream { 15 | current_idx: usize, 16 | pub tokens: Vec, 17 | source: SourceDocument, 18 | cursor: Option, 19 | pub cursor_element: CompletionElement, 20 | cursor_element_queue: CompletionElement, 21 | pub parsing_const: bool, 22 | pub parsing_statement: bool, 23 | record_errors: bool, 24 | } 25 | 26 | impl TokenStream { 27 | pub fn new(source_str: &str, cursor: Option) -> Self { 28 | let mut lexer = ::lexer(&source_str); 29 | let mut tokens = vec![]; 30 | let mut next_kind = lexer.next(); 31 | let mut range = get_lexer_current_range(&lexer); 32 | 33 | while let Some(maybe_kind) = next_kind { 34 | match maybe_kind { 35 | Ok(kind) => tokens.push(Token { kind, range }), 36 | Err(_) => tokens.push(Token { kind: TokenKind::Error, range }), 37 | } 38 | next_kind = lexer.next(); 39 | range = get_lexer_current_range(&lexer); 40 | }; 41 | 42 | let source = SourceDocument::new(source_str); 43 | 44 | Self { 45 | current_idx: 0, 46 | tokens, 47 | source, 48 | cursor, 49 | cursor_element: CompletionElement::TopLevelKeyword, 50 | cursor_element_queue: CompletionElement::TopLevelKeyword, 51 | parsing_const: false, 52 | parsing_statement: false, 53 | record_errors: true, 54 | } 55 | } 56 | 57 | pub fn destroy(self) -> (Vec, SourceDocument) { 58 | (self.tokens, self.source) 59 | } 60 | 61 | pub fn get_source(&self) -> &SourceDocument { 62 | &self.source 63 | } 64 | 65 | pub fn current(&self) -> TokenResult { 66 | match self.tokens.get(self.current_idx) { 67 | Some(&x) => Ok(x), 68 | _ => Err(TokenError::EofError) 69 | } 70 | } 71 | 72 | pub fn current_idx(&self) -> usize { 73 | self.current_idx 74 | } 75 | 76 | pub fn turn_off_errors(&mut self) { 77 | self.record_errors = false; 78 | } 79 | 80 | pub fn turn_on_errors(&mut self) { 81 | self.record_errors = true; 82 | } 83 | 84 | pub fn force_change_position(&mut self, idx: usize) { 85 | 
self.current_idx = idx; 86 | } 87 | 88 | pub fn queue_cursor_element( 89 | &mut self, 90 | element: CompletionElement 91 | ) { 92 | self.cursor_element_queue = element; 93 | } 94 | 95 | pub fn advance(&mut self) { 96 | if let Some(cursor) = self.cursor { 97 | if let Ok(current) = self.current() { 98 | if current.range.contains_position(cursor) { 99 | self.cursor_element = self.cursor_element_queue.clone(); 100 | } 101 | }; 102 | } 103 | self.current_idx += 1; 104 | self.parsing_statement = false; 105 | } 106 | 107 | pub fn retreat(&mut self) { 108 | self.current_idx -= 1; 109 | } 110 | 111 | pub fn advance_with(&mut self, data: T) -> T { 112 | let current = self.current(); 113 | self.advance(); 114 | data 115 | } 116 | 117 | pub fn consume(&mut self) -> TokenResult { 118 | let current = self.current(); 119 | self.advance(); 120 | current 121 | } 122 | 123 | pub fn consume_if(&mut self, f: F) -> Option 124 | where 125 | F: Fn(Token) -> bool, 126 | { 127 | match self.current() { 128 | Ok(x) => if f(x) { self.advance(); Some(x) } else { None }, 129 | _ => None, 130 | } 131 | } 132 | 133 | pub fn consume_token_kind(&mut self, expected: TokenKind) -> TokenResult { 134 | let message = format!("Expected {}", expected.as_ref()); 135 | match self.current()? { 136 | e if e.kind == expected => self.consume(), 137 | e => Err(self.alert_error(&message, e.range)) 138 | } 139 | } 140 | 141 | pub fn alert_error(&mut self, msg: &str, range: Range) -> TokenError { 142 | if !self.record_errors { 143 | return TokenError::SyntaxError; 144 | } 145 | let message = format!("Syntax Error: {}", msg); 146 | self.source.push_error(msg, range, TokenError::SyntaxError) 147 | } 148 | 149 | pub fn alert_error_with(&mut self, msg: &str, range: Range, data: T) -> T { 150 | if !self.record_errors { 151 | return data; 152 | } 153 | let message = format!("Syntax Error: {}", msg); 154 | self.source.push_error(msg, range, TokenError::SyntaxError); 155 | data 156 | } 157 | 158 | pub fn find_cursor_text(&self) -> Option { 159 | if let Some(cursor) = self.cursor { 160 | for token in &self.tokens { 161 | if token.range.contains_position(cursor) { 162 | return Some(token.text(&self.source.get_code())) 163 | } 164 | } 165 | } 166 | None 167 | } 168 | } 169 | 170 | fn get_lexer_current_range<'a>(lexer: &Lexer<'a, TokenKind>) -> Range { 171 | let span = lexer.span(); 172 | let lines = &lexer.extras; 173 | let current_line = lines.iter().filter(|&&c| c <= span.start).count(); 174 | let last_line_end = if current_line == 0 { 0 } else { lines[current_line - 1] }; 175 | let current_character = span.start - last_line_end; 176 | let last_character = current_character + span.len(); 177 | 178 | Range::new( 179 | Position::new(current_line as u32, current_character as u32), 180 | Position::new(current_line as u32, last_character as u32) 181 | ) 182 | } 183 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | use interpreter::evaluate_top_level_node; 2 | use lexer::TokenStream; 3 | use lsp_types::{Position, TextDocumentContentChangeEvent}; 4 | use memory::Memory; 5 | use nodes::TopLevelNode; 6 | use parser::parse_top_level; 7 | 8 | pub mod lexer; 9 | pub mod source_code; 10 | pub mod nodes; 11 | pub mod parser; 12 | pub mod interpreter; 13 | pub mod memory; 14 | pub mod completion; 15 | 16 | pub fn parse_tokens( 17 | stream: &mut TokenStream, 18 | ) -> Vec { 19 | let mut nodes = vec![]; 20 | loop { 21 | match 
parse_top_level(stream) { 22 | Ok(maybe_node) => { 23 | if let Some(node) = maybe_node { 24 | nodes.push(node); 25 | } 26 | } 27 | _ => break 28 | } 29 | } 30 | nodes 31 | } 32 | 33 | pub fn evaluate_tree( 34 | memory: &mut Memory, 35 | top_levels: Vec 36 | ) { 37 | for top_level in top_levels { 38 | _ = evaluate_top_level_node(top_level, memory); 39 | } 40 | } 41 | pub fn apply_change(source: &mut String, change: &TextDocumentContentChangeEvent){ 42 | if let Some(range) = change.range { 43 | let start_byte = get_byte_offset_from_position(source, range.start); 44 | let end_byte = get_byte_offset_from_position(source, range.end); 45 | let new_text = &change.text; 46 | if start_byte <= source.len() && end_byte <= source.len() && start_byte <= end_byte { 47 | source.replace_range(start_byte..end_byte, new_text); 48 | } 49 | } 50 | } 51 | 52 | pub fn get_byte_offset_from_position(source: &String, position: Position) -> usize { 53 | let mut byte_offset = 0; 54 | for (i, line) in source.lines().enumerate() { 55 | if i as u32 == position.line { 56 | byte_offset += position.character as usize; 57 | break; 58 | } 59 | byte_offset += line.len() + 1; 60 | } 61 | byte_offset 62 | } 63 | 64 | pub fn calculate_new_end_position(start: Position, new_text: &str) -> Position { 65 | let new_lines = new_text.lines().collect::>(); 66 | let line_count = new_lines.len(); 67 | 68 | let new_end_line = if line_count > 1 { 69 | start.line + line_count as u32 - 1 70 | } else { 71 | start.line 72 | }; 73 | let new_end_character = if line_count > 1 { 74 | new_lines.last().unwrap().len() as u32 75 | } else { 76 | start.character + new_text.len() as u32 77 | }; 78 | 79 | Position { 80 | line: new_end_line, 81 | character: new_end_character, 82 | } 83 | } 84 | 85 | pub const DID_OPEN: &'static str = "textDocument/didOpen"; 86 | pub const DID_CHANGE: &'static str = "textDocument/didChange"; 87 | pub const DID_CLOSE: &'static str = "textDocument/didClose"; 88 | pub const DID_SAVE: &'static str = "textDocument/didSave"; 89 | pub const DID_MOUSE_MOVE: &'static str = "textDocument/didSave"; 90 | pub const PUBLISH_DIAGNOSTICS: &'static str = "textDocument/publishDiagnostics"; 91 | pub const INITIALIZE: &'static str = "initialize"; 92 | pub const SHUTDOWN: &'static str = "shutdown"; 93 | pub const EXIT: &'static str = "exit"; 94 | pub const COMPLETION: &'static str = "textDocument/completion"; 95 | pub const HOVER: &'static str = "textDocument/hover"; 96 | pub const DEFINITION: &'static str = "textDocument/definition"; 97 | 98 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use gdshader_lsp::{ 4 | completion::{get_completion_items, get_hover_description}, 5 | lexer::TokenStream, 6 | memory::Memory, 7 | source_code::send_errors, 8 | * 9 | }; 10 | use lsp_server::{Connection, Message, Request, Response}; 11 | use lsp_types::*; 12 | use serde_json::Value; 13 | 14 | const TEST : bool = false; 15 | 16 | pub struct Server<'a> { 17 | memories: HashMap, 18 | root_dir: Option, 19 | connection: &'a Connection 20 | } 21 | impl<'a> Server<'a> { 22 | pub fn open_document(&mut self, params: DidOpenTextDocumentParams) { 23 | let mut memory = Memory::new(¶ms.text_document.text, self.root_dir.clone()); 24 | let diagnostics = memory.evaluate_new(None).clone(); 25 | send_errors(&self.connection, ¶ms.text_document.uri, diagnostics); 26 | 27 | 
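// Cache this document's Memory keyed by URI so later completion and hover
// requests can reuse it (see get_memory_from_uri).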
self.memories.insert(params.text_document.uri.to_string(), memory); 28 | } 29 | 30 | pub fn change_document(&mut self, params: DidChangeTextDocumentParams) { 31 | let maybe_memory = self.memories.get_mut(¶ms.text_document.uri.to_string()); 32 | if let Some(memory) = maybe_memory { 33 | memory.root_dir = self.root_dir.clone(); 34 | for change in params.content_changes { 35 | memory.apply_change(change); 36 | } 37 | } 38 | } 39 | 40 | pub fn save_document(&mut self, params: DidSaveTextDocumentParams) { 41 | let maybe_memory = self.memories.get_mut(¶ms.text_document.uri.to_string()); 42 | if let Some(old_memory) = maybe_memory { 43 | *old_memory = Memory::new(&old_memory.get_source().get_code(), self.root_dir.clone()); 44 | old_memory.root_dir = self.root_dir.clone(); 45 | let diagnostics = old_memory.evaluate_new(None).clone(); 46 | send_errors(&self.connection, ¶ms.text_document.uri, diagnostics); 47 | } 48 | } 49 | 50 | pub fn get_memory_from_uri(&mut self, req: &Request) -> Option<&mut Memory> { 51 | let uri = req.params.get("textDocument") 52 | .unwrap() 53 | .get("uri") 54 | .unwrap() 55 | .to_string() 56 | .trim_matches(|x| x == '\"') 57 | .to_string(); 58 | self.memories.get_mut(&uri) 59 | } 60 | } 61 | 62 | fn main() { 63 | if TEST { test(); return; } 64 | 65 | let (connection, io_threads) = Connection::stdio(); 66 | 67 | let mut server = Server { 68 | memories: HashMap::new(), 69 | root_dir: Some("donkey".to_string()), 70 | connection: &connection 71 | }; 72 | 73 | loop { 74 | match connection.receiver.recv() { 75 | Ok(msg) => match msg { 76 | Message::Notification(notif) => { 77 | if notif.method == DID_OPEN { 78 | let did_open_params: Result = 79 | notif.extract(DID_OPEN); 80 | if let Ok(params) = did_open_params { 81 | server.open_document(params); 82 | } 83 | 84 | } else if notif.method == DID_CHANGE { 85 | let did_change_params: Result = 86 | notif.extract(DID_CHANGE); 87 | if let Ok(params) = did_change_params { 88 | server.change_document(params); 89 | } 90 | 91 | } else if notif.method == DID_SAVE { 92 | let did_save_params: Result = 93 | notif.extract(DID_SAVE); 94 | if let Ok(param) = did_save_params { 95 | server.save_document(param) 96 | } 97 | } 98 | }, 99 | Message::Request(req) => { 100 | if connection.handle_shutdown(&req).unwrap() { 101 | io_threads.join().unwrap(); 102 | return; 103 | } 104 | match handle_request(req, &mut server) { 105 | Ok(response) => 106 | connection.sender.send(Message::Response(response)).unwrap(), 107 | Err(ResponseError::Shutdown)=> { 108 | io_threads.join().unwrap(); 109 | return; 110 | } 111 | _ => {} 112 | } 113 | }, 114 | Message::Response(x) => { } 115 | } 116 | Err(_) => { 117 | io_threads.join().unwrap(); 118 | return 119 | } 120 | } 121 | } 122 | } 123 | 124 | pub enum ResponseError { 125 | DoNothing, 126 | Shutdown 127 | } 128 | 129 | pub fn handle_request( 130 | req: Request, 131 | server: &mut Server, 132 | ) -> Result { 133 | match req.method.as_str() { 134 | INITIALIZE => { 135 | server.root_dir = req.params.get("rootPath") 136 | .map(|x| x 137 | .to_string() 138 | .trim_matches(|x| x == '"') 139 | .to_string() 140 | ); 141 | for x in server.memories.iter_mut() { 142 | x.1.root_dir = server.root_dir.clone(); 143 | } 144 | Ok(Response::new_ok(req.id, serde_json::to_value( 145 | InitializeResult { 146 | capabilities: ServerCapabilities { 147 | text_document_sync: Some( 148 | TextDocumentSyncCapability::Kind( 149 | TextDocumentSyncKind::INCREMENTAL 150 | ) 151 | ), 152 | completion_provider: Some( 153 | 
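// Completion re-triggers on '#', '.', ':' and '"', presumably so that
// directives, member accesses, uniform hints and include paths complete
// as they are typed.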
lsp_types::CompletionOptions { 154 | trigger_characters: Some( 155 | vec![ 156 | "#".to_string(), 157 | ".".to_string(), 158 | ":".to_string(), 159 | "\"".to_string(), 160 | ] 161 | ), 162 | ..Default::default() 163 | } 164 | ), 165 | hover_provider: Some( 166 | lsp_types::HoverProviderCapability::Simple(true) 167 | ), 168 | ..Default::default() 169 | }, 170 | ..Default::default() 171 | }).unwrap())) 172 | } 173 | COMPLETION => { 174 | let cursor = get_cursor(&req.params); 175 | let maybe_memory = server.get_memory_from_uri(&req); 176 | 177 | if let Some(memory) = maybe_memory { 178 | let mut stream = TokenStream::new(&memory.get_source().get_code(), Some(cursor)); 179 | let tree = parse_tokens(&mut stream); 180 | memory.evaluate(tree); 181 | let names = get_completion_items(memory, cursor, &stream.cursor_element); 182 | 183 | Ok(Response::new_ok( 184 | req.id, 185 | serde_json::to_value(CompletionResponse::Array(names)).unwrap() 186 | )) 187 | } else { 188 | Err(ResponseError::DoNothing) 189 | } 190 | } 191 | HOVER => { 192 | let cursor = get_cursor(&req.params); 193 | let maybe_memory = server.get_memory_from_uri(&req); 194 | 195 | if let Some(memory) = maybe_memory { 196 | let text = { 197 | let stream = TokenStream::new(&memory.get_source().get_code(), Some(cursor)); 198 | let text = stream.find_cursor_text().map_or("".to_string(), |x| x); 199 | text 200 | }; 201 | if let Some(contents) = get_hover_description(memory, cursor, &text){ 202 | Ok(Response::new_ok( 203 | req.id, 204 | serde_json::to_value(lsp_types::Hover{ 205 | contents, 206 | range: None 207 | }).unwrap() 208 | )) 209 | } else { Err(ResponseError::DoNothing) } 210 | } else { Err(ResponseError::DoNothing) } 211 | }, 212 | DEFINITION => Err(ResponseError::DoNothing), 213 | EXIT => Err(ResponseError::Shutdown), 214 | SHUTDOWN => Err(ResponseError::Shutdown), 215 | _ =>Err(ResponseError::DoNothing) 216 | } 217 | } 218 | 219 | //fn pos(a:u32, b:u32) -> Position{ 220 | // Position::new(a,b) 221 | //} 222 | // 223 | //fn range(a:u32, b:u32, c:u32, d:u32) -> Range { 224 | // Range::new(pos(a,b), pos(c,d)) 225 | //} 226 | // 227 | pub fn get_cursor(val: &Value) -> Position { 228 | let position_json = val.get("position").unwrap(); 229 | let char: u32 = position_json.get("character").unwrap().as_u64().unwrap() as u32; 230 | let line: u32 = position_json.get("line").unwrap().as_u64().unwrap() as u32; 231 | Position{character: char, line} 232 | } 233 | 234 | fn test() { 235 | } 236 | 237 | 238 | 239 | 240 | 241 | 242 | -------------------------------------------------------------------------------- /src/memory/hint.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap}; 2 | 3 | use super::TypeInfo; 4 | 5 | #[derive(Clone, Debug)] 6 | pub struct HintInfo { 7 | pub type_info: Vec, 8 | pub num_arguments: Vec, 9 | pub description: String, 10 | } 11 | 12 | macro_rules! build_hint { 13 | (($($base:ident)*) $hint:ident $description:literal ) => { 14 | build_hint!(($($base)*) $hint (0) $description) 15 | }; 16 | (($($base:ident)*) $hint:ident ($($number:literal)*) $description:literal ) => { 17 | (stringify!($hint).to_string(), HintInfo { 18 | type_info: vec![$(TypeInfo::from_str(stringify!($base)),)*], 19 | num_arguments: vec![$($number,)*], 20 | description: $description.to_string() 21 | }) 22 | }; 23 | } 24 | 25 | pub fn make_builtin_hints() -> HashMap { 26 | HashMap::from([ 27 | build_hint!( 28 | (vec3 vec4 sampler2D) source_color 29 | "Used as color." 
30 | ), 31 | build_hint!( 32 | (float int) hint_range (2 3) 33 | "Restricted to values in a range (with min/max/step)." 34 | ), 35 | build_hint!( 36 | (sampler2D) hint_normal 37 | "Used as normalmap." 38 | ), 39 | build_hint!( 40 | (sampler2D) hint_default_white 41 | "As value or albedo color, default to opaque white." 42 | ), 43 | build_hint!( 44 | (sampler2D) hint_default_black 45 | "As value or albedo color, default to opaque black." 46 | ), 47 | build_hint!( 48 | (sampler2D) hint_default_transparent 49 | "As value or albedo color, default to transparent black." 50 | ), 51 | build_hint!( 52 | (sampler2D) hint_anisotropy 53 | "As flowmap, default to right." 54 | ), 55 | build_hint!( 56 | (sampler2D) hint_roughness_r 57 | "Used for roughness limiter on import (attempts reducing specular aliasing). 58 | _normal is a normal map that guides the roughness limiter, with roughness 59 | increasing in areas that have high-frequency detail." 60 | ), 61 | build_hint!( 62 | (sampler2D) hint_roughness_g 63 | "Used for roughness limiter on import (attempts reducing specular aliasing). 64 | _normal is a normal map that guides the roughness limiter, with roughness 65 | increasing in areas that have high-frequency detail." 66 | ), 67 | build_hint!( 68 | (sampler2D) hint_roughness_b 69 | "Used for roughness limiter on import (attempts reducing specular aliasing). 70 | _normal is a normal map that guides the roughness limiter, with roughness 71 | increasing in areas that have high-frequency detail." 72 | ), 73 | build_hint!( 74 | (sampler2D) hint_roughness_a 75 | "Used for roughness limiter on import (attempts reducing specular aliasing). 76 | _normal is a normal map that guides the roughness limiter, with roughness 77 | increasing in areas that have high-frequency detail." 78 | ), 79 | build_hint!( 80 | (sampler2D) hint_roughness_normal 81 | "Used for roughness limiter on import (attempts reducing specular aliasing). 82 | _normal is a normal map that guides the roughness limiter, with roughness 83 | increasing in areas that have high-frequency detail." 84 | ), 85 | build_hint!( 86 | (sampler2D) hint_roughness_gray 87 | "Used for roughness limiter on import (attempts reducing specular aliasing). 88 | _normal is a normal map that guides the roughness limiter, with roughness 89 | increasing in areas that have high-frequency detail." 90 | ), 91 | build_hint!( 92 | (sampler2D) filter_nearest 93 | "Enabled specified texture filtering." 94 | ), 95 | build_hint!( 96 | (sampler2D) filter_linear 97 | "Enabled specified texture filtering." 98 | ), 99 | build_hint!( 100 | (sampler2D) filter_nearest_mipmap 101 | "Enabled specified texture filtering." 102 | ), 103 | build_hint!( 104 | (sampler2D) filter_linear_mipmap 105 | "Enabled specified texture filtering." 106 | ), 107 | build_hint!( 108 | (sampler2D) filter_nearest_mipmap_anisotropic 109 | "Enabled specified texture filtering." 110 | ), 111 | build_hint!( 112 | (sampler2D) filter_linear_mipmap_anisotropic 113 | "Enabled specified texture filtering." 114 | ), 115 | build_hint!( 116 | (sampler2D) repeat_enable 117 | "Enabled texture repeating." 118 | ), 119 | build_hint!( 120 | (sampler2D) repeat_disable 121 | "Enabled texture repeating." 122 | ), 123 | build_hint!( 124 | (sampler2D) hint_screen_texture 125 | "Texture is the screen texture." 126 | ), 127 | build_hint!( 128 | (sampler2D) hint_depth_texture 129 | "Texture is the depth texture." 
130 | ), 131 | build_hint!( 132 | (sampler2D) hint_normal_roughness_texture 133 | "Texture is the normal roughness texture (only supported in Forward+)" 134 | ) 135 | ]) 136 | } 137 | 138 | 139 | 140 | -------------------------------------------------------------------------------- /src/memory/mod.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use lsp_types::*; 3 | 4 | mod types; 5 | mod functions; 6 | mod scope; 7 | mod hint; 8 | mod variables; 9 | mod render_modes; 10 | pub use variables::*; 11 | pub use types::*; 12 | pub use functions::*; 13 | pub use scope::*; 14 | pub use hint::*; 15 | pub use render_modes::*; 16 | 17 | use crate::{get_byte_offset_from_position, interpreter::{evaluate_top_level_node, EvaluateError}, lexer::{Token, TokenStream}, nodes::TopLevelNode, parse_tokens, source_code::SourceDocument}; 18 | 19 | pub struct Memory { 20 | pub root_dir: Option, 21 | pub shader_type: ShaderType, 22 | pub valid_render_modes: HashMap, 23 | pub builtin_types: HashMap, 24 | pub functions: HashMap, 25 | pub hints: HashMap, 26 | pub structs: HashMap, 27 | pub scopes: ScopeList, 28 | 29 | source: SourceDocument 30 | } 31 | impl Memory { 32 | pub fn new(source_str: &str, root_dir: Option) -> Self { 33 | let source = SourceDocument::new(source_str); 34 | let mut scopes = ScopeList::new(); 35 | if root_dir.is_some() { 36 | scopes.extend(variable_builtins()); 37 | } 38 | let (functions, hints) = if root_dir.is_none() { 39 | (HashMap::new(), HashMap::new()) 40 | } else { 41 | (make_builtin_functions(), make_builtin_hints()) 42 | }; 43 | 44 | Memory { 45 | root_dir, 46 | shader_type: ShaderType::Spatial, 47 | valid_render_modes: HashMap::new(), 48 | builtin_types: make_builtin_types(), 49 | functions, 50 | hints, 51 | structs: HashMap::new(), 52 | scopes, 53 | source 54 | } 55 | } 56 | 57 | pub fn evaluate(&mut self, top_levels: Vec) -> &Vec { 58 | let mut scopes = ScopeList::new(); 59 | if self.root_dir.is_some() { 60 | scopes.extend(variable_builtins()); 61 | } 62 | let functions = if self.root_dir.is_none() { HashMap::new() } 63 | else { make_builtin_functions() }; 64 | self.scopes = scopes; 65 | self.functions = functions; 66 | 67 | for top_level in top_levels { 68 | _ = evaluate_top_level_node(top_level, self); 69 | } 70 | 71 | self.source.get_diagnostics() 72 | } 73 | 74 | pub fn evaluate_new(&mut self, cursor: Option) -> &Vec { 75 | let mut stream = TokenStream::new(self.source.get_code(), cursor); 76 | let tree = parse_tokens(&mut stream); 77 | 78 | let mut diagnostics = stream.get_source().get_diagnostics().clone(); 79 | self.source.add_diagnostics(diagnostics); 80 | self.evaluate(tree) 81 | } 82 | 83 | 84 | pub fn get_builtin_types(&self, scope: usize) -> Vec { 85 | self.builtin_types 86 | .iter() 87 | .filter_map(|(name, info)| { 88 | if scope.clone() == 0 || info.used_anywhere { 89 | Some(CompletionItem { 90 | label: name.clone(), 91 | kind: Some(CompletionItemKind::TYPE_PARAMETER), 92 | ..Default::default() 93 | }) 94 | } else { None } 95 | }) 96 | .collect() 97 | } 98 | 99 | pub fn get_hints(&self, ty: TypeInfo) -> Vec { 100 | self.hints.iter() 101 | .filter_map(|(name, info)| { 102 | let insert_text = if info.num_arguments.iter().any(|&x| x > 0) { "($0)" } 103 | else { "" }; 104 | if info.type_info.contains(&ty) { 105 | Some(CompletionItem { 106 | label: name.clone(), 107 | kind: None, 108 | insert_text_format: Some(InsertTextFormat::SNIPPET), 109 | insert_text: Some(format!("{}{}", &name, insert_text)), 
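// Hints that take arguments are inserted as snippets ("name($0)") so the
// cursor lands between the parentheses; argument-less hints insert the bare name.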
110 | ..Default::default() 111 | }) 112 | } else { 113 | None 114 | } 115 | }) 116 | .collect() 117 | 118 | 119 | } 120 | 121 | pub fn get_structs(&self) -> Vec { 122 | self.structs.keys().map(|x| CompletionItem { 123 | label: x.to_string(), 124 | kind: Some(CompletionItemKind::STRUCT), 125 | ..Default::default() 126 | }).collect() 127 | } 128 | 129 | pub fn get_functions(&self, cursor: Position, is_const: bool) -> Vec { 130 | self.functions 131 | .iter() 132 | .filter_map(|(name, info)| { 133 | if !is_const || info.is_const { 134 | Some(CompletionItem { 135 | label: format!("{}()", &name), 136 | kind: Some(CompletionItemKind::FUNCTION), 137 | insert_text_format: Some(InsertTextFormat::SNIPPET), 138 | insert_text: Some(format!("{}($0)", &name)), 139 | ..Default::default() 140 | }) 141 | } else { None } 142 | }) 143 | .collect() 144 | } 145 | 146 | pub fn get_variables(&self, scope: usize, is_const: bool) -> Vec{ 147 | self.scopes.collect_scopes_from(scope) 148 | .iter() 149 | .flat_map(|x| x.iter()) 150 | .filter_map(|(name, info)| { 151 | if !is_const || info.is_const { 152 | Some(CompletionItem { 153 | label: name.clone(), 154 | kind: Some(CompletionItemKind::VARIABLE), 155 | ..Default::default() 156 | }) 157 | } else { None } 158 | }) 159 | .collect() 160 | } 161 | 162 | pub fn is_id_in_use(&self, id: &str) -> bool { 163 | self.builtin_types.contains_key(id) 164 | || self.functions.contains_key(id) 165 | || self.structs.contains_key(id) 166 | || self.scopes.collect_scopes().iter().any(|scope| scope.contains_key(id)) 167 | } 168 | 169 | pub fn is_id_valid_type(&self, id: &str) -> bool { 170 | self.builtin_types.contains_key(id) 171 | || self.structs.contains_key(id) 172 | } 173 | 174 | pub fn get_token_text(&self, token: Token) -> String { 175 | token.text(self.source.get_code()) 176 | } 177 | 178 | pub fn get_source(&self) -> &SourceDocument { 179 | &self.source 180 | } 181 | 182 | pub fn alert_error(&mut self, msg: &str, range: Range) -> EvaluateError { 183 | let message = format!("Syntax Error: {}", msg); 184 | self.source.push_error(msg, range, EvaluateError::SemanticsError) 185 | } 186 | 187 | pub fn fetch_gdshaderinc_files(&self, root_path: &str) -> Vec { 188 | let mut result = Vec::new(); 189 | 190 | let walker = walkdir::WalkDir::new(root_path).into_iter(); 191 | for entry in walker.filter_map(|e| e.ok()) { 192 | let path = entry.path(); 193 | if path.is_file() && path.extension().unwrap_or_default() == "gdshaderinc" { 194 | if let Some(path) = path.to_str() { 195 | let better_path = path.replace(root_path, "res://"); 196 | result.push(better_path); 197 | } 198 | } 199 | } 200 | 201 | result 202 | } 203 | 204 | pub fn apply_change(&mut self, change: TextDocumentContentChangeEvent) { 205 | self.source.apply_change(change); 206 | } 207 | } 208 | 209 | #[derive(Clone, Debug)] 210 | pub struct ValueInfo { 211 | pub ty: TypeInfo, 212 | pub editable: bool, 213 | pub is_const: bool, 214 | pub range: Option, 215 | pub description: Option 216 | } 217 | 218 | #[derive(Debug)] 219 | pub struct StructInfo { 220 | pub fields: Vec, 221 | pub range: Range 222 | } 223 | 224 | #[derive(Debug)] 225 | pub struct StructField { 226 | pub name: String, 227 | pub ty: TypeInfo, 228 | pub range: Range, 229 | } 230 | 231 | #[derive(Debug)] 232 | pub enum ShaderType { 233 | Spatial, 234 | CanvasItem, 235 | Particles, 236 | Fog, 237 | Sky 238 | } 239 | 240 | -------------------------------------------------------------------------------- /src/memory/render_modes.rs: 
-------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | pub struct RenderModeInfo { 4 | pub description: String 5 | } 6 | 7 | macro_rules! render_mode { 8 | ($name:ident $description:literal) => { 9 | (stringify!($name).to_string(), $description.to_string()) 10 | }; 11 | } 12 | 13 | pub fn spatial_render_modes() -> HashMap { 14 | HashMap::from([ 15 | render_mode!(blend_mix "Mix blend mode (alpha is transparency), default."), 16 | render_mode!(blend_add "Additive blend mode."), 17 | render_mode!(blend_sub "Subtractive blend mode."), 18 | render_mode!(blend_mul "Multiplicative blend mode."), 19 | render_mode!(depth_draw_opaque "Only draw depth for opaque geometry (not transparent)."), 20 | render_mode!(depth_draw_always "Always draw depth (opaque and transparent)."), 21 | render_mode!(depth_draw_never "Never draw depth."), 22 | render_mode!(depth_prepass_alpha "Do opaque depth pre-pass for transparent geometry."), 23 | render_mode!(depth_test_disabled "Disable depth testing."), 24 | render_mode!(sss_mode_skin "Subsurface Scattering mode for skin."), 25 | render_mode!(cull_back "Cull back-faces (default)."), 26 | render_mode!(cull_front "Cull front-faces."), 27 | render_mode!(cull_disabled "Culling disabled (double sided)."), 28 | render_mode!(unshaded "Result is just albedo. No lighting/shading happens in material."), 29 | render_mode!(wireframe "Geometry draws using lines."), 30 | render_mode!(diffuse_burley "Burley (Disney PBS) for diffuse (default)."), 31 | render_mode!(diffuse_lambert "Lambert shading for diffuse."), 32 | render_mode!(diffuse_lambert_wrap "Lambert wrapping (roughness dependent) for diffuse."), 33 | render_mode!(diffuse_toon "Toon shading for diffuse."), 34 | render_mode!(specular_schlick_ggx "Schlick-GGX for specular (default)."), 35 | render_mode!(specular_toon "Toon for specular."), 36 | render_mode!(specular_disabled "Disable specular."), 37 | render_mode!(skip_vertex_transform 38 | "VERTEX/NORMAL/etc. need to be transformed manually in vertex function."), 39 | render_mode!(world_vertex_coords 40 | "VERTEX/NORMAL/etc. are modified in world coordinates instead of local."), 41 | render_mode!(ensure_correct_normals "Use when non-uniform scale is applied to mesh."), 42 | render_mode!(shadows_disabled "Disable computing shadows in shader."), 43 | render_mode!(ambient_light_disabled 44 | "Disable contribution from ambient light and radiance map."), 45 | render_mode!(shadow_to_opacity 46 | "Lighting modifies the alpha so shadowed areas are opaque and non-shadowed 47 | areas are transparent. Useful for overlaying shadows onto a camera feed in AR."), 48 | render_mode!(vertex_lighting "Use vertex-based lighting."), 49 | render_mode!(particle_trails "Enables the trails when used on particles geometry."), 50 | render_mode!(alpha_to_coverage "Alpha antialiasing mode."), 51 | render_mode!(alpha_to_coverage_and_one "Alpha antialiasing mode."), 52 | render_mode!(fog_disabled 53 | "Disable receiving depth-based or volumetric fog. 
Useful for blend_add materials 54 | like particles."), 55 | ])} 56 | 57 | pub fn canvas_item_render_modes() -> HashMap { 58 | HashMap::from([ 59 | render_mode!(blend_mix "Mix blend mode (alpha is transparency), default."), 60 | render_mode!(blend_add "Additive blend mode."), 61 | render_mode!(blend_sub "Subtractive blend mode."), 62 | render_mode!(blend_mul "Multiplicative blend mode."), 63 | render_mode!(blend_premul_alpha "Pre-multiplied alpha blend mode."), 64 | render_mode!(blend_disabled 65 | "Disable blending, values (including alpha) are written as-is."), 66 | render_mode!(unshaded "Result is just albedo. No lighting/shading happens in material."), 67 | render_mode!(light_only "Only draw on light pass."), 68 | render_mode!(skip_vertex_transform 69 | "VERTEX needs to be transformed manually in vertex function."), 70 | render_mode!(world_vertex_coords 71 | "VERTEX is modified in world coordinates instead of local."), 72 | ]) 73 | } 74 | pub fn particle_render_modes() -> HashMap { 75 | HashMap::from([ 76 | render_mode!(keep_data "Do not clear previous data on restart."), 77 | render_mode!(disable_force "Disable attractor force."), 78 | render_mode!(disable_velocity "Ignore VELOCITY value."), 79 | render_mode!(collision_use_scale "Scale the particle's size for collisions."), 80 | ]) 81 | } 82 | 83 | pub fn sky_render_modes() -> HashMap { 84 | HashMap::from([ 85 | render_mode!(use_half_res_pass 86 | "Allows the shader to write to and access the half resolution pass."), 87 | render_mode!(use_quarter_res_pass 88 | "Allows the shader to write to and access the quarter resolution pass."), 89 | render_mode!(disable_fog "If used, fog will not affect the sky."), 90 | ]) 91 | } 92 | 93 | 94 | 95 | -------------------------------------------------------------------------------- /src/memory/scope.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use lsp_types::{Position, Range}; 3 | 4 | use crate::lexer::ExtraRange; 5 | 6 | use super::{TypeInfo, ValueInfo}; 7 | 8 | #[derive(Clone, PartialEq)] 9 | pub struct FunctionReturn { 10 | pub expected: Option, 11 | pub returned: Option 12 | } 13 | 14 | #[derive(Clone, PartialEq)] 15 | pub enum ScopeType { 16 | TopLevel, 17 | Block, 18 | Loop, 19 | Function(Box) 20 | } 21 | 22 | pub struct Scope { 23 | pub scope_type: ScopeType, 24 | pub parent: usize, 25 | pub range: Range, 26 | pub values: HashMap 27 | } 28 | 29 | pub struct ScopeList { 30 | pub scopes: Vec, 31 | current: usize 32 | } 33 | 34 | impl ScopeList { 35 | pub fn new() -> Self { 36 | Self { 37 | scopes: vec![ 38 | Scope { 39 | parent: 0, 40 | range: Range::default(), 41 | scope_type: ScopeType::TopLevel, 42 | values: HashMap::new(), 43 | } 44 | ], 45 | current: 0 46 | } 47 | } 48 | pub fn insert(&mut self, k: String, v: ValueInfo) { 49 | self.scopes[self.current].values.insert(k, v); 50 | } 51 | pub fn extend(&mut self, extra: Vec<(String, ValueInfo)>) { 52 | self.scopes[self.current].values.extend(extra.into_iter()) 53 | } 54 | pub fn force_scope(&mut self, scope: usize) { 55 | self.current = scope; 56 | } 57 | pub fn scope_type(&mut self) -> ScopeType { 58 | self.scopes[self.current].scope_type.clone() 59 | } 60 | pub fn enter_scope(&mut self, scope_type: ScopeType, range: Range) { 61 | self.scopes.push(Scope { 62 | parent: self.current, 63 | range, 64 | scope_type, 65 | values: HashMap::new() 66 | }); 67 | self.current = self.scopes.len()-1; 68 | } 69 | /// True means we succeeded. 
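/// Returns false when already in the top-level scope (index 0), which cannot be left.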
70 | pub fn leave_scope(&mut self) -> bool { 71 | if self.current == 0 { 72 | false 73 | } else { 74 | self.current = self.scopes[self.current].parent; 75 | true 76 | } 77 | } 78 | 79 | pub fn get_expected_return_type(&self) -> Option { 80 | let mut current = self.current; 81 | while current != 0 { 82 | if let ScopeType::Function(x) = &self.scopes[current].scope_type { 83 | return x.expected.clone(); 84 | } 85 | current = self.scopes[current].parent; 86 | } 87 | None 88 | } 89 | 90 | pub fn set_actual_return_type(&mut self, return_type: Option) { 91 | let mut current = self.current; 92 | while current != 0 { 93 | if let ScopeType::Function(function_return) = &self.scopes[current].scope_type { 94 | let new_function_return = Box::new(FunctionReturn { 95 | expected: function_return.expected.clone(), 96 | returned: return_type.clone() 97 | }); 98 | self.scopes[current].scope_type = ScopeType::Function(new_function_return); 99 | } 100 | current = self.scopes[current].parent; 101 | } 102 | } 103 | 104 | pub fn assert_returned(&mut self) -> bool { 105 | let mut current = self.current; 106 | while current != 0 { 107 | if let ScopeType::Function(function_return) = &self.scopes[current].scope_type { 108 | return function_return.expected.is_some() && function_return.returned.is_some() 109 | || function_return.returned.is_none() && function_return.expected.is_none() 110 | } 111 | current = self.scopes[current].parent; 112 | } 113 | false 114 | } 115 | 116 | pub fn collect_scopes(&self) -> Vec<&HashMap> { 117 | self.collect_scopes_from(self.current) 118 | } 119 | 120 | pub fn find_scope_from_position(&self, position: Position) -> usize { 121 | self.scopes 122 | .iter() 123 | .enumerate() 124 | .filter_map(|(i,scope)| { 125 | if scope.range.contains_position(position) { Some(i) } 126 | else { None } 127 | }) 128 | .max() 129 | .map_or(0, |x| x) 130 | } 131 | 132 | pub fn collect_scopes_from( 133 | &self, 134 | location: usize 135 | ) -> Vec<&HashMap> { 136 | let mut indices = vec![0]; 137 | let mut current = location; 138 | 139 | while current != 0 { 140 | indices.push(current); 141 | if let Some(idx) = self.scopes.get(current) { 142 | current = idx.parent; 143 | } else { 144 | break; 145 | } 146 | }; 147 | let scopes: Vec<&HashMap> = self.scopes 148 | .iter() 149 | .enumerate() 150 | .filter_map(move |(idx, scope)| { 151 | if indices.contains(&idx) { Some(&scope.values) } else { None } 152 | }).collect(); 153 | 154 | scopes 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /src/memory/types.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use crate::lexer::{Token, TokenKind}; 4 | 5 | #[derive(Clone, Debug, PartialEq, Eq)] 6 | pub enum Primitive { 7 | Float, 8 | Int, 9 | Uint, 10 | Bool, 11 | } 12 | 13 | #[derive(Debug, PartialEq, Eq)] 14 | pub enum GenericSize { 15 | Number, 16 | GVec2Type, 17 | GVec3Type, 18 | GVec4Type, 19 | GSampler2D, 20 | GSampler2DArray, 21 | GSampler3D, 22 | } 23 | impl GenericSize { 24 | pub fn as_size(&self) -> Option { 25 | match self { 26 | GenericSize::Number => Some(1), 27 | GenericSize::GVec2Type => Some(2), 28 | GenericSize::GVec3Type => Some(3), 29 | GenericSize::GVec4Type => Some(4), 30 | GenericSize::GSampler2D => None, 31 | GenericSize::GSampler2DArray => None, 32 | GenericSize::GSampler3D => None, 33 | } 34 | } 35 | } 36 | 37 | #[derive(Clone, Debug)] 38 | pub struct TypeInfo { 39 | pub base: String, 40 | /// A size of '0' means that 
this is not an array. 41 | pub size: u32, 42 | } 43 | impl TypeInfo { 44 | pub fn void() -> Self { 45 | Self { base: "void".to_string(), size: 0 } 46 | } 47 | pub fn from_str(base: &str) -> Self { 48 | Self { base: base.to_string(), size: 0 } 49 | } 50 | pub fn from_primitive(token: Token) -> Self { 51 | match token.kind { 52 | TokenKind::IntConstant => Self { base: "int".to_string(), size: 0}, 53 | TokenKind::UintConstant => Self { base: "uint".to_string(), size: 0}, 54 | TokenKind::FloatConstant => Self { base: "float".to_string(), size: 0}, 55 | TokenKind::BoolConstant => Self { base: "bool".to_string(), size: 0}, 56 | _ => unreachable!() 57 | } 58 | } 59 | pub fn to_string(&self) -> String { 60 | if self.size != 0 { 61 | format!("{}[{}]", self.base, self.size.to_string()) 62 | } else { 63 | self.base.clone() 64 | } 65 | } 66 | pub fn from_pieces(primitive_type: Primitive, generic_size: GenericSize) -> Self { 67 | use Primitive::*; 68 | use GenericSize::*; 69 | match (primitive_type, generic_size) { 70 | (Float, Number) => Self { base: "float".to_string(), size: 0 }, 71 | (Float, GVec2Type) => Self { base: "vec2".to_string(), size: 0 }, 72 | (Float, GVec3Type) => Self { base: "vec3".to_string(), size: 0 }, 73 | (Float, GVec4Type) => Self { base: "vec4".to_string(), size: 0 }, 74 | (Int, Number) => Self { base: "int".to_string(), size: 0 }, 75 | (Int, GVec2Type) => Self { base: "ivec2".to_string(), size: 0 }, 76 | (Int, GVec3Type) => Self { base: "ivec3".to_string(), size: 0 }, 77 | (Int, GVec4Type) => Self { base: "ivec4".to_string(), size: 0 }, 78 | (Uint, Number) => Self { base: "uint".to_string(), size: 0 }, 79 | (Uint, GVec2Type) => Self { base: "uvec2".to_string(), size: 0 }, 80 | (Uint, GVec3Type) => Self { base: "uvec3".to_string(), size: 0 }, 81 | (Uint, GVec4Type) => Self { base: "uvec4".to_string(), size: 0 }, 82 | (Bool, Number) => Self { base: "bool".to_string(), size: 0 }, 83 | (Bool, GVec2Type) => Self { base: "bvec2".to_string(), size: 0 }, 84 | (Bool, GVec3Type) => Self { base: "bvec3".to_string(), size: 0 }, 85 | (Bool, GVec4Type) => Self { base: "bvec4".to_string(), size: 0 }, 86 | (Float, GSampler2D) => Self { base: "sampler2D".to_string(), size: 0 }, 87 | (Float, GSampler2DArray) => Self { base: "sampler2DArray".to_string(), size: 0 }, 88 | (Float, GSampler3D) => Self { base: "sampler3D".to_string(), size: 0 }, 89 | (Int, GSampler2D) => Self { base: "isampler2D".to_string(), size: 0 }, 90 | (Int, GSampler2DArray) => Self { base: "isampler2DArray".to_string(), size: 0 }, 91 | (Int, GSampler3D) => Self { base: "isampler3D".to_string(), size: 0 }, 92 | (Uint, GSampler2D) => Self { base: "usampler2D".to_string(), size: 0 }, 93 | (Uint, GSampler2DArray) => Self { base: "usampler2DArray".to_string(), size: 0 }, 94 | (Uint, GSampler3D) => Self { base: "usampler3D".to_string(), size: 0 }, 95 | _ => panic!() 96 | } 97 | } 98 | pub fn get_generic_type(&self) -> Option { 99 | match self.base.as_str() { 100 | "float" => Some(Primitive::Float), 101 | "vec2" => Some(Primitive::Float), 102 | "vec3" => Some(Primitive::Float), 103 | "vec4" => Some(Primitive::Float), 104 | "vec_type" => Some(Primitive::Float), 105 | "int" => Some(Primitive::Int), 106 | "ivec2" => Some(Primitive::Int), 107 | "ivec3" => Some(Primitive::Int), 108 | "ivec4" => Some(Primitive::Int), 109 | "ivec_type" => Some(Primitive::Int), 110 | "uint" => Some(Primitive::Uint), 111 | "uvec2" => Some(Primitive::Uint), 112 | "uvec3" => Some(Primitive::Uint), 113 | "uvec4" => Some(Primitive::Uint), 114 | "uvec_type" => 
Some(Primitive::Uint), 115 | "bool" => Some(Primitive::Bool), 116 | "bvec2" => Some(Primitive::Bool), 117 | "bvec3" => Some(Primitive::Bool), 118 | "bvec4" => Some(Primitive::Bool), 119 | "bvec_type" => Some(Primitive::Float), 120 | "sampler2D" => Some(Primitive::Float), 121 | "sampler2DArray" => Some(Primitive::Float), 122 | "sampler3D" => Some(Primitive::Float), 123 | "samplerCube" => Some(Primitive::Float), 124 | "samplerCubeArray" => Some(Primitive::Float), 125 | "isampler2D" => Some(Primitive::Int), 126 | "isampler2DArray" => Some(Primitive::Int), 127 | "isampler3D" => Some(Primitive::Int), 128 | "usampler2D" => Some(Primitive::Uint), 129 | "usampler2DArray" => Some(Primitive::Uint), 130 | "usampler3D" => Some(Primitive::Uint), 131 | _ => None 132 | } 133 | } 134 | pub fn get_generic_size(&self) -> Option { 135 | match self.base.as_str() { 136 | "float" => Some(GenericSize::Number), 137 | "int" => Some(GenericSize::Number), 138 | "uint" => Some(GenericSize::Number), 139 | "number" => Some(GenericSize::Number), 140 | "vec2" => Some(GenericSize::GVec2Type), 141 | "ivec2" => Some(GenericSize::GVec2Type), 142 | "uvec2" => Some(GenericSize::GVec2Type), 143 | "gvec2_type" => Some(GenericSize::GVec2Type), 144 | "vec3" => Some(GenericSize::GVec3Type), 145 | "ivec3" => Some(GenericSize::GVec3Type), 146 | "uvec3" => Some(GenericSize::GVec3Type), 147 | "gvec3_type" => Some(GenericSize::GVec3Type), 148 | "vec4" => Some(GenericSize::GVec4Type), 149 | "ivec4" => Some(GenericSize::GVec4Type), 150 | "uvec4" => Some(GenericSize::GVec4Type), 151 | "gvec4_type" => Some(GenericSize::GVec3Type), 152 | "sampler2D" => Some(GenericSize::GSampler2D), 153 | "isampler2D" => Some(GenericSize::GSampler2D), 154 | "usampler2D" => Some(GenericSize::GSampler2D), 155 | "gsampler2D" => Some(GenericSize::GSampler2D), 156 | "sampler2DArray" => Some(GenericSize::GSampler2DArray), 157 | "isampler2DArray" => Some(GenericSize::GSampler2DArray), 158 | "usampler2DArray" => Some(GenericSize::GSampler2DArray), 159 | "gsampler2DArray" => Some(GenericSize::GSampler2DArray), 160 | "sampler3D" => Some(GenericSize::GSampler3D), 161 | "isampler3D" => Some(GenericSize::GSampler3D), 162 | "usampler3D" => Some(GenericSize::GSampler3D), 163 | "gsampler3D" => Some(GenericSize::GSampler3D), 164 | _ => None 165 | } 166 | } 167 | /// Checks if this type is a generically typed type, and returns its size. 168 | pub fn is_generically_typed(&self) -> Option { 169 | match self.base.as_str() { 170 | "number" => Some(GenericSize::Number), 171 | "gvec2_type" => Some(GenericSize::GVec2Type), 172 | "gvec3_type" => Some(GenericSize::GVec3Type), 173 | "gvec4_type" => Some(GenericSize::GVec4Type), 174 | "gsampler2D" => Some(GenericSize::GSampler2D), 175 | "gsampler2DArray" => Some(GenericSize::GSampler2DArray), 176 | "gsampler3D" => Some(GenericSize::GSampler3D), 177 | _ => None, 178 | } 179 | } 180 | /// Checks if this type is a generically sized type, and returns its primitive type. 
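/// For example, "vec_type" and "mat_type" map to Primitive::Float, while a
/// concrete type such as "vec3" returns None.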
181 | pub fn is_generically_sized(&self) -> Option { 182 | match self.base.as_str() { 183 | "vec_type" => Some(Primitive::Float), 184 | "ivec_type" => Some(Primitive::Int), 185 | "uvec_type" => Some(Primitive::Uint), 186 | "bvec_type" => Some(Primitive::Bool), 187 | "mat_type" => Some(Primitive::Float), 188 | _ => None, 189 | } 190 | } 191 | } 192 | impl PartialEq for TypeInfo { 193 | fn eq(&self, other: &Self) -> bool { 194 | if self.size != other.size { 195 | return false; 196 | } 197 | if self.base == other.base { 198 | return true; 199 | } 200 | let base = self.base.as_str(); 201 | let other_base = other.base.as_str(); 202 | match base { 203 | "vec_type" => if ["float", "vec2", "vec3", "vec4"].contains(&other_base) { 204 | return true; 205 | } 206 | "ivec_type" => if ["int", "ivec2", "ivec3", "ivec4"].contains(&other_base) { 207 | return true; 208 | } 209 | "uvec_type" => if ["uint", "uvec2", "uvec3", "uvec4"].contains(&other_base) { 210 | return true; 211 | } 212 | "bvec_type" => if ["bool", "bvec2", "bvec3", "bvec4"].contains(&other_base) { 213 | return true; 214 | } 215 | "mat_type" => if ["mat2", "mat3", "mat4"].contains(&other_base) { 216 | return true; 217 | } 218 | "number" => if ["float", "int", "uint"].contains(&other_base) { 219 | return true; 220 | } 221 | "gvec2_type" => if ["vec2", "ivec2", "uvec2"].contains(&other_base) { 222 | return true; 223 | } 224 | "gvec3_type" => if ["vec3", "ivec3", "uvec3"].contains(&other_base) { 225 | return true; 226 | } 227 | "gvec4_type" => if ["vec4", "ivec4", "uvec4"].contains(&other_base) { 228 | return true; 229 | } 230 | "gsampler2D" => if ["sampler2D", "isampler2D", "usampler2D"].contains(&other_base) { 231 | return true; 232 | } 233 | "gsampler3D" => if ["sampler3D", "isampler3D", "usampler3D"].contains(&other_base) { 234 | return true; 235 | } 236 | "gsampler2DArray" => if ["sampler2DArray", "isampler2DArray", "usampler2DArray"].contains(&other_base) { 237 | return true; 238 | } 239 | _ => {} 240 | } 241 | match other_base { 242 | "vec_type" => if ["float", "vec2", "vec3", "vec4"].contains(&base) { 243 | return true; 244 | } 245 | "ivec_type" => if ["int", "ivec2", "ivec3", "ivec4"].contains(&base) { 246 | return true; 247 | } 248 | "uvec_type" => if ["uint", "uvec2", "uvec3", "uvec4"].contains(&base) { 249 | return true; 250 | } 251 | "bvec_type" => if ["bool", "bvec2", "bvec3", "bvec4"].contains(&base) { 252 | return true; 253 | } 254 | "mat_type" => if ["mat2", "mat3", "mat4"].contains(&base) { 255 | return true; 256 | } 257 | "number" => if ["float", "int", "uint"].contains(&base) { 258 | return true; 259 | } 260 | "gvec2_type" => if ["vec2", "ivec2", "uvec2"].contains(&base) { 261 | return true; 262 | } 263 | "gvec3_type" => if ["vec2", "ivec2", "uvec2"].contains(&base) { 264 | return true; 265 | } 266 | "gvec4_type" => if ["vec2", "ivec2", "uvec2"].contains(&base) { 267 | return true; 268 | } 269 | "gsampler2D" => if ["sampler2D", "isampler2D", "usampler2D"].contains(&base) { 270 | return true; 271 | } 272 | "gsampler3D" => if ["sampler3D", "isampler3D", "usampler3D"].contains(&base) { 273 | return true; 274 | } 275 | "gsampler2DArray" => if ["sampler2DArray", "isampler2DArray", "usampler2DArray"].contains(&base) { 276 | return true; 277 | } 278 | _ => {} 279 | } 280 | false 281 | } 282 | } 283 | impl Eq for TypeInfo{} 284 | 285 | pub struct BuiltinTypeInfo { 286 | pub description: String, 287 | pub used_anywhere: bool 288 | } 289 | 290 | macro_rules! 
builtin_type { 291 | ($name:ident $anywhere:literal $desc:literal) => { 292 | (stringify!($name).to_string(), BuiltinTypeInfo { 293 | description: $desc.to_string(), 294 | used_anywhere: $anywhere 295 | }) 296 | }; 297 | } 298 | 299 | pub fn make_builtin_types() -> HashMap { 300 | HashMap::from([ 301 | builtin_type!(void false "Void datatype, useful only for functions that return nothing."), 302 | builtin_type!(bool true "Boolean datatype, can only contain true or false."), 303 | builtin_type!(bvec2 true "Two-component vector of booleans."), 304 | builtin_type!(bvec3 true "Three-component vector of booleans."), 305 | builtin_type!(bvec4 true "Four-component vector of booleans."), 306 | builtin_type!(int true "Signed scalar integer."), 307 | builtin_type!(ivec2 true "Two-component vector of signed integers."), 308 | builtin_type!(ivec3 true "Three-component vector of signed integers."), 309 | builtin_type!(ivec4 true "Four-component vector of signed integers."), 310 | builtin_type!(uint true "Unsigned scalar integer; can't contain negative numbers."), 311 | builtin_type!(uvec2 true "Two-component vector of unsigned integers."), 312 | builtin_type!(uvec3 true "Three-component vector of unsigned integers."), 313 | builtin_type!(uvec4 true "Four-component vector of unsigned integers."), 314 | builtin_type!(float true "Floating-point scalar."), 315 | builtin_type!(vec2 true "Two-component vector of floating-point values."), 316 | builtin_type!(vec3 true "Three-component vector of floating-point values."), 317 | builtin_type!(vec4 true "Four-component vector of floating-point values."), 318 | builtin_type!(mat2 true "2x2 matrix, in column major order."), 319 | builtin_type!(mat3 true "3x3 matrix, in column major order."), 320 | builtin_type!(mat4 true "4x4 matrix, in column major order."), 321 | builtin_type!(sampler2D false 322 | "Sampler type for binding 2D textures, which are read as float."), 323 | builtin_type!(isampler2D false 324 | "Sampler type for binding 2D textures, which are read as signed integer."), 325 | builtin_type!(usampler2D false 326 | "Sampler type for binding 2D textures, which are read as unsigned integer."), 327 | builtin_type!(sampler2DArray false 328 | "Sampler type for binding 2D texture arrays, which are read as float."), 329 | builtin_type!(isampler2DArray false 330 | "Sampler type for binding 2D texture arrays, which are read as signed integer."), 331 | builtin_type!(usampler2DArray false 332 | "Sampler type for binding 2D texture arrays, which are read as unsigned integer."), 333 | builtin_type!(sampler3D false 334 | "Sampler type for binding 3D textures, which are read as float."), 335 | builtin_type!(isampler3D false 336 | "Sampler type for binding 3D textures, which are read as signed integer."), 337 | builtin_type!(usampler3D false 338 | "Sampler type for binding 3D textures, which are read as unsigned integer."), 339 | builtin_type!(samplerCube false 340 | "Sampler type for binding Cubemaps, which are read as float."), 341 | builtin_type!(samplerCubeArray false 342 | "Sampler type for binding Cubemap arrays, which are read as float."), 343 | ]) 344 | } 345 | 346 | 347 | -------------------------------------------------------------------------------- /src/nodes/expression_node.rs: -------------------------------------------------------------------------------- 1 | use lsp_types::Range; 2 | 3 | use crate::lexer::{Token, TokenError}; 4 | 5 | pub type ExpressionResult = Result; 6 | 7 | #[derive(Clone, Debug)] 8 | pub enum ExpressionNode { 9 | Primitive(Token), 10 
| Identifier(Token), 11 | Unary(UnaryNode), 12 | Binary(BinaryNode), 13 | Assignment(AssignmentNode), 14 | Increment(IncrementNode), 15 | Paren(Box), 16 | Conditional(ConditionalNode), 17 | Call(CallNode), 18 | ArrayAccess(ArrayAccessNode), 19 | MemberAccess(MemberAccessNode), 20 | ArrayLiteral(Vec) 21 | } 22 | impl ExpressionNode { 23 | pub fn range(&self) -> Range { 24 | use ExpressionNode::*; 25 | match self { 26 | Primitive(x) => x.range, 27 | Identifier(x) => x.range, 28 | Paren(x) => x.range(), 29 | Unary(x) => 30 | Range::new(x.op.range.start, x.arg.range().end), 31 | Binary(x) => 32 | Range::new(x.left.range().start, x.right.range().end), 33 | Assignment(x) => 34 | Range::new(x.left.range().start, x.right.range().end), 35 | MemberAccess(x) => 36 | Range::new(x.argument.range().start, x.member.range.end), 37 | ArrayAccess(x) => 38 | Range::new(x.argument.range().start, x.index.range().end), 39 | Conditional(x) => 40 | Range::new(x.condition.range().start, x.alternate.range().end), 41 | Increment(x) => match x.is_prefix { 42 | true => Range::new(x.op.range.start, x.arg.range().end), 43 | false => Range::new(x.arg.range().start, x.op.range.end), 44 | } 45 | ArrayLiteral(x) => { 46 | let start = x.first().unwrap().range().start; 47 | let end = x.last().unwrap().range().end; 48 | Range::new(start, end) 49 | } 50 | Call(x) => { 51 | let start = x.identifier.range.start; 52 | match x.args.last() { 53 | Some(last_arg) => 54 | Range::new(start, last_arg.expression.range().end), 55 | _ => x.identifier.range 56 | } 57 | } 58 | } 59 | } 60 | } 61 | 62 | #[derive(Clone, Debug)] 63 | pub struct UnaryNode { 64 | pub op: Token, 65 | pub arg: Box, 66 | } 67 | 68 | #[derive(Clone, Debug)] 69 | pub struct BinaryNode { 70 | pub op: Token, 71 | pub left: Box, 72 | pub right: Box, 73 | } 74 | 75 | #[derive(Clone, Debug)] 76 | pub struct AssignmentNode { 77 | pub op: Token, 78 | pub left: Box, 79 | pub right: Box, 80 | } 81 | 82 | #[derive(Clone, Debug)] 83 | pub struct IncrementNode { 84 | pub op: Token, 85 | pub is_prefix: bool, 86 | pub arg: Box, 87 | } 88 | 89 | #[derive(Clone, Debug)] 90 | pub struct ConditionalNode { 91 | pub condition: Box, 92 | pub action: Box, 93 | pub alternate: Box, 94 | } 95 | 96 | #[derive(Clone, Debug)] 97 | pub struct CallNode { 98 | pub identifier: Token, 99 | pub args: Vec 100 | } 101 | 102 | #[derive(Clone, Debug)] 103 | pub struct CallArgumentNode { 104 | pub qualifier: Option, 105 | pub expression: ExpressionNode 106 | } 107 | 108 | #[derive(Clone, Debug)] 109 | pub struct ArrayAccessNode { 110 | pub argument: Box, 111 | pub index: Box, 112 | } 113 | 114 | #[derive(Clone, Debug)] 115 | pub struct MemberAccessNode { 116 | pub argument: Box, 117 | pub member: Token 118 | } 119 | 120 | -------------------------------------------------------------------------------- /src/nodes/mod.rs: -------------------------------------------------------------------------------- 1 | use lsp_types::Range; 2 | use crate::{lexer::{Token, TokenError}, memory::TypeInfo}; 3 | 4 | mod top_level_nodes; 5 | mod expression_node; 6 | mod statement_node; 7 | pub use top_level_nodes::*; 8 | pub use expression_node::*; 9 | pub use statement_node::*; 10 | 11 | pub type ValueResult = Result; 12 | 13 | #[derive(Clone, Debug)] 14 | pub struct ValueNode { 15 | pub identifier: Token, 16 | pub type_node: TypeNode, 17 | pub range: Range 18 | } 19 | 20 | pub type TypeResult = Result; 21 | 22 | #[derive(Clone, Debug)] 23 | pub struct TypeNode { 24 | pub info: TypeInfo, 25 | pub range: Range 26 | } 27 | 28 | 
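// Note: a ValueNode couples an identifier token with its parsed TypeNode (see
// parser::parse_value_specifier), and a TypeInfo size of 0 means "not an array"
// (see memory/types.rs).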
-------------------------------------------------------------------------------- /src/nodes/statement_node.rs: -------------------------------------------------------------------------------- 1 | use lsp_types::Range; 2 | 3 | use crate::lexer::{Token, TokenError}; 4 | use super::{expression_node::ExpressionNode, ValueNode}; 5 | 6 | pub type StatementResult = Result; 7 | 8 | #[derive(Clone, Debug)] 9 | pub enum StatementNode { 10 | VarDeclaration(VarDeclarationNode), 11 | If(IfNode), 12 | While(WhileNode), 13 | For(ForNode), 14 | Switch(SwitchNode), 15 | Expression(Box), 16 | Continue(Token), 17 | Break(Token), 18 | Return(ReturnNode), 19 | Block(BlockNode), 20 | } 21 | 22 | #[derive(Clone, Debug)] 23 | pub struct BlockNode { 24 | pub range: Range, 25 | pub statements: Vec 26 | } 27 | 28 | #[derive(Clone, Debug)] 29 | pub struct VarDeclarationNode { 30 | pub keyword: Option, 31 | pub value: Box, 32 | pub expression: Option>, 33 | pub is_const: bool 34 | } 35 | 36 | #[derive(Clone, Debug)] 37 | pub struct IfNode { 38 | pub keyword: Token, 39 | pub condition: Box, 40 | pub action: Box, 41 | pub alternate: Option 42 | } 43 | 44 | #[derive(Clone, Debug)] 45 | pub struct ElseNode { 46 | pub keyword: Token, 47 | pub action: Box 48 | } 49 | 50 | #[derive(Clone, Debug)] 51 | pub struct WhileNode { 52 | pub keyword: Token, 53 | pub condition: Box, 54 | pub action: Box 55 | } 56 | 57 | #[derive(Clone, Debug)] 58 | pub struct ForNode { 59 | pub keyword: Token, 60 | pub initializer: Box, 61 | pub condition: Box, 62 | pub update: Box, 63 | pub action: Box 64 | } 65 | 66 | #[derive(Clone, Debug)] 67 | pub struct SwitchNode { 68 | pub keyword: Token, 69 | pub condition: Box, 70 | pub cases: Vec 71 | } 72 | 73 | #[derive(Clone, Debug)] 74 | pub struct SwitchCaseNode { 75 | pub keyword: Token, 76 | pub statements: Vec 77 | } 78 | 79 | #[derive(Clone, Debug)] 80 | pub struct ReturnNode { 81 | pub keyword: Token, 82 | pub expression: Option> 83 | } 84 | 85 | 86 | 87 | -------------------------------------------------------------------------------- /src/nodes/top_level_nodes.rs: -------------------------------------------------------------------------------- 1 | use crate::lexer::{Token, TokenError}; 2 | use self::expression_node::ExpressionNode; 3 | 4 | use super::{*, statement_node::BlockNode}; 5 | 6 | pub type TopLevelResult = Result; 7 | 8 | #[derive(Clone, Debug)] 9 | pub enum TopLevelNode { 10 | ShaderType(ShaderTypeNode), 11 | RenderMode(RenderModeNode), 12 | GroupUniforms(GroupUniformsNode), 13 | Const(ConstNode), 14 | Varying(VaryingNode), 15 | Uniform(UniformNode), 16 | Struct(StructNode), 17 | Function(FunctionNode), 18 | Include(IncludeNode) 19 | } 20 | 21 | #[derive(Clone, Debug)] 22 | pub struct IncludeNode{ 23 | pub keyword: Token, 24 | pub path: Token, 25 | } 26 | 27 | #[derive(Clone, Debug)] 28 | pub struct ShaderTypeNode { 29 | pub keyword: Token, 30 | pub shader_type: Token, 31 | } 32 | #[derive(Clone, Debug)] 33 | pub struct RenderModeNode { 34 | pub keyword: Token, 35 | pub render_modes: Vec, 36 | } 37 | #[derive(Clone, Debug)] 38 | pub struct GroupUniformsNode { 39 | pub keyword: Token, 40 | pub group: Option, 41 | pub subgroup: Option 42 | } 43 | #[derive(Clone, Debug)] 44 | pub struct ConstNode { 45 | pub keyword: Token, 46 | pub precision: Option, 47 | pub value: Box, 48 | pub expression: Box, 49 | } 50 | #[derive(Clone, Debug)] 51 | pub struct VaryingNode { 52 | pub keyword: Token, 53 | pub interpolation: Option, 54 | pub precision: Option, 55 | pub value: Box, 56 | } 57 | 
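// UniformNode below models a uniform declaration; global_instance presumably
// holds a `global` or `instance` qualifier token, while hint and expression
// capture the optional hint and default-value parts.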
#[derive(Clone, Debug)] 58 | pub struct UniformNode { 59 | pub global_instance: Option, 60 | pub keyword: Token, 61 | pub precision: Option, 62 | pub value: Box, 63 | pub hint: Option, 64 | pub expression: Option>, 65 | } 66 | #[derive(Clone, Debug)] 67 | pub struct HintNode { 68 | pub identifier: Token, 69 | pub params: Option> 70 | } 71 | #[derive(Clone, Debug)] 72 | pub struct StructNode { 73 | pub keyword: Token, 74 | pub identifier: Token, 75 | pub fields: Vec, 76 | } 77 | #[derive(Clone, Debug)] 78 | pub struct FunctionNode { 79 | pub type_node: TypeNode, 80 | pub identifier: Token, 81 | pub params: Vec, 82 | pub block: BlockNode 83 | } 84 | #[derive(Clone, Debug)] 85 | pub struct ParamNode { 86 | pub qualifier: Option, 87 | pub value_node: ValueNode, 88 | } 89 | 90 | -------------------------------------------------------------------------------- /src/parser/expression.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | pub fn parse_expression( 4 | stream: &mut TokenStream, 5 | ) -> ExpressionResult { 6 | let mut original = parse_expression_priority(stream)?; 7 | loop { 8 | if let Some(op) = parse_binary_operation(stream) { 9 | original = parse_binary_expression(stream, op, original)? 10 | } else if let Some(op) = parse_assignment_operation(stream) { 11 | original = parse_assignment_expression(stream, op, original)? 12 | } else { 13 | break Ok(original) 14 | } 15 | } 16 | } 17 | 18 | pub fn parse_expression_priority( 19 | stream: &mut TokenStream, 20 | ) -> ExpressionResult { 21 | if !stream.parsing_statement { 22 | stream.queue_cursor_element(CompletionElement::Identifier(stream.parsing_const)); 23 | } 24 | 25 | let mut original = if let Some(primitive) = parse_primitive(stream) { 26 | ExpressionNode::Primitive(primitive) 27 | } else if let Some(op) = parse_unary_operation(stream){ 28 | parse_unary_expression(stream, op)? 
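// The unary operand is parsed with parse_expression_priority, so postfix
// forms bind tighter than the unary operator (e.g. -a.b parses as -(a.b)).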
29 | } else { 30 | let current = stream.current()?; 31 | match current.kind { 32 | Identifier => parse_identifier_expression(stream)?, 33 | LeftParen => parse_parenthetical_expression(stream)?, 34 | LeftBrace => parse_array_literal_expression(stream)?, 35 | Increment | Decrement => parse_increment_expression(stream, None)?, 36 | _ => { 37 | return Err(stream.alert_error("Expected Expression", current.range)) 38 | } 39 | } 40 | }; 41 | loop { 42 | let current = stream.current()?; 43 | match current.kind { 44 | LeftBracket => original = parse_array_access_expression(stream, original)?, 45 | Dot => original = parse_member_access_expression(stream, original)?, 46 | Question => original = parse_conditional_expression(stream, original)?, 47 | Increment | Decrement => 48 | original = parse_increment_expression(stream, Some(original))?, 49 | _ => break Ok(original) 50 | } 51 | } 52 | } 53 | 54 | pub fn parse_identifier_expression(stream: &mut TokenStream) -> ExpressionResult { 55 | let identifier = parse_identifier(stream)?; 56 | match stream.current()?.kind { 57 | LeftParen => parse_call_expression(stream, identifier), 58 | _ => Ok(ExpressionNode::Identifier(identifier)) 59 | } 60 | } 61 | 62 | pub fn parse_call_expression( 63 | stream: &mut TokenStream, 64 | identifier: Token 65 | ) -> ExpressionResult { 66 | stream.advance(); 67 | let args = parse_list( 68 | stream, 69 | Comma, 70 | RightParen, 71 | Trailing::Optional, 72 | |s| parse_expression(s) 73 | .map(|e| CallArgumentNode{qualifier: None, expression: e}) 74 | )?; 75 | stream.advance(); 76 | Ok(ExpressionNode::Call(CallNode { 77 | identifier, 78 | args 79 | })) 80 | } 81 | 82 | pub fn parse_unary_expression(stream: &mut TokenStream, op: Token) -> ExpressionResult { 83 | let arg = Box::new(parse_expression_priority(stream)?); 84 | Ok(ExpressionNode::Unary(UnaryNode{ arg, op })) 85 | } 86 | 87 | pub fn parse_binary_expression( 88 | stream: &mut TokenStream, 89 | op: Token, 90 | original: ExpressionNode 91 | ) -> ExpressionResult { 92 | let right = parse_expression_priority(stream)?; 93 | 94 | Ok(ExpressionNode::Binary(BinaryNode{ 95 | left: Box::new(original), 96 | right: Box::new(right), 97 | op 98 | })) 99 | } 100 | 101 | pub fn parse_assignment_expression( 102 | stream: &mut TokenStream, 103 | op: Token, 104 | original: ExpressionNode 105 | ) -> ExpressionResult { 106 | let right = parse_expression(stream)?; 107 | 108 | Ok(ExpressionNode::Assignment(AssignmentNode{ 109 | left: Box::new(original), 110 | right: Box::new(right), 111 | op 112 | })) 113 | } 114 | 115 | pub fn parse_increment_expression( 116 | stream: &mut TokenStream, 117 | original: Option, 118 | ) -> ExpressionResult { 119 | let op = stream.consume()?; 120 | let (arg, is_prefix) = match original { 121 | Some(arg) => (arg, false), 122 | None => (parse_expression(stream)?, true) 123 | }; 124 | 125 | Ok(ExpressionNode::Increment(IncrementNode{ 126 | op, 127 | is_prefix, 128 | arg: Box::new(arg) 129 | })) 130 | } 131 | 132 | pub fn parse_parenthetical_expression(stream: &mut TokenStream) -> ExpressionResult { 133 | stream.advance(); 134 | let expr = Box::new(parse_expression(stream)?); 135 | parse_kind(stream, RightParen)?; 136 | eprintln!("current: {:?}", stream.cursor_element); 137 | 138 | Ok(ExpressionNode::Paren(expr)) 139 | } 140 | 141 | pub fn parse_conditional_expression( 142 | stream: &mut TokenStream, 143 | originial: ExpressionNode 144 | ) -> ExpressionResult { 145 | stream.advance(); 146 | let action = parse_expression(stream)?; 147 | parse_kind(stream, Colon)?; 148 | 
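// Ternary form: condition ? action : alternate. The colon was just consumed,
// so the alternate expression is parsed next.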
let alternate = parse_expression(stream)?; 149 | 150 | Ok(ExpressionNode::Conditional(ConditionalNode{ 151 | condition: Box::new(original), 152 | action: Box::new(action), 153 | alternate: Box::new(alternate) 154 | })) 155 | } 156 | 157 | pub fn parse_array_access_expression( 158 | stream: &mut TokenStream, 159 | original: ExpressionNode 160 | ) -> ExpressionResult { 161 | stream.advance(); 162 | let index = parse_expression(stream)?; 163 | parse_kind(stream, RightBracket)?; 164 | 165 | Ok(ExpressionNode::ArrayAccess(ArrayAccessNode{ 166 | argument: Box::new(original), 167 | index: Box::new(index) 168 | })) 169 | } 170 | 171 | pub fn parse_member_access_expression( 172 | stream: &mut TokenStream, 173 | original: ExpressionNode 174 | ) -> ExpressionResult { 175 | stream.queue_cursor_element(CompletionElement::Member(Box::new(original.clone()))); 176 | stream.advance(); 177 | let member = parse_identifier(stream)?; 178 | 179 | Ok(ExpressionNode::MemberAccess(MemberAccessNode{ 180 | argument: Box::new(original), 181 | member 182 | })) 183 | } 184 | 185 | 186 | pub fn parse_array_literal_expression(stream: &mut TokenStream) -> ExpressionResult { 187 | stream.advance(); 188 | let vec = parse_list( 189 | stream, 190 | Comma, 191 | RightBrace, 192 | Trailing::Optional, 193 | |s| parse_expression(s) 194 | )?; 195 | 196 | Ok(ExpressionNode::ArrayLiteral(vec)) 197 | } 198 | 199 | 200 | 201 | 202 | -------------------------------------------------------------------------------- /src/parser/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::{completion::CompletionElement, lexer::*, memory::TypeInfo, nodes::*}; 2 | use lsp_types::Range; 3 | use TokenKind::*; 4 | 5 | mod top_level_parser; 6 | mod expression; 7 | mod statement; 8 | pub use top_level_parser::*; 9 | pub use expression::*; 10 | pub use statement::*; 11 | 12 | 13 | pub fn parse_kind(stream: &mut TokenStream, kind: TokenKind) -> TokenResult { 14 | stream.consume_token_kind(kind) 15 | } 16 | 17 | pub fn parse_conditional(stream: &mut TokenStream, kind: TokenKind) -> Option<Token> { 18 | stream.consume_if(|x| x.kind == kind) 19 | } 20 | 21 | pub fn parse_semicolon(stream: &mut TokenStream) -> TokenResult { 22 | stream.consume_token_kind(Semicolon) 23 | } 24 | 25 | pub fn parse_identifier(stream: &mut TokenStream) -> TokenResult { 26 | stream.consume_token_kind(Identifier) 27 | } 28 | 29 | pub fn parse_primitive(stream: &mut TokenStream) -> Option<Token> { 30 | stream.consume_if(|x| match x.kind { 31 | BoolConstant | IntConstant | UintConstant | FloatConstant => true, 32 | _ => false 33 | }) 34 | } 35 | 36 | pub fn parse_qualifier(stream: &mut TokenStream) -> Option<Token> { 37 | stream.queue_cursor_element(CompletionElement::FunctionQualifier); 38 | stream.consume_if(|x| match x.kind { 39 | In | Out | InOut => { 40 | true 41 | }, 42 | _ => false 43 | }) 44 | } 45 | 46 | pub fn parse_binary_operation(stream: &mut TokenStream) -> Option<Token> { 47 | stream.consume_if(|t| t.to_binary_op().is_some()) 48 | } 49 | 50 | pub fn parse_unary_operation(stream: &mut TokenStream) -> Option<Token> { 51 | stream.consume_if(|t| t.to_unary_op().is_some()) 52 | } 53 | 54 | pub fn parse_assignment_operation(stream: &mut TokenStream) -> Option<Token> { 55 | stream.consume_if(|t| t.to_assignment_op().is_some()) 56 | } 57 | 58 | pub fn parse_increment_operation(stream: &mut TokenStream) -> Option<Token> { 59 | stream.consume_if(|t| match t.kind { 60 | Increment | Decrement => true, 61 | _ => false 62 | }) 63 | } 64 | 65 | pub fn parse_int(stream: &mut
TokenStream) -> Result<Token, TokenError> { 66 | let message = "Expected integer constant."; 67 | let current = stream.current()?; 68 | match current.kind { 69 | IntConstant | UintConstant => stream.consume(), 70 | _ => Err(stream.alert_error(message, current.range)) 71 | } 72 | } 73 | 74 | pub fn parse_number(stream: &mut TokenStream) -> Result<Token, TokenError> { 75 | let message = "Expected number"; 76 | let current = stream.current()?; 77 | match current.kind { 78 | IntConstant | UintConstant | FloatConstant => stream.consume(), 79 | _ => Err(stream.alert_error(message, current.range)) 80 | } 81 | } 82 | 83 | pub fn parse_positive_int(stream: &mut TokenStream) -> Option<u32> { 84 | let lines = stream.get_source().get_code(); 85 | if stream.current().is_err() { return None } 86 | 87 | let current = stream.current().unwrap(); 88 | match current.kind { 89 | IntConstant => match current.text(lines).parse::<u32>() { 90 | Ok(x) => stream.advance_with(Some(x)), 91 | _ => None 92 | }, 93 | UintConstant => { 94 | let text = current.text(lines); 95 | let without_u = &text[..text.len() - 1]; 96 | match without_u.parse::<u32>() { 97 | Ok(x) => stream.advance_with(Some(x)), 98 | _ => None 99 | } 100 | } 101 | _ => None 102 | } 103 | } 104 | 105 | pub fn parse_size(stream: &mut TokenStream) -> Result<(u32, Option<Range>), TokenError> { 106 | let message = "Expected positive integer constant."; 107 | if stream.consume_if(|x| x.kind == LeftBracket).is_some() { 108 | let range = stream.current()?.range; 109 | let size = match parse_positive_int(stream) { 110 | None => Err(stream.alert_error(message, range)), 111 | Some(x) if x == 0 => Err(stream.alert_error(message, range)), 112 | Some(x) => Ok(x) 113 | }?; 114 | Ok((size, Some(range))) 115 | } else { 116 | Ok((0, None)) 117 | } 118 | } 119 | 120 | pub fn parse_type(stream: &mut TokenStream) -> Result<TypeNode, TokenError> { 121 | let id = parse_identifier(stream)?; 122 | 123 | let (size, size_range) = parse_size(stream)?; 124 | let range = Range::new( 125 | id.range.start, 126 | size_range.map_or(id.range.end, |x| x.end) 127 | ); 128 | let base = id.text(stream.get_source().get_code()); 129 | Ok(TypeNode { info: TypeInfo {base, size}, range }) 130 | 131 | } 132 | 133 | pub fn parse_value_specifier(stream: &mut TokenStream) -> Result<ValueNode, TokenError> { 134 | let mut type_node = parse_type(stream)?; 135 | 136 | stream.queue_cursor_element(CompletionElement::None); 137 | let identifier = parse_identifier(stream)?; 138 | 139 | let (other_size, other_size_range) = parse_size(stream)?; 140 | 141 | let range = Range::new( 142 | type_node.range.start, 143 | other_size_range.map_or(identifier.range.end, |x| x.end) 144 | ); 145 | 146 | if other_size != 0 { 147 | if type_node.info.size != 0 { 148 | let message = "Array size cannot be defined twice."; 149 | return Err(stream.alert_error(message, range)); 150 | } else { 151 | type_node.info.size = other_size; 152 | } 153 | } 154 | 155 | Ok(ValueNode { identifier, type_node, range }) 156 | } 157 | 158 | #[derive(PartialEq, Eq)] 159 | enum Trailing{ 160 | Enforced, 161 | Optional, 162 | None 163 | } 164 | 165 | /// Designed to keep pushing forward as much as possible. If the ending is not 166 | /// reached it will parse the whole document.
167 | pub fn parse_list<T, F>( 168 | stream: &mut TokenStream, 169 | separator: TokenKind, 170 | stop: TokenKind, 171 | trailing: Trailing, 172 | content: F 173 | ) -> Result<Vec<T>, TokenError> 174 | where 175 | F: Fn(&mut TokenStream) -> Result<T, TokenError> 176 | { 177 | use Trailing::*; 178 | let mut parsing_separator = false; 179 | let mut vec = vec![]; 180 | let bad_stop_err = format!("Unexpected {}", stop.as_ref()); 181 | let sep_error = match trailing { 182 | Optional | None => format!("Expected {} or {}", separator.as_ref(), stop.as_ref()), 183 | Enforced => format!("Expected {}", separator.as_ref()), 184 | }; 185 | 186 | loop { match parsing_separator { 187 | true => match stream.current()? { 188 | t if t.kind == stop => match trailing { 189 | Optional | None => break Ok(vec), 190 | Enforced => break stream.alert_error_with(&sep_error, t.range, Ok(vec)), 191 | } 192 | t if t.kind == separator => parsing_separator = stream.advance_with(false), 193 | t => parsing_separator = stream.alert_error_with(&sep_error, t.range, false), 194 | }, 195 | false => match stream.current()? { 196 | t if t.kind == stop => match trailing { 197 | Optional | Enforced => break Ok(vec), 198 | None => break stream.alert_error_with(&bad_stop_err, t.range, Ok(vec)), 199 | }, 200 | _ => parsing_separator = match content(stream) { 201 | Ok(ok) => {vec.push(ok); true}, 202 | Err(TokenError::EofError) => break Err(TokenError::EofError), 203 | _ => stream.advance_with(true) 204 | } 205 | } 206 | }} 207 | } 208 | 209 | 210 | 211 | -------------------------------------------------------------------------------- /src/parser/statement.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | pub fn parse_block(stream: &mut TokenStream) -> StatementResult { 4 | let left = parse_kind(stream, LeftBrace)?; 5 | stream.queue_cursor_element(CompletionElement::Statement); 6 | let mut statements = vec![]; 7 | loop { 8 | stream.parsing_statement = true; 9 | let current = stream.current()?; 10 | match current.kind { 11 | RightBrace => break, 12 | _ => match parse_statement(stream) { 13 | Ok(statement) => statements.push(statement), 14 | _ => stream.advance() 15 | } 16 | } 17 | } 18 | let right = parse_kind(stream, RightBrace)?; 19 | Ok(StatementNode::Block(BlockNode { 20 | statements, 21 | range: Range::new(left.range.start, right.range.end) 22 | })) 23 | } 24 | 25 | pub fn parse_statement(stream: &mut TokenStream) -> StatementResult { 26 | let current = stream.current()?; 27 | stream.queue_cursor_element(CompletionElement::Statement); 28 | stream.parsing_statement = true; 29 | match current.kind { 30 | Const => parse_var_declaration_statement(stream), 31 | If => parse_if_statement(stream), 32 | While => parse_while_statement(stream), 33 | For => parse_for_statement(stream), 34 | Switch => parse_switch_statement(stream), 35 | Continue => Ok(StatementNode::Continue(stream.consume()?)), 36 | Break => Ok(StatementNode::Break(stream.consume()?)), 37 | Return => parse_return_statement(stream), 38 | Identifier => parse_identifier_statement(stream), 39 | LeftBrace => parse_block(stream), 40 | _ => parse_expression_statement(stream) 41 | } 42 | } 43 | 44 | pub fn parse_expression_statement(stream: &mut TokenStream) -> StatementResult { 45 | let expression = parse_expression(stream)?; 46 | _ = parse_semicolon(stream); 47 | Ok(StatementNode::Expression(Box::new(expression))) 48 | } 49 | 50 | pub fn parse_identifier_statement(stream: &mut TokenStream) -> StatementResult { 51 | let parsing_statement =
stream.parsing_statement; 52 | stream.turn_off_errors(); 53 | let old_idx = stream.current_idx(); 54 | let maybe_type = parse_type(stream); 55 | let maybe_name = parse_identifier(stream); 56 | stream.force_change_position(old_idx); 57 | stream.turn_on_errors(); 58 | stream.parsing_statement = parsing_statement; 59 | 60 | match maybe_type.is_ok() && maybe_name.is_ok() { 61 | true => parse_var_declaration_statement(stream), 62 | false => parse_expression_statement(stream) 63 | } 64 | } 65 | 66 | pub fn parse_var_declaration_statement(stream: &mut TokenStream) -> StatementResult { 67 | let (keyword, is_const) = parse_conditional(stream, Const) 68 | .map_or((None, false), |x| (Some(x), true)); 69 | 70 | if is_const { 71 | stream.queue_cursor_element(CompletionElement::Type); 72 | stream.parsing_const = is_const; 73 | } 74 | let value = parse_value_specifier(stream); 75 | stream.parsing_const = false; 76 | let value = value?; 77 | 78 | let expression = parse_conditional(stream, Equal) 79 | .map(|_| parse_expression(stream)) 80 | .transpose()? 81 | .map(|x| Box::new(x)); 82 | _ = parse_semicolon(stream); 83 | 84 | Ok(StatementNode::VarDeclaration(VarDeclarationNode{ 85 | keyword, 86 | value: Box::new(value), 87 | expression, 88 | is_const 89 | })) 90 | } 91 | 92 | pub fn parse_if_statement(stream: &mut TokenStream) -> StatementResult { 93 | let keyword = stream.consume()?; 94 | parse_kind(stream, LeftParen)?; 95 | stream.parsing_statement = false; 96 | let condition = Box::new(parse_expression(stream)?); 97 | parse_kind(stream, RightParen)?; 98 | let action = Box::new(parse_statement(stream)?); 99 | stream.queue_cursor_element(CompletionElement::Identifier(stream.parsing_const)); 100 | let alternate = match parse_conditional(stream, Else) { 101 | Some(token) => Some(ElseNode { 102 | keyword: token, 103 | action: Box::new(parse_statement(stream)?) 
104 | }), 105 | None => None 106 | }; 107 | Ok(StatementNode::If(IfNode { 108 | keyword, 109 | condition, 110 | action, 111 | alternate 112 | })) 113 | } 114 | 115 | pub fn parse_while_statement(stream: &mut TokenStream) -> StatementResult { 116 | let keyword = stream.consume()?; 117 | parse_kind(stream, LeftParen)?; 118 | let condition = Box::new(parse_expression(stream)?); 119 | parse_kind(stream, RightParen)?; 120 | let action = Box::new(parse_statement(stream)?); 121 | 122 | Ok(StatementNode::While(WhileNode { 123 | keyword, 124 | condition, 125 | action 126 | })) 127 | } 128 | 129 | pub fn parse_for_statement(stream: &mut TokenStream) -> StatementResult { 130 | let keyword = stream.consume()?; 131 | parse_kind(stream, LeftParen)?; 132 | let initializer = Box::new(parse_statement(stream)?); 133 | let condition = Box::new(parse_expression(stream)?); 134 | _ = parse_semicolon(stream); 135 | let update = Box::new(parse_expression(stream)?); 136 | parse_kind(stream, RightParen)?; 137 | let mut action = parse_statement(stream)?; 138 | action = if let StatementNode::Block(mut block) = action { 139 | block.range.start = keyword.range.start; 140 | StatementNode::Block(block) 141 | } else { 142 | action 143 | }; 144 | 145 | Ok(StatementNode::For(ForNode { 146 | keyword, 147 | initializer, 148 | condition, 149 | update, 150 | action: Box::new(action) 151 | })) 152 | 153 | } 154 | 155 | pub fn parse_switch_statement(stream: &mut TokenStream) -> StatementResult { 156 | let keyword = stream.consume()?; 157 | parse_kind(stream, LeftParen)?; 158 | let condition = Box::new(parse_expression(stream)?); 159 | parse_kind(stream, RightParen)?; 160 | parse_kind(stream, LeftBrace)?; 161 | let mut cases = vec![]; 162 | while stream.current()?.kind != RightBrace { 163 | cases.push(parse_switch_case(stream)?); 164 | } 165 | stream.advance(); 166 | 167 | Ok(StatementNode::Switch(SwitchNode{ 168 | keyword, 169 | condition, 170 | cases 171 | })) 172 | } 173 | 174 | pub fn parse_switch_case(stream: &mut TokenStream) -> Result<SwitchCaseNode, TokenError> { 175 | stream.queue_cursor_element(CompletionElement::SwitchCase); 176 | let keyword = match parse_conditional(stream, Default) { 177 | Some(keyword) => keyword, 178 | None => { 179 | let keyword = parse_kind(stream, Case)?; 180 | parse_int(stream)?; 181 | keyword 182 | } 183 | }; 184 | parse_kind(stream, Colon)?; 185 | let mut statements = vec![]; 186 | while ![RightBrace, Case, Default].contains(&stream.current()?.kind) { 187 | statements.push(parse_statement(stream)?)
188 | }; 189 | 190 | Ok(SwitchCaseNode{ 191 | keyword, 192 | statements 193 | }) 194 | } 195 | 196 | pub fn parse_return_statement(stream: &mut TokenStream) -> StatementResult { 197 | let keyword = stream.consume()?; 198 | let expression = match stream.current()?.kind { 199 | Semicolon => None, 200 | _ => Some(Box::new(parse_expression(stream)?)), 201 | }; 202 | _ = parse_semicolon(stream); 203 | Ok(StatementNode::Return(ReturnNode{ 204 | keyword, 205 | expression 206 | })) 207 | } 208 | -------------------------------------------------------------------------------- /src/parser/top_level_parser.rs: -------------------------------------------------------------------------------- 1 | use crate::memory::TypeInfo; 2 | use super::*; 3 | 4 | pub fn parse_top_level( 5 | stream: &mut TokenStream 6 | ) -> Result<Option<TopLevelNode>, TokenError> { 7 | stream.queue_cursor_element(CompletionElement::TopLevelKeyword); 8 | if let Ok(token) = stream.current() { 9 | let result = match token.kind { 10 | ShaderType => parse_shader_type(stream), 11 | RenderMode => parse_render_mode(stream), 12 | Const => parse_const(stream), 13 | Varying => parse_varying(stream), 14 | Uniform => parse_uniform(stream), 15 | Global => parse_uniform(stream), 16 | Instance => parse_uniform(stream), 17 | GroupUniforms => parse_group_uniforms(stream), 18 | Struct => parse_struct(stream), 19 | Void => parse_function(stream), 20 | Identifier => parse_function(stream), 21 | Include => parse_include(stream), 22 | Semicolon => { 23 | stream.advance(); 24 | Err(TokenError::SyntaxError) 25 | } 26 | _ => { 27 | stream.advance(); 28 | Err(TokenError::SyntaxError) 29 | } 30 | }; 31 | match result { 32 | Ok(declaration) => Ok(Some(declaration)), 33 | Err(TokenError::EofError) => { 34 | Ok(None) 35 | }, 36 | _ => Ok(None) 37 | } 38 | } else { 39 | Err(TokenError::EofError) 40 | } 41 | } 42 | 43 | 44 | pub fn parse_shader_type(stream: &mut TokenStream) -> TopLevelResult { 45 | let keyword = stream.consume()?; 46 | 47 | stream.queue_cursor_element(CompletionElement::ShaderType); 48 | let shader_type = parse_identifier(stream)?; 49 | 50 | _ = parse_kind(stream, Semicolon); 51 | Ok(TopLevelNode::ShaderType(ShaderTypeNode { 52 | keyword, 53 | shader_type 54 | })) 55 | } 56 | 57 | pub fn parse_render_mode(stream: &mut TokenStream) -> TopLevelResult { 58 | let keyword = stream.consume()?; 59 | stream.queue_cursor_element(CompletionElement::RenderMode); 60 | let render_modes = parse_list( 61 | stream, 62 | Comma, 63 | Semicolon, 64 | Trailing::None, 65 | |s| parse_identifier(s) 66 | )?; 67 | 68 | _ = parse_kind(stream, Semicolon); 69 | Ok(TopLevelNode::RenderMode(RenderModeNode{ 70 | keyword, 71 | render_modes 72 | })) 73 | } 74 | 75 | pub fn parse_group_uniforms(stream: &mut TokenStream) -> TopLevelResult { 76 | let keyword = stream.consume()?; 77 | stream.queue_cursor_element(CompletionElement::None); 78 | let group = parse_conditional(stream, Identifier); 79 | let subgroup = parse_conditional(stream, Dot) 80 | .map(|_| parse_identifier(stream)) 81 | .transpose()?; 82 | 83 | _ = parse_kind(stream, Semicolon); 84 | Ok(TopLevelNode::GroupUniforms(GroupUniformsNode { 85 | keyword, 86 | group, 87 | subgroup 88 | })) 89 | } 90 | 91 | pub fn parse_const(stream: &mut TokenStream) -> TopLevelResult { 92 | let keyword = stream.consume()?; 93 | 94 | stream.queue_cursor_element(CompletionElement::Precision); 95 | let precision = parse_conditional(stream, Precision); 96 | if precision.is_some() { stream.queue_cursor_element(CompletionElement::Type) } 97 | 98 | let value =
parse_value_specifier(stream)?; 99 | parse_kind(stream, TokenKind::Equal)?; 100 | let expression: ExpressionNode = parse_expression(stream)?; // Parse Expression 101 | 102 | _ = parse_kind(stream, Semicolon); 103 | Ok(TopLevelNode::Const(ConstNode { 104 | keyword, 105 | precision, 106 | value: Box::new(value), 107 | expression: Box::new(expression) 108 | })) 109 | } 110 | 111 | pub fn parse_varying(stream: &mut TokenStream) -> TopLevelResult { 112 | let keyword = stream.consume()?; 113 | 114 | stream.queue_cursor_element(CompletionElement::Interpolation); 115 | let interpolation = parse_conditional(stream, Interpolation); 116 | if interpolation.is_some() { stream.queue_cursor_element(CompletionElement::Precision) } 117 | let precision = parse_conditional(stream, Precision); 118 | if precision.is_some() { stream.queue_cursor_element(CompletionElement::Type) } 119 | 120 | let value = parse_value_specifier(stream)?; 121 | 122 | _ = parse_kind(stream, Semicolon); 123 | Ok(TopLevelNode::Varying(VaryingNode { 124 | keyword, 125 | interpolation, 126 | precision, 127 | value: Box::new(value) 128 | })) 129 | } 130 | 131 | pub fn parse_uniform(stream: &mut TokenStream) -> TopLevelResult { 132 | let (global_instance, keyword) = match parse_conditional(stream, Uniform) { 133 | Some(x) => (None, x), 134 | None => { 135 | let global_instance = stream.consume_if(|x| [Global, Instance].contains(&x.kind)); 136 | stream.queue_cursor_element(CompletionElement::Uniform); 137 | let keyword = stream.consume()?; 138 | if keyword.kind != Uniform { 139 | let err = stream.alert_error("Expected 'Uniform' keyword", keyword.range); 140 | return Err(err); 141 | } 142 | (global_instance, keyword) 143 | } 144 | }; 145 | stream.queue_cursor_element(CompletionElement::Precision); 146 | let precision = parse_conditional(stream, Precision); 147 | 148 | if precision.is_some() { stream.queue_cursor_element(CompletionElement::Type) } 149 | 150 | let value = parse_value_specifier(stream)?; 151 | stream.queue_cursor_element(CompletionElement::Hint(value.type_node.info.clone())); 152 | let hint = if parse_conditional(stream, Colon).is_some() { 153 | stream.queue_cursor_element(CompletionElement::Hint(value.type_node.info.clone())); 154 | let identifier = parse_identifier(stream)?; 155 | let params = if parse_conditional(stream, LeftParen).is_some() { 156 | let list = parse_list( 157 | stream, 158 | Comma, 159 | RightParen, 160 | Trailing::None, 161 | |s| parse_number(s) 162 | )?; 163 | stream.advance(); 164 | Some(list) 165 | } else { None }; 166 | Some(HintNode{ identifier, params}) 167 | } else { None }; 168 | 169 | let expression = parse_conditional(stream, Equal) 170 | .map(|_| parse_expression(stream)) 171 | .transpose()?; 172 | 173 | _ = parse_kind(stream, Semicolon); 174 | Ok(TopLevelNode::Uniform(UniformNode{ 175 | global_instance, 176 | keyword, 177 | precision, 178 | value: Box::new(value), 179 | hint, 180 | expression: expression.map(|x| Box::new(x)) 181 | })) 182 | 183 | } 184 | 185 | pub fn parse_struct(stream: &mut TokenStream) -> TopLevelResult { 186 | let keyword = stream.consume()?; 187 | stream.queue_cursor_element(CompletionElement::None); 188 | let identifier = parse_identifier(stream)?; 189 | 190 | stream.queue_cursor_element(CompletionElement::Type); 191 | parse_kind(stream, LeftBrace)?; 192 | let fields = parse_list( 193 | stream, 194 | Semicolon, 195 | RightBrace, 196 | Trailing::Enforced, 197 | |s| { 198 | s.queue_cursor_element(CompletionElement::Type); 199 | parse_value_specifier(s) 200 | } 201 | )?; 202 
| parse_kind(stream, RightBrace)?; 203 | 204 | _ = parse_kind(stream, Semicolon); 205 | Ok(TopLevelNode::Struct(StructNode{ 206 | keyword, 207 | identifier, 208 | fields 209 | })) 210 | } 211 | 212 | pub fn parse_function_arg(stream: &mut TokenStream) -> Result<ParamNode, TokenError> { 213 | let qualifier = parse_qualifier(stream); 214 | let value_node = parse_value_specifier(stream)?; 215 | Ok(ParamNode { 216 | qualifier, 217 | value_node 218 | }) 219 | } 220 | 221 | pub fn parse_function(stream: &mut TokenStream) -> TopLevelResult { 222 | stream.queue_cursor_element(CompletionElement::TopLevelKeyword); 223 | let mut is_void = false; 224 | let type_node = match parse_conditional(stream, Void) { 225 | Some(x) => { 226 | is_void = true; 227 | let text = x.text(stream.get_source().get_code()); 228 | TypeNode { info: TypeInfo::from_str(&text), range: x.range} 229 | } 230 | None => parse_type(stream)?, 231 | }; 232 | if is_void { 233 | stream.queue_cursor_element(CompletionElement::FunctionName); 234 | } else { 235 | stream.queue_cursor_element(CompletionElement::None); 236 | } 237 | let identifier = parse_identifier(stream)?; 238 | 239 | parse_kind(stream, LeftParen)?; 240 | let params = parse_list( 241 | stream, 242 | Comma, 243 | RightParen, 244 | Trailing::Optional, 245 | |s| parse_function_arg(s) 246 | )?; 247 | 248 | parse_kind(stream, RightParen)?; 249 | let block = match parse_block(stream)? { 250 | StatementNode::Block(block) => block, 251 | _ => unreachable!() 252 | }; 253 | stream.parsing_statement = true; 254 | Ok(TopLevelNode::Function(FunctionNode{ 255 | type_node, 256 | identifier, 257 | params, 258 | block 259 | })) 260 | } 261 | 262 | pub fn parse_include(stream: &mut TokenStream) -> TopLevelResult { 263 | let keyword = parse_kind(stream, Include)?; 264 | 265 | stream.queue_cursor_element(CompletionElement::IncludeString); 266 | let path = stream.consume()?; 267 | if path.kind != String { 268 | return Err(stream.alert_error("Expected String", path.range)); 269 | } 270 | // let path = parse_kind(stream, String)?; 271 | 272 | Ok(TopLevelNode::Include(IncludeNode{ 273 | keyword, 274 | path 275 | })) 276 | } 277 | 278 | 279 | 280 | 281 | -------------------------------------------------------------------------------- /src/source_code.rs: -------------------------------------------------------------------------------- 1 | use lsp_server::{Connection, Message, Notification}; 2 | use lsp_types::*; 3 | 4 | use crate::{get_byte_offset_from_position, lexer::TokenError}; 5 | 6 | pub struct SourceDocument { 7 | code: String, 8 | diagnostics: Vec<Diagnostic> 9 | } 10 | impl SourceDocument { 11 | pub fn new(source: &str) -> Self { 12 | let diagnostics = vec![]; 13 | Self {diagnostics, code: source.to_string() } 14 | } 15 | 16 | pub fn push_error<T>(&mut self, msg: &str, range: Range, error: T) -> T { 17 | self.diagnostics.push(create_diagnostic(range, msg)); 18 | error 19 | } 20 | 21 | pub fn push_eof_error(&mut self, range: Range) -> TokenError { 22 | self.diagnostics.push(create_diagnostic(range, "Unexpected EOF")); 23 | TokenError::EofError 24 | } 25 | 26 | pub fn force_change_diagnostics(&mut self, diagnostics: Vec<Diagnostic>) { 27 | self.diagnostics = diagnostics; 28 | } 29 | 30 | pub fn get_diagnostics(&self) -> &Vec<Diagnostic> { 31 | &self.diagnostics 32 | } 33 | 34 | pub fn get_code(&self) -> &String { 35 | &self.code 36 | } 37 | 38 | pub fn add_diagnostics(&mut self, diagnostics: Vec<Diagnostic>) { 39 | self.diagnostics.extend(diagnostics) 40 | } 41 | 42 | pub fn apply_change(&mut self, change: TextDocumentContentChangeEvent) { 43 | if let Some(range) =
change.range { 44 | let start_byte = get_byte_offset_from_position(&self.code, range.start); 45 | let end_byte = get_byte_offset_from_position(&self.code, range.end); 46 | let new_text = &change.text; 47 | if start_byte <= self.code.len() && end_byte <= self.code.len() && start_byte <= end_byte { 48 | self.code.replace_range(start_byte..end_byte, new_text); 49 | } 50 | } 51 | } 52 | } 53 | 54 | fn create_unused_warning(range: Range, id_name: &str) -> Diagnostic { 55 | Diagnostic { 56 | range, 57 | severity: Some(DiagnosticSeverity::WARNING), 58 | code: None, 59 | code_description: None, 60 | source: Some("GDShaderServer".to_string()), 61 | message: format!("'{}' is unused.", id_name), 62 | related_information: None, 63 | tags: Some(vec![DiagnosticTag::UNNECESSARY]), 64 | data: None, 65 | } 66 | } 67 | 68 | fn create_diagnostic(range: Range, message: &str) -> Diagnostic { 69 | Diagnostic { 70 | range, 71 | severity: Some(DiagnosticSeverity::ERROR), 72 | code: None, 73 | code_description: None, 74 | source: Some("GDShaderServer".to_string()), 75 | message: message.to_string(), 76 | related_information: None, 77 | tags: None, 78 | data: None, 79 | } 80 | } 81 | 82 | pub fn send_errors(connection: &Connection, uri: &Url, diagnostics: Vec<Diagnostic>) { 83 | let params = PublishDiagnosticsParams { 84 | uri: uri.clone(), 85 | diagnostics: diagnostics.clone(), 86 | version: None, 87 | }; 88 | 89 | connection.sender.send(Message::Notification(Notification { 90 | method: "textDocument/publishDiagnostics".to_string(), 91 | params: serde_json::to_value(params).unwrap(), 92 | })).unwrap(); 93 | } 94 | 95 | -------------------------------------------------------------------------------- /test.gdshader: -------------------------------------------------------------------------------- 1 | void f() { 2 | } 3 | -------------------------------------------------------------------------------- /yorb.gdshaderinc: -------------------------------------------------------------------------------- 1 | void fragment() { 2 | vec4 x(); 3 | } --------------------------------------------------------------------------------
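Illustrative example (not a file in this repository): a small GDShader source of the kind the parsers above are written to accept, covering a shader_type declaration, a render_mode list, a hinted uniform, a const, a struct, and a function whose body exercises the if, for, and switch statement parsers. All identifiers here (speed, Light, STEPS, acc) are invented for this sketch.

// Illustrative only; not part of the repository's test assets.
shader_type canvas_item;
render_mode blend_mix;

uniform float speed : hint_range(0.0, 10.0) = 1.0;
const int STEPS = 4;

// Struct declarations are handled by parse_struct.
struct Light {
    vec3 color;
    float energy;
};

void fragment() {
    float t = TIME * speed;
    vec3 acc = vec3(0.0);
    // For loops exercise parse_for_statement.
    for (int i = 0; i < STEPS; i++) {
        acc += vec3(float(i)) * 0.1;
    }
    // Switch statements exercise parse_switch_statement and parse_switch_case.
    switch (STEPS) {
        case 4:
            acc *= 2.0;
            break;
        default:
            break;
    }
    // If statements and member/swizzle access exercise parse_if_statement
    // and parse_member_access_expression.
    if (acc.x > 1.0) {
        acc = acc.yzx;
    }
    COLOR = vec4(acc + vec3(sin(t)), 1.0);
}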