├── .gitattributes ├── .github └── workflows │ ├── extension.yml │ ├── lsif.yml │ ├── release.yml │ └── server.yml ├── .gitignore ├── .rustfmt.toml ├── .vscode ├── launch.json ├── settings.json └── tasks.json ├── .vscodeignore ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── client ├── package-lock.json ├── package.json ├── rollup.config.js ├── src │ ├── commands.ts │ ├── extension.ts │ ├── log.ts │ ├── lspClient.ts │ ├── lspExt.ts │ ├── net.ts │ └── persistent_state.ts └── tsconfig.json ├── logo-min.png ├── logo-mini.png ├── logo.png ├── package-lock.json ├── package.json ├── server ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── Makefile ├── logging │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── logging_macro │ ├── Cargo.toml │ └── src │ │ └── lib.rs └── main │ ├── Cargo.toml │ ├── src │ ├── commands │ │ ├── graph_dot.rs │ │ ├── merged_includes.rs │ │ ├── mod.rs │ │ └── parse_tree.rs │ ├── configuration.rs │ ├── consts.rs │ ├── dfs.rs │ ├── diagnostics_parser.rs │ ├── graph.rs │ ├── linemap.rs │ ├── lsp_ext.rs │ ├── main.rs │ ├── merge_views.rs │ ├── navigation.rs │ ├── opengl.rs │ ├── source_mapper.rs │ ├── test.rs │ └── url_norm.rs │ └── testdata │ ├── 01 │ ├── common.glsl │ ├── final.fsh │ └── final.fsh.merge │ ├── 02 │ ├── final.fsh │ ├── final.fsh.merge │ └── utils │ │ ├── burger.glsl │ │ ├── sample.glsl │ │ └── test.glsl │ ├── 03 │ ├── final.fsh │ ├── final.fsh.merge │ └── utils │ │ ├── burger.glsl │ │ ├── sample.glsl │ │ └── test.glsl │ ├── 04 │ ├── final.fsh │ ├── final.fsh.merge │ ├── lib │ │ └── matrices.glsl │ └── utils │ │ ├── stuff1.glsl │ │ ├── stuff2.glsl │ │ └── utilities.glsl │ ├── 05 │ ├── common.glsl │ ├── final.fsh │ ├── final.fsh.merge │ └── test │ │ ├── banana.glsl │ │ └── burger.glsl │ └── 06 │ ├── final.fsh │ ├── final.fsh.merge │ └── test.glsl ├── shaders.py └── tsconfig.json /.gitattributes: -------------------------------------------------------------------------------- 1 | * text eol=lf 2 | *.png binary -------------------------------------------------------------------------------- /.github/workflows/extension.yml: -------------------------------------------------------------------------------- 1 | name: Build Extension 2 | on: 3 | push: 4 | branches: [ rust-rewrite ] 5 | pull_request: 6 | branches: [ rust-rewrite ] 7 | jobs: 8 | build-vscode-extension: 9 | runs-on: ubuntu-20.04 10 | steps: 11 | - uses: actions/checkout@v2 12 | - run: npm i 13 | - uses: HaaLeo/publish-vscode-extension@v0 14 | id: vsce_build 15 | with: 16 | pat: 'sample text' 17 | dryRun: true 18 | - uses: actions/upload-artifact@v2 19 | with: 20 | name: vscode-mc-shader.vsix 21 | path: ${{ steps.vsce_build.outputs.vsixPath }} -------------------------------------------------------------------------------- /.github/workflows/lsif.yml: -------------------------------------------------------------------------------- 1 | name: LSIF 2 | on: 3 | - push 4 | jobs: 5 | index: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@v2 9 | - name: Generate LSIF data 10 | uses: sourcegraph/lsif-rust-action@main 11 | - name: Upload LSIF data 12 | uses: sourcegraph/lsif-upload-action@master 13 | with: 14 | github_token: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Create Github Release 2 | on: 3 | push: 4 | tags: [ "*" ] 5 | env: 6 | CARGO_TERM_COLOR: always 7 | jobs: 8 | empty-release: 9 | 
runs-on: ubuntu-20.04 10 | outputs: 11 | upload_url: ${{ steps.create_release.outputs.upload_url }} 12 | steps: 13 | - name: Get tag 14 | id: tag 15 | run: echo "::set-output name=version::${GITHUB_REF/refs\/tags\//}" 16 | - name: Create Release 17 | id: create_release 18 | uses: actions/create-release@v1 19 | env: 20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 21 | with: 22 | tag_name: ${{ steps.tag.outputs.version }} 23 | release_name: Release ${{ steps.tag.outputs.version }} 24 | draft: false 25 | prerelease: false 26 | release-server: 27 | runs-on: ${{ matrix.platforms.os }} 28 | defaults: 29 | run: 30 | working-directory: server 31 | needs: empty-release 32 | strategy: 33 | matrix: 34 | platforms: 35 | - os: ubuntu-18.04 36 | target: x86_64-unknown-linux-gnu 37 | dir: server/mcshader-lsp 38 | artifact: x86_64-unknown-linux-gnu 39 | - os: windows-latest 40 | target: x86_64-pc-windows-msvc 41 | dir: server/mcshader-lsp.exe 42 | artifact: x86_64-windows-msvc.exe 43 | - os: macos-11 44 | target: x86_64-apple-darwin 45 | dir: server/mcshader-lsp 46 | artifact: x86_64-apple-darwin 47 | - os: macos-11 48 | target: aarch64-apple-darwin 49 | dir: server/mcshader-lsp 50 | artifact: aarch64-apple-darwin 51 | steps: 52 | - uses: actions/checkout@v2 53 | - name: Install latest nightly 54 | uses: actions-rs/toolchain@v1 55 | with: 56 | toolchain: nightly 57 | default: true 58 | target: ${{ matrix.platforms.target }} 59 | override: true 60 | - name: Build server 61 | run: cargo build --release --target ${{ matrix.platforms.target }} --out-dir . -Z unstable-options 62 | - name: Upload release file 63 | uses: actions/upload-release-asset@v1 64 | env: 65 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 66 | with: 67 | upload_url: ${{ needs.empty-release.outputs.upload_url }} 68 | asset_path: ${{ matrix.platforms.dir }} 69 | asset_name: mcshader-lsp-${{ matrix.platforms.artifact }} 70 | asset_content_type: application/octet-stream 71 | release-vscode-extension: 72 | runs-on: ubuntu-20.04 73 | needs: [release-server, empty-release] 74 | steps: 75 | - uses: actions/checkout@v2 76 | - run: npm i 77 | - uses: HaaLeo/publish-vscode-extension@v0 78 | id: vsce_release 79 | with: 80 | pat: ${{ secrets.VSCE_TOKEN }} 81 | registryUrl: https://marketplace.visualstudio.com 82 | - name: Upload VSIX file 83 | uses: actions/upload-release-asset@v1 84 | env: 85 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 86 | with: 87 | upload_url: ${{ needs.empty-release.outputs.upload_url }} 88 | asset_path: ${{ steps.vsce_release.outputs.vsixPath }} 89 | asset_name: vscode-mc-shader.vsix 90 | asset_content_type: application/vsix -------------------------------------------------------------------------------- /.github/workflows/server.yml: -------------------------------------------------------------------------------- 1 | name: Test Server 2 | on: 3 | push: 4 | branches: [ rust-rewrite ] 5 | pull_request: 6 | branches: [ rust-rewrite ] 7 | env: 8 | CARGO_TERM_COLOR: always 9 | jobs: 10 | build-and-test: 11 | runs-on: ${{ matrix.platforms.os }} 12 | defaults: 13 | run: 14 | working-directory: server 15 | strategy: 16 | matrix: 17 | platforms: 18 | - os: ubuntu-18.04 19 | target: x86_64-unknown-linux-gnu 20 | dir: server/mcshader-lsp 21 | artifact: x86_64-unknown-linux-gnu 22 | - os: windows-latest 23 | target: x86_64-pc-windows-msvc 24 | dir: server/mcshader-lsp.exe 25 | artifact: x86_64-windows-msvc.exe 26 | - os: macos-11 27 | target: x86_64-apple-darwin 28 | dir: server/mcshader-lsp 29 | artifact: x86_64-apple-darwin 30 | - os: 
macos-11 31 | target: aarch64-apple-darwin 32 | dir: server/mcshader-lsp 33 | artifact: aarch64-apple-darwin 34 | steps: 35 | - uses: actions/checkout@v2 36 | - name: Install latest nightly 37 | uses: actions-rs/toolchain@v1 38 | with: 39 | toolchain: nightly 40 | default: true 41 | target: ${{ matrix.platforms.target }} 42 | override: true 43 | - name: Build server 44 | run: cargo build --target ${{ matrix.platforms.target }} --out-dir . -Z unstable-options 45 | - uses: actions/upload-artifact@v2 46 | with: 47 | name: mcshader-lsp-${{ matrix.platforms.artifact }} 48 | path: ${{ matrix.platforms.dir }} 49 | - name: Run tests 50 | run: cargo test --target ${{ matrix.platforms.target }} 51 | if: ${{ matrix.platforms.target != 'aarch64-apple-darwin' }} 52 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | out 3 | *.out 4 | *.txt 5 | glslangValidator 6 | *.vsix 7 | graph.dot -------------------------------------------------------------------------------- /.rustfmt.toml: -------------------------------------------------------------------------------- 1 | edition = "2021" 2 | fn_args_layout = "compressed" 3 | max_width = 140 -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | // A launch configuration that compiles the extension and then opens it inside a new window 2 | { 3 | "version": "0.2.0", 4 | "configurations": [ 5 | { 6 | "type": "extensionHost", 7 | "request": "launch", 8 | "name": "Launch Client", 9 | "runtimeExecutable": "${execPath}", 10 | "env": { 11 | "MCSHADER_DEBUG": "true" 12 | }, 13 | "args": ["--extensionDevelopmentPath=${workspaceRoot}"], 14 | "outFiles": ["${workspaceRoot}/client/out/**/*.js"], 15 | "preLaunchTask": { 16 | "type": "npm", 17 | "script": "watch" 18 | } 19 | } 20 | ] 21 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.exclude": { 3 | "out": false 4 | }, 5 | "npm.exclude": [ 6 | "**/client", 7 | "**/server" 8 | ], 9 | "search.exclude": { 10 | "out": true 11 | }, 12 | "yaml.schemas": { 13 | "https://json.schemastore.org/github-workflow": [".github/*"] 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | // See https://go.microsoft.com/fwlink/?LinkId=733558 3 | // for the documentation about the tasks.json format 4 | "version": "2.0.0", 5 | "tasks": [ 6 | { 7 | "type": "npm", 8 | "script": "fix", 9 | "group": { 10 | "kind": "build", 11 | "isDefault": true 12 | }, 13 | "presentation": { 14 | "echo": false, 15 | "reveal": "silent", 16 | "focus": false, 17 | "panel": "shared" 18 | } 19 | }, 20 | { 21 | "type": "npm", 22 | "script": "compile", 23 | "group": "build", 24 | "presentation": { 25 | "panel": "dedicated", 26 | "reveal": "never" 27 | }, 28 | "problemMatcher": [ 29 | "$tsc" 30 | ] 31 | }, 32 | { 33 | "type": "npm", 34 | "script": "watch", 35 | "isBackground": true, 36 | "group": { 37 | "kind": "build", 38 | "isDefault": true 39 | }, 40 | "presentation": { 41 | "panel": "dedicated", 42 | "reveal": "never" 43 | }, 44 | "problemMatcher": { 45 | "pattern": { 46 | "regexp": 
"(\\[Finished running. Exit status: 101\\])|(Found [1-9]\\d* errors. Watching for file changes.)" 47 | }, 48 | "background": { 49 | "activeOnStart": true, 50 | "beginsPattern": "[Running 'cargo build']", 51 | "endsPattern": "(Watching for file changes)|(\\[Finished running. Exit status: 0\\])" 52 | } 53 | } 54 | } 55 | ] 56 | } -------------------------------------------------------------------------------- /.vscodeignore: -------------------------------------------------------------------------------- 1 | **/node_modules 2 | **/*.ts 3 | server/ 4 | **/*.js.map 5 | .vscode 6 | *.dot 7 | *.py 8 | .gitignore 9 | **/*.yml 10 | **/*.txt -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | All notable changes to the "vscode-mc-shader" extension will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) 6 | 7 | ## [0.9.9] 8 | 9 | ### Added 10 | 11 | - Support for mod world folders, outside the standard world{-1,0,1}. 12 | - Support for compute shader files ending in \_a to \_z. 13 | 14 | ### Fixed 15 | 16 | - Crash when running with eglot as LSP client. 17 | - Extension icon client not displaying (encoding issue). 18 | 19 | ## [0.9.8] 20 | 21 | ### Fixed 22 | 23 | - NVIDIA diagnostics line offset off-by-one due to confusion with erroneous (non-proper) GLSL files resulting in both -1 and -2 offsets appearing to be valid when only the former is. 24 | - Non-toplevel files being treated as toplevel files when they have .fsh/.vsh/etc and not imported into a valid toplevel file. 25 | - Fix issue in the depth-first-search iterator when a file is imported twice into another file with a different include in between. 26 | 27 | ## [0.9.7] 28 | 29 | ### Fixed 30 | 31 | - Fixed bad release tag format 32 | - Fixed extension silently failing on activation 33 | 34 | ## [0.9.6] 35 | 36 | ### Added 37 | 38 | - MacOS M1 binary releases 39 | - AMD OpenGL driver diagnostics output support. AMD linting is a-go 🚀 40 | - Tree-sitter based go-to-definition/find-references/document symbols. Currently disabled until stabilized 41 | 42 | ### Fixed 43 | 44 | - Another `#include` merging bug when a file is imported twice into another file at different lines 45 | 46 | ## [0.9.5] 47 | 48 | ### Added 49 | 50 | - Filesystem watcher reads custom defined file associations 51 | 52 | ### Fixed 53 | 54 | - Fixed `#include` merging for when file is merged twice that would normally be `#ifdef` guarded. 
Please see the commit message of [551380a](https://github.com/Strum355/mcshader-lsp/commit/551380a6ed00709287460b7d8c88e7803956052c) for a detailed explanation 55 | 56 | ## [0.9.4] 57 | 58 | ### Fixed 59 | 60 | - `#include` merging when a project consists of files with both CRLF and LF line endings 61 | - Out-of-tree shader files are not linted or added to the dependency graph 62 | - Client no longer attempts to bootstrap the server when `MCSHADER_DEBUG=true` 63 | 64 | ## [0.9.3] 65 | 66 | ### Fixed 67 | 68 | - Language server download for Windows 69 | 70 | ## [0.9.2] 71 | 72 | ### Changed 73 | 74 | - VSCode extension activation predicate to only activate when a `shaders` folder exists at the top level 75 | 76 | ### Added 77 | 78 | - Additional client-side logging 79 | 80 | ## [0.9.1] 81 | 82 | ### Fixed 83 | 84 | - Windows support in the client not adding `.exe` to the language server path 85 | - Binary release CI 86 | 87 | ## [0.9.0] 88 | 89 | ### Changed 90 | 91 | - Replaced the in-process TypeScript language server with a Rust-based language server 92 | 93 | ### Fixed 94 | 95 | - Due to the above, `#include` directive handling is vastly improved 96 | 97 | ### Added 98 | 99 | - Command to view a read-only document representing a top-level file with all includes merged 100 | - Command to generate a DOT graph file of the entire project 101 | - Command to restart the language server 102 | 103 | ### Removed 104 | 105 | - `glslangValidatorPath` and `shaderpacksPath` config settings 106 | 107 | ## [0.8.5] 108 | 109 | ### Fixed 110 | 111 | - Fixed for the latest VSCode version 112 | 113 | ### Removed 114 | 115 | - Filters from 0.8.4 116 | 117 | ## [0.8.4] 118 | 119 | ### Fixed 120 | 121 | - Filtering out `global const initializers must be constant`. "Something something non-standard shader extensions that GPU developers implicitly enable" - Dethraid 122 | 123 | ## [0.8.3] 124 | 125 | ### Fixed 126 | 127 | - Filtering out gpu_shader4 in code 128 | 129 | ## [0.8.2] 130 | 131 | ### Added 132 | 133 | - Support for #include directives 134 | - Basic linting with highlighting, with error propagation to all known parents of an include. 135 | - Support for .fsh, .vsh, .glsl and .gsh files. 136 | - Incomplete completion items 137 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Want to contribute? I sure want you to! Here's how you can help: 4 | 5 | ## Setting up the environment 6 | 7 | Development requirements (did I miss any? Submit a PR!): 8 | 9 | `Git`, `NPM`, `NodeJS`, `VSCode`, `Rust` 10 | 11 | Fork the repo and clone it (you are using [SSH keys](https://help.github.com/articles/connecting-to-github-with-ssh/), right?): 12 | 13 | `git clone git@github.com:<your username>/vscode-mc-shader.git` 14 | 15 | Install dependencies: 16 | 17 | `cd vscode-mc-shader && npm i` (the root `postinstall` script installs the client dependencies; the Rust server is built separately with `cargo build` inside `server/`) 18 | 19 | Follow [this](https://code.visualstudio.com/docs/extensions/overview) link to learn your way around making extensions, and [this one](https://code.visualstudio.com/docs/extensions/example-language-server) to learn a bit about the Language Server Protocol. 20 | 21 | To test out your changes, choose `Launch Client` in the debug menu or press `F5`. 22 | 23 | ## Submitting a Pull Request 24 | 25 | Please adhere to the following guidelines before submitting a pull request: 26 | 27 | - Ensure the code passes the eslint checks with the given config (`npm run lint`). 28 | - Provide some comments in the code where applicable.
29 | - Provide a good explanation of the changes provided. This helps me follow your code better. 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Noah Santschi-Cooney 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Minecraft GLSL Shaders Language Server 2 | ## mcshader-lsp 3 | 4 | [![Marketplace Version](https://img.shields.io/visual-studio-marketplace/v/strum355.vscode-mc-shader.svg)](https://marketplace.visualstudio.com/items?itemName=strum355.vscode-mc-shader) [![Installs](https://img.shields.io/visual-studio-marketplace/i/strum355.vscode-mc-shader.svg)](https://marketplace.visualstudio.com/items?itemName=strum355.vscode-mc-shader) 5 | [![license](https://img.shields.io/github/license/Strum355/vscode-mc-shader.svg)](https://github.com/Strum355/mcshader-lsp) 6 | [![Issues](https://img.shields.io/github/issues-raw/Strum355/mcshader-lsp.svg)](https://github.com/Strum355/mcshader-lsp/issues) 7 | [![Build Status](https://img.shields.io/drone/build/Strum355/mcshader-lsp)](https://cloud.drone.io/Strum355/mcshader-lsp) 8 | 9 | mcshader-lsp is a [Language Server](https://langserver.org/) and collection of editor extensions for developing Minecraft GLSL Shaders for [Optifine](http://optifine.net). It currently provides linting and syntax highlighting. 10 | 11 | Currently supported editors: 12 | 13 | - [Visual Studio Code](https://code.visualstudio.com/) with `vscode-mc-shader` 14 | 15 | 16 | 17 | ## Features 18 | 19 | - Linting 20 | - Syntax highlighting 21 | - Support for `#include` directives 22 | - Displaying `#include` flattened file 23 | - Generating Graphviz DOT `#include` dependency graph 24 | 25 | 26 | ## Requirements 27 | 28 | - That you've only one shader folder open. Multiple workspaces aren't currently supported. 29 | - The root folder of the workspace is the parent folder of `shaders` folder. 30 | 31 | 36 | 37 | ## Contributing 38 | 39 | Please see [CONTRIBUTING.md](https://github.com/Strum355/mcshader-lsp/blob/master/CONTRIBUTING.md). 
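As a concrete illustration of the `#include` support and the folder requirements listed above, here is a minimal, hypothetical top-level shader. The file names and include paths are examples only (they are not part of this repository) and assume the common Optifine convention of resolving `/`-prefixed include paths against the `shaders/` folder.

```glsl
// shaders/final.fsh: a hypothetical top-level fragment shader in a shaderpack.
#version 120

// The extension follows these directives when linting, when displaying the
// flattened file, and when generating the Graphviz DOT dependency graph.
#include "/lib/common.glsl"
#include "/utils/noise.glsl"

void main() {
    // Placeholder output; a real shaderpack would compute lighting, fog, etc.
    gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
```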
40 | 41 | ## Planned 42 | 43 | - Multi-workspaces (currently only one is supported and using multiple is very undefined behaviour) 44 | - Warnings for unused uniforms/varyings 45 | - Lint for all #define value combinations 46 | - Compute shader support 47 | - Some cool `DRAWBUFFERS` stuff 48 | 49 | Got a feature request? Chuck it into an Issue! 50 | 51 | ## Known Issues 52 | 53 | Check the issues on Github [here](https://github.com/Strum355/mcshader-lsp/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-desc+label%3Abug). 54 | 55 | ## Release Notes 56 | 57 | Check [CHANGELOG.md](https://github.com/Strum355/mcshader-lsp/blob/master/CHANGELOG.md). 58 | 59 | ## License 60 | 61 | This code is released under the [MIT License](https://github.com/Strum355/mcshader-lsp/blob/master/LICENSE). Copyright (c) 2021 Noah Santschi-Cooney 62 | -------------------------------------------------------------------------------- /client/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "vscode-mc-shader-client", 3 | "scripts": { 4 | "compile": "tsc -p ./", 5 | "rollup": "rollup -c" 6 | }, 7 | "dependencies": { 8 | "@rollup/plugin-json": "^4.1.0", 9 | "adm-zip": "^0.5.9", 10 | "encoding": "^0.1.13", 11 | "node-fetch": "^2.6.7", 12 | "vscode-languageclient": "^6.1.4" 13 | }, 14 | "devDependencies": { 15 | "@rollup/plugin-commonjs": "^21.0.2", 16 | "@rollup/plugin-node-resolve": "^13.1.3", 17 | "@types/adm-zip": "^0.4.34", 18 | "@types/node-fetch": "^2.6.1", 19 | "@types/vscode": "^1.65.0", 20 | "rollup": "^2.70.1" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /client/rollup.config.js: -------------------------------------------------------------------------------- 1 | import resolve from '@rollup/plugin-node-resolve'; 2 | import commonjs from '@rollup/plugin-commonjs'; 3 | import json from '@rollup/plugin-json'; 4 | import nodeBuiltins from 'builtin-modules'; 5 | 6 | /** @type { import('rollup').RollupOptions } */ 7 | export default { 8 | input: 'out/extension.js', 9 | plugins: [ 10 | json(), 11 | resolve({ 12 | preferBuiltins: true 13 | }), 14 | commonjs() 15 | ], 16 | external: [...nodeBuiltins, 'vscode'], 17 | output: { 18 | file: './out/extension.js', 19 | format: 'cjs' 20 | } 21 | }; -------------------------------------------------------------------------------- /client/src/commands.ts: -------------------------------------------------------------------------------- 1 | import path = require('path') 2 | import * as vscode from 'vscode' 3 | import * as lsp from 'vscode-languageclient' 4 | import { Extension } from './extension' 5 | import { log } from './log' 6 | 7 | export type Command = (...args: any[]) => unknown 8 | 9 | export function generateGraphDot(e: Extension): Command { 10 | return async () => { 11 | await e.lspClient.sendRequest(lsp.ExecuteCommandRequest.type.method, { 12 | command: 'graphDot', 13 | arguments: [vscode.workspace.workspaceFolders[0].uri.path], 14 | }) 15 | } 16 | } 17 | 18 | export function restartExtension(e: Extension): Command { 19 | return async () => { 20 | vscode.window.showInformationMessage('Reloading Minecraft GLSL language server...') 21 | await e.deactivate() 22 | await e.activate(e.context).catch(log.error) 23 | } 24 | } 25 | 26 | export function virtualMergedDocument(e: Extension): Command { 27 | const getVirtualDocument = async (path: string): Promise => { 28 | let content: string = '' 29 | try { 30 | content = await 
e.lspClient.sendRequest(lsp.ExecuteCommandRequest.type.method, { 31 | command: 'virtualMerge', 32 | arguments: [path] 33 | }) 34 | } catch (e) { } 35 | 36 | return content 37 | } 38 | 39 | const docProvider = new class implements vscode.TextDocumentContentProvider { 40 | onDidChangeEmitter = new vscode.EventEmitter() 41 | onDidChange = this.onDidChangeEmitter.event 42 | 43 | provideTextDocumentContent(uri: vscode.Uri, __: vscode.CancellationToken): vscode.ProviderResult { 44 | return getVirtualDocument(uri.path.replace('.flattened' + path.extname(uri.path), path.extname(uri.path))) 45 | } 46 | } 47 | 48 | e.context.subscriptions.push(vscode.workspace.registerTextDocumentContentProvider('mcglsl', docProvider)) 49 | 50 | return async () => { 51 | if (vscode.window.activeTextEditor.document.languageId != 'glsl') return 52 | 53 | const uri = vscode.window.activeTextEditor.document.uri.path 54 | .substring(0, vscode.window.activeTextEditor.document.uri.path.lastIndexOf('.')) 55 | + '.flattened.' 56 | + vscode.window.activeTextEditor.document.uri.path 57 | .slice(vscode.window.activeTextEditor.document.uri.path.lastIndexOf('.') + 1) 58 | const path = vscode.Uri.parse(`mcglsl:${uri}`) 59 | 60 | const doc = await vscode.workspace.openTextDocument(path) 61 | docProvider.onDidChangeEmitter.fire(path) 62 | await vscode.window.showTextDocument(doc, { 63 | viewColumn: vscode.ViewColumn.Two, 64 | preview: true 65 | }) 66 | } 67 | } 68 | 69 | export function parseTree(e: Extension): Command { 70 | const getVirtualDocument = async (path: string): Promise => { 71 | let content: string = '' 72 | try { 73 | content = await e.lspClient.sendRequest(lsp.ExecuteCommandRequest.type.method, { 74 | command: 'parseTree', 75 | arguments: [path] 76 | }) 77 | } catch (e) { } 78 | 79 | return content 80 | } 81 | 82 | const docProvider = new class implements vscode.TextDocumentContentProvider { 83 | onDidChangeEmitter = new vscode.EventEmitter() 84 | onDidChange = this.onDidChangeEmitter.event 85 | 86 | provideTextDocumentContent(uri: vscode.Uri, _: vscode.CancellationToken): vscode.ProviderResult { 87 | if (uri.path.includes('.flattened.')) return '' 88 | return getVirtualDocument(uri.path.substring(0, uri.path.lastIndexOf('.'))) 89 | } 90 | } 91 | 92 | e.context.subscriptions.push(vscode.workspace.registerTextDocumentContentProvider('mcglsl', docProvider)) 93 | 94 | return async () => { 95 | if (vscode.window.activeTextEditor.document.languageId != 'glsl') return 96 | 97 | const uri = vscode.window.activeTextEditor.document.uri 98 | const path = vscode.Uri.parse(`mcglsl:${uri.path}.ast`) 99 | 100 | const doc = await vscode.workspace.openTextDocument(path) 101 | docProvider.onDidChangeEmitter.fire(path) 102 | await vscode.window.showTextDocument(doc, { 103 | viewColumn: vscode.ViewColumn.Two, 104 | preview: true 105 | }) 106 | } 107 | } -------------------------------------------------------------------------------- /client/src/extension.ts: -------------------------------------------------------------------------------- 1 | import { mkdirSync, promises as fs } from 'fs' 2 | import * as vscode from 'vscode' 3 | import * as lsp from 'vscode-languageclient' 4 | import * as commands from './commands' 5 | import { log } from './log' 6 | import { LanguageClient } from './lspClient' 7 | import { download, getReleaseInfo } from './net' 8 | import { PersistentState } from './persistent_state' 9 | import * as path from 'path' 10 | 11 | const platforms: { [key: string]: string } = { 12 | 'x64 win32': 'x86_64-windows-msvc', 13 | 
'x64 linux': 'x86_64-unknown-linux-gnu', 14 | 'x64 darwin': 'x86_64-apple-darwin', 15 | 'arm64 darwin': 'aarch64-apple-darwin' 16 | } 17 | 18 | export class Extension { 19 | private statusBarItem: vscode.StatusBarItem | null = null 20 | private extensionContext: vscode.ExtensionContext | null = null 21 | private client: lsp.LanguageClient 22 | private state: PersistentState 23 | 24 | readonly extensionID = 'strum355.vscode-mc-shader' 25 | 26 | readonly package: { 27 | version: string 28 | } = vscode.extensions.getExtension(this.extensionID)!.packageJSON 29 | 30 | public get context(): vscode.ExtensionContext { 31 | return this.extensionContext 32 | } 33 | 34 | public get lspClient(): lsp.LanguageClient { 35 | return this.client 36 | } 37 | 38 | public activate = async (context: vscode.ExtensionContext) => { 39 | this.extensionContext = context 40 | this.state = new PersistentState(context.globalState) 41 | 42 | if (!process.env['MCSHADER_DEBUG'] && !(vscode.workspace.getConfiguration('mcglsl').get('skipBootstrap') as boolean)) { 43 | await this.bootstrap() 44 | } else { 45 | log.info('skipping language server bootstrap') 46 | } 47 | 48 | this.registerCommand('graphDot', commands.generateGraphDot) 49 | this.registerCommand('restart', commands.restartExtension) 50 | this.registerCommand('virtualMerge', commands.virtualMergedDocument) 51 | this.registerCommand('parseTree', commands.parseTree) 52 | 53 | log.info('starting language server...') 54 | 55 | const lspBinary = process.env['MCSHADER_DEBUG'] ? 56 | this.context.asAbsolutePath(path.join('server', 'target', 'debug', 'mcshader-lsp')) + 57 | (process.platform === 'win32' ? '.exe' : '') : 58 | path.join(this.context.globalStorageUri.fsPath, 'mcshader-lsp') 59 | 60 | const filewatcherGlob = this.fileAssociationsToGlob(this.getGLSLFileAssociations()) 61 | 62 | this.client = await new LanguageClient(this, lspBinary, filewatcherGlob).startServer() 63 | 64 | log.info('language server started!') 65 | } 66 | 67 | fileAssociationsToGlob = (associations: string[]): string => { 68 | return '**/*.{'.concat( 69 | associations.map(s => s.substring(s.indexOf('.'))).join(',') 70 | ) + '}' 71 | } 72 | 73 | getGLSLFileAssociations = (): string[] => { 74 | const exts = ['.fsh', '.vsh', '.gsh', '.glsl'] 75 | const associations = vscode.workspace.getConfiguration('files').get('associations') as { [key: string]: string } 76 | 77 | Object.keys(associations).forEach((key) => { 78 | if (associations[key] === 'glsl') { 79 | exts.push(key.substring(key.indexOf('*') + 1)) 80 | } 81 | }) 82 | 83 | return exts 84 | } 85 | 86 | registerCommand = (name: string, f: (e: Extension) => commands.Command) => { 87 | const cmd = f(this) 88 | this.context.subscriptions.push(vscode.commands.registerCommand('mcglsl.' 
+ name, cmd)) 89 | } 90 | 91 | deactivate = async () => { 92 | await this.lspClient.stop() 93 | while (this.context.subscriptions.length > 0) { 94 | this.context.subscriptions.pop()?.dispose() 95 | } 96 | } 97 | 98 | public updateStatus = (icon: string, text: string) => { 99 | this.statusBarItem?.dispose() 100 | this.statusBarItem = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left) 101 | this.statusBarItem.text = icon + ' [mc-shader] ' + text 102 | this.statusBarItem.show() 103 | this.context.subscriptions.push(this.statusBarItem) 104 | } 105 | 106 | public clearStatus = () => { 107 | this.statusBarItem?.dispose() 108 | } 109 | 110 | private bootstrap = async () => { 111 | mkdirSync(this.extensionContext.globalStoragePath, { recursive: true }) 112 | 113 | const dest = path.join(this.extensionContext.globalStoragePath, 'mcshader-lsp' + (process.platform === 'win32' ? '.exe' : '')) 114 | const exists = await fs.stat(dest).then(() => true, () => false) 115 | if (!exists) await this.state.updateServerVersion(undefined) 116 | 117 | const release = await getReleaseInfo(this.package.version) 118 | log.info('got release info from Github:\n\t', JSON.stringify(release)) 119 | 120 | const platform = platforms[`${process.arch} ${process.platform}`] 121 | if (platform === undefined) { 122 | vscode.window.showErrorMessage('Unfortunately we don\'t ship binaries for your platform yet.') 123 | log.warn(`incompatible architecture/platform:\n\t${process.arch} ${process.platform}`) 124 | return 125 | } 126 | 127 | if (release.tag_name === this.state.serverVersion) { 128 | log.info('server version is same as extension:\n\t', this.state.serverVersion) 129 | return 130 | } 131 | 132 | const artifact = release.assets.find(artifact => artifact.name === `mcshader-lsp-${platform}${(process.platform === 'win32' ? '.exe' : '')}`) 133 | 134 | log.info(`artifact with url ${artifact.browser_download_url} found`) 135 | 136 | const userResponse = await vscode.window.showInformationMessage( 137 | this.state.serverVersion == undefined ? 138 | `Language server version ${this.package.version} is not installed.` : 139 | `An update is available. 
Upgrade from ${this.state.serverVersion} to ${release.tag_name}?`, 140 | 'Download now' 141 | ) 142 | if (userResponse !== 'Download now') { 143 | log.info('user chose not to download server...') 144 | return 145 | } 146 | 147 | await download(artifact.browser_download_url, dest) 148 | 149 | this.state.updateServerVersion(release.tag_name) 150 | } 151 | } 152 | 153 | export const activate = async (context: vscode.ExtensionContext) => { 154 | try { 155 | new Extension().activate(context) 156 | } catch (e) { 157 | log.error(`failed to activate extension: ${e}`) 158 | throw(e) 159 | } 160 | } -------------------------------------------------------------------------------- /client/src/log.ts: -------------------------------------------------------------------------------- 1 | import { inspect } from 'util' 2 | import * as vscode from 'vscode' 3 | 4 | export const lspOutputChannel = vscode.window.createOutputChannel('Minecraft Shaders Language Server') 5 | 6 | // from rust-analyzer https://github.com/rust-analyzer/rust-analyzer/blob/ef223b9e6439c228e0be49861efd2067c0b22af4/editors/code/src/util.ts 7 | export const log = new class { 8 | readonly output = vscode.window.createOutputChannel('Minecraft Shaders'); 9 | 10 | // Hint: the type [T, ...T[]] means a non-empty array 11 | debug(...msg: [unknown, ...unknown[]]): void { 12 | log.write('DEBUG', ...msg) 13 | } 14 | 15 | info(...msg: [unknown, ...unknown[]]): void { 16 | log.write('INFO ', ...msg) 17 | } 18 | 19 | warn(...msg: [unknown, ...unknown[]]): void { 20 | log.write('WARN ', ...msg) 21 | } 22 | 23 | error(...msg: [unknown, ...unknown[]]): void { 24 | log.write('ERROR', ...msg) 25 | } 26 | 27 | write(label: string, ...messageParts: unknown[]): void { 28 | const message = messageParts.map(log.stringify).join(' ') 29 | const dateTime = new Date().toLocaleString() 30 | log.output.appendLine(`${label} [${dateTime}]: ${message}`) 31 | } 32 | 33 | private stringify(val: unknown): string { 34 | if (typeof val === 'string') return val 35 | return inspect(val, { 36 | colors: false, 37 | depth: 6, // heuristic 38 | }) 39 | } 40 | } 41 | 42 | -------------------------------------------------------------------------------- /client/src/lspClient.ts: -------------------------------------------------------------------------------- 1 | import { ConfigurationTarget, workspace } from 'vscode' 2 | import * as lsp from 'vscode-languageclient' 3 | import { Extension } from './extension' 4 | import { log, lspOutputChannel } from './log' 5 | import { ConfigUpdateParams, statusMethod, StatusParams, updateConfigMethod } from './lspExt' 6 | 7 | export class LanguageClient extends lsp.LanguageClient { 8 | private extension: Extension 9 | 10 | constructor(ext: Extension, lspBinary: string, filewatcherGlob: string) { 11 | super('vscode-mc-shader', 'VSCode MC Shader', { 12 | command: lspBinary 13 | }, { 14 | documentSelector: [{ scheme: 'file', language: 'glsl' }], 15 | outputChannel: lspOutputChannel, 16 | synchronize: { 17 | configurationSection: 'mcglsl', 18 | fileEvents: workspace.createFileSystemWatcher(filewatcherGlob) 19 | }, 20 | }) 21 | this.extension = ext 22 | 23 | log.info('server receiving events for file glob:\n\t', filewatcherGlob) 24 | log.info('running with binary at path:\n\t', lspBinary) 25 | } 26 | 27 | public startServer = async (): Promise => { 28 | this.extension.context.subscriptions.push(this.start()) 29 | 30 | await this.onReady() 31 | 32 | this.onNotification(updateConfigMethod, this.onUpdateConfig) 33 | this.onNotification(statusMethod, 
this.onStatusChange) 34 | 35 | return this 36 | } 37 | 38 | onStatusChange = (params: StatusParams) => { 39 | switch (params.status) { 40 | case 'loading': 41 | case 'ready': 42 | case 'failed': 43 | this.extension.updateStatus(params.icon, params.message) 44 | break 45 | case 'clear': 46 | this.extension.clearStatus() 47 | break 48 | } 49 | } 50 | 51 | onUpdateConfig = (params: ConfigUpdateParams) => { 52 | for (const kv of params.kv) { 53 | workspace.getConfiguration().update('mcglsl.' + kv.key, kv.value, ConfigurationTarget.Global) 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /client/src/lspExt.ts: -------------------------------------------------------------------------------- 1 | import * as lsp from 'vscode-languageclient' 2 | 3 | export type StatusParams = { 4 | status: 'loading' | 'ready' | 'failed' | 'clear' 5 | message: string 6 | icon: string 7 | } 8 | 9 | export const statusMethod = 'mc-glsl/status' 10 | export const status = new lsp.NotificationType(statusMethod) 11 | 12 | export const updateConfigMethod = 'mc-glsl/updateConfig' 13 | 14 | export type ConfigUpdateParams = { 15 | kv: { key: string, value: string }[] 16 | } -------------------------------------------------------------------------------- /client/src/net.ts: -------------------------------------------------------------------------------- 1 | import { log } from './log' 2 | import fetch from 'node-fetch' 3 | import * as vscode from 'vscode' 4 | import * as stream from 'stream' 5 | import * as fs from 'fs' 6 | import * as util from 'util' 7 | 8 | const pipeline = util.promisify(stream.pipeline) 9 | 10 | interface GithubRelease { 11 | tag_name: string; 12 | assets: Array<{ 13 | name: string; 14 | browser_download_url: string; 15 | }>; 16 | } 17 | 18 | export async function getReleaseInfo(releaseTag: string): Promise { 19 | log.info('fetching release info for tag', releaseTag) 20 | const response = await fetch(`https://api.github.com/repos/strum355/mcshader-lsp/releases/tags/${releaseTag}`, { 21 | headers: { Accept: 'application/vnd.github.v3+json' } 22 | }) 23 | 24 | const isRelease = (obj: unknown): obj is GithubRelease => { 25 | return obj != null && typeof obj === 'object' 26 | && typeof (obj as GithubRelease).tag_name === 'string' 27 | && Array.isArray((obj as GithubRelease).assets) 28 | && (obj as GithubRelease).assets.every((a) => typeof a.name === 'string' && typeof a.browser_download_url === 'string') 29 | } 30 | 31 | const json = await response.json() 32 | if (!isRelease(json)) { 33 | throw new TypeError(`Received malformed request from Github Release API ${JSON.stringify(json)}`) 34 | } 35 | return json 36 | } 37 | 38 | export async function download(url: string, downloadDest: string) { 39 | await vscode.window.withProgress( 40 | { 41 | location: vscode.ProgressLocation.Notification, 42 | cancellable: false, 43 | title: `Downloading ${url}` 44 | }, 45 | async (progress, _) => { 46 | let lastPercentage = 0 47 | await downloadFile(url, downloadDest, (readBytes, totalBytes) => { 48 | const newPercentage = Math.round((readBytes / totalBytes) * 100) 49 | if (newPercentage !== lastPercentage) { 50 | progress.report({ 51 | message: `${newPercentage.toFixed(0)}%`, 52 | increment: newPercentage - lastPercentage 53 | }) 54 | 55 | lastPercentage = newPercentage 56 | } 57 | }) 58 | } 59 | ) 60 | } 61 | 62 | async function downloadFile( 63 | url: string, 64 | destFilePath: fs.PathLike, 65 | onProgress: (readBytes: number, totalBytes: number) => void 66 | ): 
Promise { 67 | const res = await fetch(url) 68 | if (!res.ok) { 69 | log.error(res.status, 'while downloading file from', url) 70 | log.error({ body: await res.text(), headers: res.headers }) 71 | throw new Error(`Got response ${res.status} when trying to download ${url}.`) 72 | } 73 | 74 | const totalBytes = Number(res.headers.get('content-length')) 75 | 76 | log.debug('downloading file with', totalBytes, 'bytes size from', url, 'to', destFilePath) 77 | 78 | let readBytes = 0 79 | res.body.on('data', (chunk: Buffer) => { 80 | readBytes += chunk.length 81 | onProgress(readBytes, totalBytes) 82 | }) 83 | 84 | const destFileStream = fs.createWriteStream(destFilePath, { mode: 0o755 }) 85 | 86 | await pipeline(res.body, destFileStream) 87 | 88 | // Don't apply the workaround in fixed versions of nodejs, since the process 89 | // freezes on them, the process waits for no-longer emitted `close` event. 90 | // The fix was applied in commit 7eed9d6bcc in v13.11.0 91 | // See the nodejs changelog: 92 | // https://github.com/nodejs/node/blob/master/doc/changelogs/CHANGELOG_V13.md 93 | const [, major, minor] = /v(\d+)\.(\d+)\.(\d+)/.exec(process.version)! 94 | if (+major > 13 || (+major === 13 && +minor >= 11)) return 95 | 96 | await new Promise(resolve => { 97 | destFileStream.on('close', resolve) 98 | destFileStream.destroy() 99 | // This workaround is awaiting to be removed when vscode moves to newer nodejs version: 100 | // https://github.com/rust-analyzer/rust-analyzer/issues/3167 101 | }) 102 | } -------------------------------------------------------------------------------- /client/src/persistent_state.ts: -------------------------------------------------------------------------------- 1 | import { Memento } from 'vscode' 2 | import { log } from './log' 3 | 4 | export class PersistentState { 5 | constructor(private readonly state: Memento) { 6 | const { serverVersion } = this 7 | log.info('working with state', { serverVersion }) 8 | } 9 | 10 | public get serverVersion(): string | undefined { 11 | return this.state.get('serverVersion') 12 | } 13 | 14 | async updateServerVersion(value: string | undefined) { 15 | await this.state.update('serverVersion', value) 16 | } 17 | } -------------------------------------------------------------------------------- /client/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "target": "es6", 5 | "outDir": "out", 6 | "rootDir": "src", 7 | "lib": [ "es6" ], 8 | "sourceMap": true 9 | }, 10 | "include": [ 11 | "src" 12 | ], 13 | "exclude": [ 14 | "node_modules" 15 | ] 16 | } -------------------------------------------------------------------------------- /logo-min.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Strum355/mcshader-lsp/85cbb6d81e9e70aadaf94bd2b8eb9a8c54818269/logo-min.png -------------------------------------------------------------------------------- /logo-mini.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Strum355/mcshader-lsp/85cbb6d81e9e70aadaf94bd2b8eb9a8c54818269/logo-mini.png -------------------------------------------------------------------------------- /logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Strum355/mcshader-lsp/85cbb6d81e9e70aadaf94bd2b8eb9a8c54818269/logo.png 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "vscode-mc-shader", 3 | "displayName": "Minecraft GLSL Shaders", 4 | "description": "A Visual Studio Code extension for linting/etc Minecraft GLSL Shaders", 5 | "version": "0.9.9", 6 | "publisher": "Strum355", 7 | "author": "Noah Santschi-Cooney (Strum355)", 8 | "license": "MIT", 9 | "icon": "logo-min.png", 10 | "repository": { 11 | "url": "https://github.com/Strum355/mcshader-lsp" 12 | }, 13 | "engines": { 14 | "vscode": "^1.53.0" 15 | }, 16 | "categories": [ 17 | "Linters", 18 | "Programming Languages" 19 | ], 20 | "activationEvents": [ 21 | "onLanguage:glsl", 22 | "workspaceContains:shaders/" 23 | ], 24 | "extensionDependencies": [ 25 | "slevesque.shader" 26 | ], 27 | "main": "./client/out/extension", 28 | "contributes": { 29 | "commands": [ 30 | { 31 | "command": "mcglsl.graphDot", 32 | "title": "Generate Graphviz DOT dependency graph", 33 | "category": "Minecraft Shader" 34 | }, 35 | { 36 | "command": "mcglsl.restart", 37 | "title": "Restart Language Server", 38 | "category": "Minecraft Shader" 39 | }, 40 | { 41 | "command": "mcglsl.virtualMerge", 42 | "title": "Show flattened file", 43 | "category": "Minecraft Shader" 44 | }, 45 | { 46 | "command": "mcglsl.parseTree", 47 | "title": "Show parse tree for file", 48 | "category": "Minecraft Shader" 49 | } 50 | ], 51 | "languages": [ 52 | { 53 | "id": "glsl", 54 | "aliases": [ 55 | "OpenGL Shading Language" 56 | ], 57 | "extensions": [ 58 | ".fsh", 59 | ".vsh", 60 | ".gsh", 61 | ".glsl" 62 | ] 63 | } 64 | ], 65 | "configuration": { 66 | "title": "Minecraft GLSL Shaders", 67 | "properties": { 68 | "mcglsl.skipBootstrap": { 69 | "type": "boolean", 70 | "default": false, 71 | "description": "[DEBUG] Enable to skip bootstrapping the language server binary from Github. Set this to use a manually provided language server binary." 72 | }, 73 | "mcglsl.logLevel": { 74 | "type": "string", 75 | "default": "info", 76 | "enum": ["trace", "debug", "info", "warn", "error"], 77 | "description": "Change the log level of the language server. This change happens live and does not require a restart." 
78 | } 79 | } 80 | } 81 | }, 82 | "scripts": { 83 | "vscode:prepublish": "npm run compile && cd client && npm run rollup", 84 | "compile": "tsc -b", 85 | "package": "vsce package -o vscode-mc-shader.vsix", 86 | "watch": "concurrently \"tsc -b -w\" \"cd server && cargo watch -x build\"", 87 | "postinstall": "cd client && npm install", 88 | "lint": "eslint 'client/**/*.ts' --max-warnings 1", 89 | "fix": "eslint 'client/**/*.ts' --fix" 90 | }, 91 | "devDependencies": { 92 | "@types/node": "^17.0.21", 93 | "@typescript-eslint/parser": "^5.15.0", 94 | "concurrently": "^7.0.0", 95 | "eslint": "^8.11.0", 96 | "typescript": "^4.6.2", 97 | "vsce": "^2.7.0" 98 | }, 99 | "eslintConfig": { 100 | "parser": "@typescript-eslint/parser", 101 | "parserOptions": { 102 | "ecmaVersion": 2020, 103 | "sourceType": "module" 104 | }, 105 | "rules": { 106 | "semi": [ 107 | "warn", 108 | "never" 109 | ], 110 | "quotes": [ 111 | "warn", 112 | "single" 113 | ] 114 | } 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /server/.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | sample.log -------------------------------------------------------------------------------- /server/Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "main", 4 | "logging", 5 | "logging_macro" 6 | ] -------------------------------------------------------------------------------- /server/Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: watchtest build 2 | 3 | watchtest: 4 | RUST_BACKTRACE=0 cargo watch -x test -i Makefile 5 | 6 | test: 7 | RUST_LIB_BACKTRACE=0 RUST_BACKTRACE=0 cargo test -- --nocapture --color always 8 | 9 | build: 10 | cargo build 11 | 12 | build-release: 13 | cargo build --release -------------------------------------------------------------------------------- /server/logging/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "logging" 3 | version = "0.9.9" 4 | authors = ["Noah Santschi-Cooney "] 5 | edition = "2021" 6 | 7 | [dependencies] 8 | slog = { version = "2.7", features = [ "max_level_trace", "release_max_level_trace" ] } 9 | slog-term = "2.9" 10 | slog-scope = "4.4" 11 | slog-atomic = "3.1" 12 | rand = "0.8" 13 | lazy_static = "1.4" -------------------------------------------------------------------------------- /server/logging/src/lib.rs: -------------------------------------------------------------------------------- 1 | use rand::{rngs, Rng}; 2 | use slog::slog_o; 3 | use slog_scope::GlobalLoggerGuard; 4 | use slog_term::{FullFormat, PlainSyncDecorator}; 5 | use std::{cell::RefCell, sync::Arc}; 6 | 7 | use std::io::Stderr; 8 | 9 | use lazy_static::lazy_static; 10 | use slog::*; 11 | use slog_atomic::*; 12 | 13 | fn new_trace_id() -> String { 14 | let rng = CURRENT_RNG.with(|rng| rng.borrow_mut().gen::<[u8; 4]>()); 15 | return format!("{:04x}", u32::from_be_bytes(rng)); 16 | } 17 | 18 | pub fn slog_with_trace_id(f: F) { 19 | slog_scope::scope(&slog_scope::logger().new(slog_o!("trace" => new_trace_id())), f) 20 | } 21 | 22 | pub fn set_logger_with_level(level: Level) -> GlobalLoggerGuard { 23 | let drain = Arc::new(logger_base(level).fuse()); 24 | DRAIN_SWITCH.ctrl().set(drain.clone()); 25 | slog_scope::set_global_logger(Logger::root(drain, o!())) 26 | } 27 | 28 | fn logger_base(level: Level) -> LevelFilter>>> { 29 
| let plain = slog_term::PlainSyncDecorator::new(std::io::stderr()); 30 | let drain = slog_term::FullFormat::new(plain).build().fuse(); 31 | drain.filter_level(level) 32 | } 33 | 34 | thread_local! { 35 | static CURRENT_RNG: RefCell = RefCell::new(rngs::ThreadRng::default()); 36 | } 37 | 38 | lazy_static! { 39 | static ref DRAIN_SWITCH: AtomicSwitch<()> = { 40 | let logger = logger_base(Level::Info).fuse(); 41 | AtomicSwitch::new(logger) 42 | }; 43 | } 44 | -------------------------------------------------------------------------------- /server/logging_macro/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "logging_macro" 3 | version = "0.9.9" 4 | authors = ["Noah Santschi-Cooney "] 5 | edition = "2021" 6 | 7 | [lib] 8 | proc-macro = true 9 | 10 | [dependencies] 11 | quote = "1.0" 12 | syn = { version = "1.0", features = [ "full" ] } -------------------------------------------------------------------------------- /server/logging_macro/src/lib.rs: -------------------------------------------------------------------------------- 1 | use proc_macro::TokenStream; 2 | use quote::quote; 3 | use syn::{parse_macro_input, parse_quote, ItemFn}; 4 | 5 | #[proc_macro_attribute] 6 | pub fn log_scope(_args: TokenStream, function: TokenStream) -> TokenStream { 7 | let mut function = parse_macro_input!(function as ItemFn); 8 | 9 | let function_name = function.sig.ident.to_string(); 10 | 11 | let stmts = function.block.stmts; 12 | 13 | function.block = Box::new(parse_quote!({ 14 | use slog::{slog_o, FnValue, Level}; 15 | use std::thread::current; 16 | 17 | let _guard = logging::set_logger_with_level(Level::Trace); 18 | slog_scope::scope(&slog_scope::logger().new(slog_o!("test_name" => #function_name, "thread_num" => FnValue(|_| format!("{:?}", current().id())))), || { 19 | #(#stmts)* 20 | }); 21 | })); 22 | 23 | TokenStream::from(quote!(#function)) 24 | } 25 | -------------------------------------------------------------------------------- /server/main/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "mcshader-lsp" 3 | version = "0.9.9" 4 | authors = ["Noah Santschi-Cooney "] 5 | edition = "2021" 6 | 7 | [dependencies] 8 | rust_lsp = { git = "https://github.com/Strum355/RustLSP", branch = "master" } 9 | serde_json = "1.0" 10 | serde = "1.0" 11 | walkdir = "2.3" 12 | petgraph = "0.6" 13 | lazy_static = "1.4" 14 | regex = "1.4" 15 | url = "2.2" 16 | percent-encoding = "2.1" 17 | anyhow = "1.0" 18 | thiserror = "1.0" 19 | glutin = "0.28" 20 | gl = "0.14" 21 | mockall = "0.11" 22 | path-slash = "0.1" 23 | slog = { version = "2.7", features = [ "max_level_trace", "release_max_level_trace" ] } 24 | slog-scope = "4.4" 25 | once_cell = "1.7" 26 | tree-sitter = "0.20.6" 27 | tree-sitter-glsl = "0.1.2" 28 | logging = { path = "../logging" } 29 | logging_macro = { path = "../logging_macro" } 30 | 31 | [dev-dependencies] 32 | tempdir = "0.3" 33 | fs_extra = "1.2" 34 | hamcrest2 = "*" 35 | pretty_assertions = "1.1" -------------------------------------------------------------------------------- /server/main/src/commands/graph_dot.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::fs::OpenOptions; 3 | use std::io::prelude::*; 4 | use std::path::Path; 5 | use std::rc::Rc; 6 | 7 | use petgraph::dot::Config; 8 | use serde_json::Value; 9 | 10 | use petgraph::dot; 11 | 12 | use anyhow::{format_err, Result}; 13 | use 
slog_scope::info; 14 | 15 | use crate::graph::CachedStableGraph; 16 | 17 | use super::Invokeable; 18 | 19 | pub struct GraphDotCommand { 20 | pub graph: Rc<RefCell<CachedStableGraph>>, 21 | } 22 | 23 | impl Invokeable for GraphDotCommand { 24 | fn run_command(&self, root: &Path, _: &[Value]) -> Result<Value> { 25 | let filepath = root.join("graph.dot"); 26 | 27 | info!("generating dot file"; "path" => filepath.as_os_str().to_str()); 28 | 29 | let mut file = OpenOptions::new().truncate(true).write(true).create(true).open(filepath).unwrap(); 30 | 31 | let mut write_data_closure = || -> Result<(), std::io::Error> { 32 | let graph = self.graph.as_ref(); 33 | 34 | file.seek(std::io::SeekFrom::Start(0))?; 35 | file.write_all("digraph {\n\tgraph [splines=ortho]\n\tnode [shape=box]\n".as_bytes())?; 36 | file.write_all( 37 | dot::Dot::with_config(&graph.borrow().graph, &[Config::GraphContentOnly]) 38 | .to_string() 39 | .as_bytes(), 40 | )?; 41 | file.write_all("\n}".as_bytes())?; 42 | file.flush()?; 43 | file.seek(std::io::SeekFrom::Start(0))?; 44 | Ok(()) 45 | }; 46 | 47 | match write_data_closure() { 48 | Err(err) => Err(format_err!("error generating graphviz data: {}", err)), 49 | _ => Ok(Value::Null), 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /server/main/src/commands/merged_includes.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::rc::Rc; 3 | use std::{ 4 | collections::HashMap, 5 | path::{Path, PathBuf}, 6 | }; 7 | 8 | use serde_json::Value; 9 | 10 | use petgraph::graph::NodeIndex; 11 | 12 | use anyhow::{format_err, Result}; 13 | 14 | use std::fs; 15 | 16 | use crate::dfs; 17 | use crate::merge_views::FilialTuple; 18 | use crate::source_mapper::SourceMapper; 19 | use crate::{graph::CachedStableGraph, merge_views, url_norm::FromJson}; 20 | 21 | use super::Invokeable; 22 | 23 | pub struct VirtualMergedDocument { 24 | pub graph: Rc<RefCell<CachedStableGraph>>, 25 | } 26 | 27 | impl VirtualMergedDocument { 28 | // TODO: DUPLICATE CODE 29 | fn get_file_toplevel_ancestors(&self, uri: &Path) -> Result<Option<Vec<NodeIndex>>> { 30 | let curr_node = match self.graph.borrow_mut().find_node(uri) { 31 | Some(n) => n, 32 | None => return Err(format_err!("node not found {:?}", uri)), 33 | }; 34 | let roots = self.graph.borrow().collect_root_ancestors(curr_node); 35 | if roots.is_empty() { 36 | return Ok(None); 37 | } 38 | Ok(Some(roots)) 39 | } 40 | 41 | pub fn get_dfs_for_node(&self, root: NodeIndex) -> Result<Vec<FilialTuple>, dfs::error::CycleError> { 42 | let graph_ref = self.graph.borrow(); 43 | 44 | let dfs = dfs::Dfs::new(&graph_ref, root); 45 | 46 | dfs.collect::<Result<Vec<FilialTuple>, _>>() 47 | } 48 | 49 | pub fn load_sources(&self, nodes: &[FilialTuple]) -> Result<HashMap<PathBuf, String>> { 50 | let mut sources = HashMap::new(); 51 | 52 | for node in nodes { 53 | let graph = self.graph.borrow(); 54 | let path = graph.get_node(node.child); 55 | 56 | if sources.contains_key(&path) { 57 | continue; 58 | } 59 | 60 | let source = match fs::read_to_string(&path) { 61 | Ok(s) => s, 62 | Err(e) => return Err(format_err!("error reading {:?}: {}", path, e)), 63 | }; 64 | let source = source.replace("\r\n", "\n"); 65 | sources.insert(path.clone(), source); 66 | } 67 | 68 | Ok(sources) 69 | } 70 | } 71 | 72 | impl Invokeable for VirtualMergedDocument { 73 | fn run_command(&self, root: &Path, arguments: &[Value]) -> Result<Value> { 74 | let path = PathBuf::from_json(arguments.get(0).unwrap())?; 75 | 76 | let file_ancestors = match self.get_file_toplevel_ancestors(&path) { 77 | Ok(opt) => match opt { 78 | Some(ancestors) => ancestors, 79 | None => vec![], 80 | }, 81 | Err(e) => return Err(e), 82 | }; 83 | 84 | //info!("ancestors for {}:\n\t{:?}", path, file_ancestors.iter().map(|e| self.graph.borrow().graph.node_weight(*e).unwrap().clone()).collect::>()); 85 | 86 | // the set of all filepath->content. TODO: change to Url? 87 | let mut all_sources: HashMap<PathBuf, String> = HashMap::new(); 88 | 89 | // if we are a top-level file (this has to be one of the set defined by Optifine, right?) 90 | if file_ancestors.is_empty() { 91 | // gather the list of all descendants 92 | let root = self.graph.borrow_mut().find_node(&path).unwrap(); 93 | let tree = match self.get_dfs_for_node(root) { 94 | Ok(tree) => tree, 95 | Err(e) => return Err(e.into()), 96 | }; 97 | 98 | let sources = match self.load_sources(&tree) { 99 | Ok(s) => s, 100 | Err(e) => return Err(e), 101 | }; 102 | all_sources.extend(sources); 103 | 104 | let mut source_mapper = SourceMapper::new(all_sources.len()); 105 | let graph = self.graph.borrow(); 106 | let view = merge_views::MergeViewBuilder::new(&tree, &all_sources, &graph, &mut source_mapper).build(); 107 | return Ok(serde_json::value::Value::String(view)); 108 | } 109 | return Err(format_err!( 110 | "{:?} is not a top-level file aka has ancestors", 111 | path.strip_prefix(root).unwrap() 112 | )); 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /server/main/src/commands/mod.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, path::Path}; 2 | 3 | use serde_json::Value; 4 | 5 | use anyhow::{format_err, Result}; 6 | use slog_scope::info; 7 | 8 | pub mod graph_dot; 9 | pub mod merged_includes; 10 | pub mod parse_tree; 11 | 12 | pub struct CustomCommandProvider { 13 | commands: HashMap<String, Box<dyn Invokeable>>, 14 | } 15 | 16 | impl CustomCommandProvider { 17 | pub fn new(commands: Vec<(&str, Box<dyn Invokeable>)>) -> CustomCommandProvider { 18 | CustomCommandProvider { 19 | commands: commands.into_iter().map(|tup| (tup.0.into(), tup.1)).collect(), 20 | } 21 | } 22 | 23 | pub fn execute(&self, command: &str, args: &[Value], root_path: &Path) -> Result<Value> { 24 | if self.commands.contains_key(command) { 25 | info!("running command"; 26 | "command" => command, 27 | "args" => format!("[{}]", args.iter().map(|v| serde_json::to_string(v).unwrap()).collect::<Vec<String>>().join(", "))); 28 | return self.commands.get(command).unwrap().run_command(root_path, args); 29 | } 30 | Err(format_err!("command doesn't exist")) 31 | } 32 | } 33 | 34 | pub trait Invokeable { 35 | fn run_command(&self, root: &Path, arguments: &[Value]) -> Result<Value>; 36 | } 37 | -------------------------------------------------------------------------------- /server/main/src/commands/parse_tree.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | cell::RefCell, 3 | fs, 4 | path::{Path, PathBuf}, 5 | rc::Rc, 6 | }; 7 | 8 | use anyhow::{format_err, Result}; 9 | use serde_json::Value; 10 | use slog_scope::warn; 11 | use tree_sitter::{Parser, TreeCursor}; 12 | 13 | use crate::url_norm::FromJson; 14 | 15 | use super::Invokeable; 16 | 17 | pub struct TreeSitterSExpr { 18 | pub tree_sitter: Rc<RefCell<Parser>>, 19 | } 20 | 21 | impl Invokeable for TreeSitterSExpr { 22 | fn run_command(&self, _: &Path, arguments: &[Value]) -> Result<Value> { 23 | let path = PathBuf::from_json(arguments.get(0).unwrap())?; 24 | 25 | warn!("parsing"; "path" => path.to_str().unwrap().to_string()); 26 | 27 | let source = fs::read_to_string(path)?; 28 | 29 | let tree = match
self.tree_sitter.borrow_mut().parse(source, None) { 30 | Some(tree) => tree, 31 | None => return Err(format_err!("tree-sitter parsing resulted in no parse tree")), 32 | }; 33 | 34 | let mut cursor = tree.walk(); 35 | 36 | let rendered = render_parse_tree(&mut cursor); 37 | 38 | Ok(serde_json::value::Value::String(rendered)) 39 | } 40 | } 41 | 42 | fn render_parse_tree(cursor: &mut TreeCursor) -> String { 43 | let mut string = String::new(); 44 | 45 | let mut indent = 0; 46 | let mut visited_children = false; 47 | 48 | loop { 49 | let node = cursor.node(); 50 | 51 | let display_name = if node.is_missing() { 52 | format!("MISSING {}", node.kind()) 53 | } else if node.is_named() { 54 | node.kind().to_string() 55 | } else { 56 | "".to_string() 57 | }; 58 | 59 | if visited_children { 60 | if cursor.goto_next_sibling() { 61 | visited_children = false; 62 | } else if cursor.goto_parent() { 63 | visited_children = true; 64 | indent -= 1; 65 | } else { 66 | break; 67 | } 68 | } else { 69 | if !display_name.is_empty() { 70 | let start = node.start_position(); 71 | let end = node.end_position(); 72 | 73 | let field_name = match cursor.field_name() { 74 | Some(name) => name.to_string() + ": ", 75 | None => "".to_string(), 76 | }; 77 | 78 | string += (" ".repeat(indent) 79 | + format!("{}{} [{}, {}] - [{}, {}]\n", field_name, display_name, start.row, start.column, end.row, end.column) 80 | .trim_start()) 81 | .as_str(); 82 | } 83 | 84 | if cursor.goto_first_child() { 85 | visited_children = false; 86 | indent += 1; 87 | } else { 88 | visited_children = true; 89 | } 90 | } 91 | } 92 | 93 | string 94 | } 95 | -------------------------------------------------------------------------------- /server/main/src/configuration.rs: -------------------------------------------------------------------------------- 1 | use std::str::FromStr; 2 | 3 | use slog::Level; 4 | use slog_scope::error; 5 | 6 | 7 | pub fn handle_log_level_change(log_level: String, callback: F) { 8 | match Level::from_str(log_level.as_str()) { 9 | Ok(level) => callback(level), 10 | Err(_) => error!("got unexpected log level from config"; "level" => log_level), 11 | }; 12 | } -------------------------------------------------------------------------------- /server/main/src/consts.rs: -------------------------------------------------------------------------------- 1 | pub static SOURCE: &str = "mc-glsl"; 2 | 3 | #[allow(dead_code)] 4 | pub static INCLUDE_DIRECTIVE: &str = "#extension GL_GOOGLE_include_directive : require\n"; -------------------------------------------------------------------------------- /server/main/src/dfs.rs: -------------------------------------------------------------------------------- 1 | use petgraph::stable_graph::NodeIndex; 2 | 3 | use crate::{graph::CachedStableGraph, merge_views::FilialTuple}; 4 | 5 | use anyhow::Result; 6 | 7 | struct VisitCount { 8 | node: NodeIndex, 9 | touch: usize, 10 | children: usize, 11 | } 12 | 13 | /// Performs a depth-first search with duplicates 14 | pub struct Dfs<'a> { 15 | stack: Vec, 16 | graph: &'a CachedStableGraph, 17 | cycle: Vec, 18 | } 19 | 20 | impl<'a> Dfs<'a> { 21 | pub fn new(graph: &'a CachedStableGraph, start: NodeIndex) -> Self { 22 | Dfs { 23 | stack: vec![start], 24 | graph, 25 | cycle: Vec::new(), 26 | } 27 | } 28 | 29 | fn reset_path_to_branch(&mut self) { 30 | while let Some(par) = self.cycle.last_mut() { 31 | par.touch += 1; 32 | if par.touch > par.children { 33 | self.cycle.pop(); 34 | } else { 35 | break; 36 | } 37 | } 38 | } 39 | 40 | fn check_for_cycle(&self, 
children: &[NodeIndex]) -> Result<(), error::CycleError> { 41 | for prev in &self.cycle { 42 | for child in children { 43 | if prev.node == *child { 44 | let cycle_nodes: Vec = self.cycle.iter().map(|n| n.node).collect(); 45 | return Err(error::CycleError::new(&cycle_nodes, *child, self.graph)); 46 | } 47 | } 48 | } 49 | Ok(()) 50 | } 51 | } 52 | 53 | impl<'a> Iterator for Dfs<'a> { 54 | type Item = Result; 55 | 56 | fn next(&mut self) -> Option> { 57 | let parent = self.cycle.last().map(|p| p.node); 58 | 59 | if let Some(child) = self.stack.pop() { 60 | self.cycle.push(VisitCount { 61 | node: child, 62 | children: self.graph.graph.edges(child).count(), 63 | touch: 1, 64 | }); 65 | 66 | let mut children: Vec<_> = self 67 | .graph 68 | .get_all_child_positions(child) 69 | .collect(); 70 | children.reverse(); 71 | 72 | if !children.is_empty() { 73 | 74 | let child_indexes: Vec<_> = children.iter().map(|c| c.0).collect(); 75 | match self.check_for_cycle(&child_indexes) { 76 | Ok(_) => {} 77 | Err(e) => return Some(Err(e)), 78 | }; 79 | 80 | for child in children { 81 | self.stack.push(child.0); 82 | } 83 | } else { 84 | self.reset_path_to_branch(); 85 | } 86 | 87 | return Some(Ok(FilialTuple { child, parent })); 88 | } 89 | None 90 | } 91 | } 92 | 93 | pub mod error { 94 | use petgraph::stable_graph::NodeIndex; 95 | 96 | use std::{ 97 | error::Error as StdError, 98 | fmt::{Debug, Display}, 99 | path::PathBuf, 100 | }; 101 | 102 | use crate::{consts, graph::CachedStableGraph}; 103 | 104 | use rust_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range}; 105 | 106 | #[derive(Debug)] 107 | pub struct CycleError(Vec); 108 | 109 | impl StdError for CycleError {} 110 | 111 | impl CycleError { 112 | pub fn new(nodes: &[NodeIndex], current_node: NodeIndex, graph: &CachedStableGraph) -> Self { 113 | let mut resolved_nodes: Vec = nodes.iter().map(|i| graph.get_node(*i)).collect(); 114 | resolved_nodes.push(graph.get_node(current_node)); 115 | CycleError(resolved_nodes) 116 | } 117 | } 118 | 119 | impl Display for CycleError { 120 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 121 | let mut disp = String::new(); 122 | disp.push_str(format!("Include cycle detected:\n{:?} imports ", self.0[0]).as_str()); 123 | for p in &self.0[1..self.0.len() - 1] { 124 | disp.push_str(format!("\n{:?}, which imports ", *p).as_str()); 125 | } 126 | disp.push_str(format!("\n{:?}", self.0[self.0.len() - 1]).as_str()); 127 | f.write_str(disp.as_str()) 128 | } 129 | } 130 | 131 | impl From for Diagnostic { 132 | fn from(e: CycleError) -> Diagnostic { 133 | Diagnostic { 134 | severity: Some(DiagnosticSeverity::ERROR), 135 | range: Range::new(Position::new(0, 0), Position::new(0, 500)), 136 | source: Some(consts::SOURCE.into()), 137 | message: e.into(), 138 | code: None, 139 | tags: None, 140 | related_information: None, 141 | code_description: Option::None, 142 | data: Option::None, 143 | } 144 | } 145 | } 146 | 147 | impl From for String { 148 | fn from(e: CycleError) -> String { 149 | format!("{}", e) 150 | } 151 | } 152 | } 153 | 154 | #[cfg(test)] 155 | mod dfs_test { 156 | use std::path::PathBuf; 157 | 158 | use hamcrest2::prelude::*; 159 | use hamcrest2::{assert_that, ok}; 160 | use petgraph::{algo::is_cyclic_directed, graph::NodeIndex}; 161 | 162 | use crate::graph::CachedStableGraph; 163 | use crate::{dfs, IncludePosition}; 164 | 165 | #[test] 166 | #[logging_macro::log_scope] 167 | fn test_graph_dfs() { 168 | { 169 | let mut graph = CachedStableGraph::new(); 170 | 171 | let idx0 = 
graph.add_node(&PathBuf::from("0")); 172 | let idx1 = graph.add_node(&PathBuf::from("1")); 173 | let idx2 = graph.add_node(&PathBuf::from("2")); 174 | let idx3 = graph.add_node(&PathBuf::from("3")); 175 | 176 | graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 }); 177 | graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 }); 178 | graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 }); 179 | 180 | let dfs = dfs::Dfs::new(&graph, idx0); 181 | 182 | let mut collection = Vec::new(); 183 | 184 | for i in dfs { 185 | assert_that!(&i, ok()); 186 | collection.push(i.unwrap()); 187 | } 188 | 189 | let nodes: Vec = collection.iter().map(|n| n.child).collect(); 190 | let parents: Vec> = collection.iter().map(|n| n.parent).collect(); 191 | // 0 192 | // / \ 193 | // 1 2 194 | // / 195 | // 3 196 | let expected_nodes = vec![idx0, idx1, idx3, idx2]; 197 | 198 | assert_eq!(expected_nodes, nodes); 199 | 200 | let expected_parents = vec![None, Some(idx0), Some(idx1), Some(idx0)]; 201 | 202 | assert_eq!(expected_parents, parents); 203 | 204 | assert!(!is_cyclic_directed(&graph.graph)); 205 | } 206 | { 207 | let mut graph = CachedStableGraph::new(); 208 | 209 | let idx0 = graph.add_node(&PathBuf::from("0")); 210 | let idx1 = graph.add_node(&PathBuf::from("1")); 211 | let idx2 = graph.add_node(&PathBuf::from("2")); 212 | let idx3 = graph.add_node(&PathBuf::from("3")); 213 | let idx4 = graph.add_node(&PathBuf::from("4")); 214 | let idx5 = graph.add_node(&PathBuf::from("5")); 215 | let idx6 = graph.add_node(&PathBuf::from("6")); 216 | let idx7 = graph.add_node(&PathBuf::from("7")); 217 | 218 | graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 }); 219 | graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 }); 220 | graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 }); 221 | graph.add_edge(idx1, idx4, IncludePosition { line: 6, start: 0, end: 0 }); 222 | graph.add_edge(idx2, idx4, IncludePosition { line: 5, start: 0, end: 0 }); 223 | graph.add_edge(idx2, idx5, IncludePosition { line: 4, start: 0, end: 0 }); 224 | graph.add_edge(idx3, idx6, IncludePosition { line: 4, start: 0, end: 0 }); 225 | graph.add_edge(idx4, idx6, IncludePosition { line: 4, start: 0, end: 0 }); 226 | graph.add_edge(idx6, idx7, IncludePosition { line: 4, start: 0, end: 0 }); 227 | 228 | let dfs = dfs::Dfs::new(&graph, idx0); 229 | 230 | let mut collection = Vec::new(); 231 | 232 | for i in dfs { 233 | assert_that!(&i, ok()); 234 | collection.push(i.unwrap()); 235 | } 236 | 237 | let nodes: Vec = collection.iter().map(|n| n.child).collect(); 238 | let parents: Vec> = collection.iter().map(|n| n.parent).collect(); 239 | // 0 240 | // / \ 241 | // 1 2 242 | // / \ / \ 243 | // 3 4 5 244 | // \ / 245 | // 6 - 7 246 | let expected_nodes = vec![idx0, idx1, idx3, idx6, idx7, idx4, idx6, idx7, idx2, idx5, idx4, idx6, idx7]; 247 | 248 | assert_eq!(expected_nodes, nodes); 249 | 250 | let expected_parents = vec![ 251 | None, 252 | Some(idx0), 253 | Some(idx1), 254 | Some(idx3), 255 | Some(idx6), 256 | Some(idx1), 257 | Some(idx4), 258 | Some(idx6), 259 | Some(idx0), 260 | Some(idx2), 261 | Some(idx2), 262 | Some(idx4), 263 | Some(idx6), 264 | ]; 265 | 266 | assert_eq!(expected_parents, parents); 267 | 268 | assert!(!is_cyclic_directed(&graph.graph)); 269 | } 270 | } 271 | 272 | #[test] 273 | #[logging_macro::log_scope] 274 | fn test_graph_dfs_cycle() { 275 | { 276 | let mut graph = CachedStableGraph::new(); 277 | 278 | let idx0 = 
graph.add_node(&PathBuf::from("0")); 279 | let idx1 = graph.add_node(&PathBuf::from("1")); 280 | let idx2 = graph.add_node(&PathBuf::from("2")); 281 | let idx3 = graph.add_node(&PathBuf::from("3")); 282 | let idx4 = graph.add_node(&PathBuf::from("4")); 283 | let idx5 = graph.add_node(&PathBuf::from("5")); 284 | let idx6 = graph.add_node(&PathBuf::from("6")); 285 | let idx7 = graph.add_node(&PathBuf::from("7")); 286 | 287 | graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 }); 288 | graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 }); 289 | graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 }); 290 | graph.add_edge(idx1, idx4, IncludePosition { line: 6, start: 0, end: 0 }); 291 | graph.add_edge(idx2, idx4, IncludePosition { line: 5, start: 0, end: 0 }); 292 | graph.add_edge(idx2, idx5, IncludePosition { line: 4, start: 0, end: 0 }); 293 | graph.add_edge(idx3, idx6, IncludePosition { line: 4, start: 0, end: 0 }); 294 | graph.add_edge(idx4, idx6, IncludePosition { line: 4, start: 0, end: 0 }); 295 | graph.add_edge(idx6, idx7, IncludePosition { line: 4, start: 0, end: 0 }); 296 | graph.add_edge(idx7, idx4, IncludePosition { line: 4, start: 0, end: 0 }); 297 | 298 | let mut dfs = dfs::Dfs::new(&graph, idx0); 299 | 300 | for _ in 0..5 { 301 | if let Some(i) = dfs.next() { 302 | assert_that!(&i, ok()); 303 | } 304 | } 305 | 306 | // 0 307 | // / \ 308 | // 1 2 309 | // / \ / \ 310 | // 3 4 5 311 | // \ / \ 312 | // 6 - 7 313 | 314 | let next = dfs.next().unwrap(); 315 | assert_that!(next, err()); 316 | 317 | assert!(is_cyclic_directed(&graph.graph)); 318 | } 319 | { 320 | let mut graph = CachedStableGraph::new(); 321 | 322 | let idx0 = graph.add_node(&PathBuf::from("0")); 323 | let idx1 = graph.add_node(&PathBuf::from("1")); 324 | 325 | graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 }); 326 | graph.add_edge(idx1, idx0, IncludePosition { line: 2, start: 0, end: 0 }); 327 | 328 | let mut dfs = dfs::Dfs::new(&graph, idx1); 329 | 330 | println!("{:?}", dfs.next()); 331 | println!("{:?}", dfs.next()); 332 | println!("{:?}", dfs.next()); 333 | } 334 | } 335 | } 336 | -------------------------------------------------------------------------------- /server/main/src/diagnostics_parser.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, cell::OnceCell, path::Path}; 2 | 3 | use regex::Regex; 4 | use rust_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range}; 5 | use slog_scope::debug; 6 | use url::Url; 7 | 8 | use crate::{ 9 | consts, 10 | graph::CachedStableGraph, 11 | opengl, 12 | source_mapper::{SourceMapper, SourceNum}, 13 | }; 14 | 15 | pub struct DiagnosticsParser<'a, T: opengl::ShaderValidator + ?Sized> { 16 | line_offset: OnceCell, 17 | line_regex: OnceCell, 18 | vendor_querier: &'a T, 19 | } 20 | 21 | impl<'a, T: opengl::ShaderValidator + ?Sized> DiagnosticsParser<'a, T> { 22 | pub fn new(vendor_querier: &'a T) -> Self { 23 | DiagnosticsParser { 24 | line_offset: OnceCell::new(), 25 | line_regex: OnceCell::new(), 26 | vendor_querier, 27 | } 28 | } 29 | 30 | fn get_line_regex(&self) -> &Regex { 31 | self.line_regex.get_or_init(|| match self.vendor_querier.vendor().as_str() { 32 | "NVIDIA Corporation" => { 33 | Regex::new(r#"^(?P\d+)\((?P\d+)\) : (?Perror|warning) [A-C]\d+: (?P.+)"#).unwrap() 34 | } 35 | _ => Regex::new(r#"^(?PERROR|WARNING): (?P[^?<>*|"\n]+):(?P\d+): (?:'.*' :|[a-z]+\(#\d+\)) +(?P.+)$"#) 36 | .unwrap(), 37 | }) 38 | } 
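// A worked illustration of the two patterns above, using the driver outputs exercised by the tests
// at the bottom of this file; the group names (filepath, linenum, severity, output) are the ones read
// back by name in parse_diagnostics_output below.
//
//   NVIDIA:   0(9) : error C0000: syntax error, unexpected '}', expecting ',' or ';' at token "}"
//             -> filepath = "0", linenum = "9", severity = "error", output = "syntax error, ..."
//
//   fallback: ERROR: 0:10: '' : syntax error: #line
//             -> severity = "ERROR", filepath = "0", linenum = "10", output = "syntax error: #line"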
39 | 40 | fn get_line_offset(&self) -> u32 { 41 | *self.line_offset.get_or_init(|| match self.vendor_querier.vendor().as_str() { 42 | "ATI Technologies" => 0, 43 | _ => 1, 44 | }) 45 | } 46 | 47 | pub fn parse_diagnostics_output( 48 | &self, output: String, uri: &Path, source_mapper: &SourceMapper, graph: &CachedStableGraph, 49 | ) -> HashMap> { 50 | let output_lines = output.split('\n').collect::>(); 51 | let mut diagnostics: HashMap> = HashMap::with_capacity(output_lines.len()); 52 | 53 | debug!("diagnostics regex selected"; "regex" => self.get_line_regex() .as_str()); 54 | 55 | for line in output_lines { 56 | let diagnostic_capture = match self.get_line_regex().captures(line) { 57 | Some(d) => d, 58 | None => continue, 59 | }; 60 | 61 | debug!("found match for output line"; "line" => line, "capture" => format!("{:?}", diagnostic_capture)); 62 | 63 | let msg = diagnostic_capture.name("output").unwrap().as_str(); 64 | 65 | let line = match diagnostic_capture.name("linenum") { 66 | Some(c) => c.as_str().parse::().unwrap_or(0), 67 | None => 0, 68 | } - self.get_line_offset(); 69 | 70 | // TODO: line matching maybe 71 | /* let line_text = source_lines[line as usize]; 72 | let leading_whitespace = line_text.len() - line_text.trim_start().len(); */ 73 | 74 | let severity = match diagnostic_capture.name("severity") { 75 | Some(c) => match c.as_str().to_lowercase().as_str() { 76 | "error" => DiagnosticSeverity::ERROR, 77 | "warning" => DiagnosticSeverity::WARNING, 78 | _ => DiagnosticSeverity::INFORMATION, 79 | }, 80 | _ => DiagnosticSeverity::INFORMATION, 81 | }; 82 | 83 | let origin = match diagnostic_capture.name("filepath") { 84 | Some(o) => { 85 | let source_num: SourceNum = o.as_str().parse::().unwrap().into(); 86 | let graph_node = source_mapper.get_node(source_num); 87 | graph.get_node(graph_node).to_str().unwrap().to_string() 88 | } 89 | None => uri.to_str().unwrap().to_string(), 90 | }; 91 | 92 | let diagnostic = Diagnostic { 93 | range: Range::new( 94 | /* Position::new(line, leading_whitespace as u64), 95 | Position::new(line, line_text.len() as u64) */ 96 | Position::new(line, 0), 97 | Position::new(line, 1000), 98 | ), 99 | code: None, 100 | severity: Some(severity), 101 | source: Some(consts::SOURCE.into()), 102 | message: msg.trim().into(), 103 | related_information: None, 104 | tags: None, 105 | code_description: Option::None, 106 | data: Option::None, 107 | }; 108 | 109 | let origin_url = Url::from_file_path(origin).unwrap(); 110 | match diagnostics.get_mut(&origin_url) { 111 | Some(d) => d.push(diagnostic), 112 | None => { 113 | diagnostics.insert(origin_url, vec![diagnostic]); 114 | } 115 | }; 116 | } 117 | diagnostics 118 | } 119 | } 120 | 121 | #[cfg(test)] 122 | mod diagnostics_test { 123 | use std::path::PathBuf; 124 | 125 | use slog::slog_o; 126 | use url::Url; 127 | 128 | use crate::{ 129 | diagnostics_parser::DiagnosticsParser, opengl::MockShaderValidator, source_mapper::SourceMapper, test::new_temp_server, 130 | }; 131 | 132 | #[test] 133 | #[logging_macro::log_scope] 134 | fn test_nvidia_diagnostics() { 135 | slog_scope::scope(&slog_scope::logger().new(slog_o!("driver" => "nvidia")), || { 136 | let mut mockgl = MockShaderValidator::new(); 137 | mockgl.expect_vendor().returning(|| "NVIDIA Corporation".into()); 138 | let server = new_temp_server(Some(Box::new(mockgl))); 139 | 140 | let output = "0(9) : error C0000: syntax error, unexpected '}', expecting ',' or ';' at token \"}\""; 141 | 142 | #[cfg(target_family = "unix")] 143 | let path: PathBuf = 
"/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh".into(); 144 | #[cfg(target_family = "windows")] 145 | let path: PathBuf = "c:\\home\\noah\\.minecraft\\shaderpacks\\test\\shaders\\final.fsh".into(); 146 | 147 | let mut source_mapper = SourceMapper::new(0); 148 | source_mapper.get_num(server.graph.borrow_mut().add_node(&path)); 149 | 150 | let parser = DiagnosticsParser::new(server.opengl_context.as_ref()); 151 | 152 | let results = 153 | parser.parse_diagnostics_output(output.to_string(), path.parent().unwrap(), &source_mapper, &server.graph.borrow()); 154 | 155 | assert_eq!(results.len(), 1); 156 | let first = results.into_iter().next().unwrap(); 157 | assert_eq!(first.0, Url::from_file_path(path).unwrap()); 158 | server.endpoint.request_shutdown(); 159 | }); 160 | } 161 | 162 | #[test] 163 | #[logging_macro::log_scope] 164 | fn test_amd_diagnostics() { 165 | slog_scope::scope(&slog_scope::logger().new(slog_o!("driver" => "amd")), || { 166 | let mut mockgl = MockShaderValidator::new(); 167 | mockgl.expect_vendor().returning(|| "ATI Technologies".into()); 168 | let server = new_temp_server(Some(Box::new(mockgl))); 169 | 170 | let output = "ERROR: 0:1: '' : syntax error: #line 171 | ERROR: 0:10: '' : syntax error: #line 172 | ERROR: 0:15: 'varying' : syntax error: syntax error 173 | "; 174 | 175 | #[cfg(target_family = "unix")] 176 | let path: PathBuf = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh".into(); 177 | #[cfg(target_family = "windows")] 178 | let path: PathBuf = "c:\\home\\noah\\.minecraft\\shaderpacks\\test\\shaders\\final.fsh".into(); 179 | 180 | let mut source_mapper = SourceMapper::new(0); 181 | source_mapper.get_num(server.graph.borrow_mut().add_node(&path)); 182 | 183 | let parser = DiagnosticsParser::new(server.opengl_context.as_ref()); 184 | 185 | let results = 186 | parser.parse_diagnostics_output(output.to_string(), path.parent().unwrap(), &source_mapper, &server.graph.borrow()); 187 | 188 | assert_eq!(results.len(), 1); 189 | let first = results.into_iter().next().unwrap(); 190 | assert_eq!(first.1.len(), 3); 191 | server.endpoint.request_shutdown(); 192 | }); 193 | } 194 | } 195 | -------------------------------------------------------------------------------- /server/main/src/graph.rs: -------------------------------------------------------------------------------- 1 | use petgraph::stable_graph::EdgeIndex; 2 | use petgraph::stable_graph::NodeIndex; 3 | use petgraph::stable_graph::StableDiGraph; 4 | use petgraph::visit::EdgeRef; 5 | use petgraph::Direction; 6 | 7 | use std::{ 8 | collections::{HashMap, HashSet}, 9 | path::{Path, PathBuf}, 10 | str::FromStr, 11 | }; 12 | 13 | use super::IncludePosition; 14 | 15 | /// Wraps a `StableDiGraph` with caching behaviour for node search by maintaining 16 | /// an index for node value to node index and a reverse index. 17 | /// This allows for **O(1)** lookup for a value if it exists, else **O(n)**. 18 | pub struct CachedStableGraph { 19 | // StableDiGraph is used as it allows for String node values, essential for 20 | // generating the GraphViz DOT render. 21 | pub graph: StableDiGraph, 22 | cache: HashMap, 23 | // Maps a node index to its abstracted string representation. 24 | // Mainly used as the graph is based on NodeIndex. 
25 | reverse_index: HashMap, 26 | } 27 | 28 | impl CachedStableGraph { 29 | #[allow(clippy::new_without_default)] 30 | pub fn new() -> CachedStableGraph { 31 | CachedStableGraph { 32 | graph: StableDiGraph::new(), 33 | cache: HashMap::new(), 34 | reverse_index: HashMap::new(), 35 | } 36 | } 37 | 38 | /// Returns the `NodeIndex` for a given graph node with the value of `name` 39 | /// and caches the result in the `HashMap`. Complexity is **O(1)** if the value 40 | /// is cached (which should always be the case), else **O(n)** where **n** is 41 | /// the number of node indices, as an exhaustive search must be done. 42 | pub fn find_node(&mut self, name: &Path) -> Option { 43 | match self.cache.get(name) { 44 | Some(n) => Some(*n), 45 | None => { 46 | // If the string is not in cache, O(n) search the graph (i know...) and then cache the NodeIndex 47 | // for later 48 | let n = self.graph.node_indices().find(|n| self.graph[*n] == name.to_str().unwrap()); 49 | if let Some(n) = n { 50 | self.cache.insert(name.into(), n); 51 | } 52 | n 53 | } 54 | } 55 | } 56 | 57 | // Returns the `PathBuf` for a given `NodeIndex` 58 | pub fn get_node(&self, node: NodeIndex) -> PathBuf { 59 | PathBuf::from_str(&self.graph[node]).unwrap() 60 | } 61 | 62 | /// Returns an iterator over all the `IncludePosition`'s between a parent and its child for all the positions 63 | /// that the child may be imported into the parent, in order of import. 64 | pub fn get_child_positions(&self, parent: NodeIndex, child: NodeIndex) -> impl Iterator + '_ { 65 | let mut edges = self 66 | .graph 67 | .edges(parent) 68 | .filter_map(move |edge| { 69 | let target = self.graph.edge_endpoints(edge.id()).unwrap().1; 70 | if target != child { 71 | return None; 72 | } 73 | Some(self.graph[edge.id()]) 74 | }) 75 | .collect::>(); 76 | edges.sort_by(|x, y| x.line.cmp(&y.line)); 77 | edges.into_iter() 78 | } 79 | 80 | /// Returns an iterator over all the `(NodeIndex, IncludePosition)` tuples between a node and all its children, in order 81 | /// of import. 
82 | pub fn get_all_child_positions(&self, node: NodeIndex) -> impl Iterator + '_ { 83 | let mut edges = self.graph.edges(node).map(|edge| { 84 | let child = self.graph.edge_endpoints(edge.id()).unwrap().1; 85 | (child, self.graph[edge.id()]) 86 | }) 87 | .collect::>(); 88 | edges.sort_by(|x, y| x.1.line.cmp(&y.1.line)); 89 | edges.into_iter() 90 | } 91 | 92 | pub fn add_node(&mut self, name: &Path) -> NodeIndex { 93 | if let Some(idx) = self.cache.get(name) { 94 | return *idx; 95 | } 96 | let idx = self.graph.add_node(name.to_str().unwrap().to_string()); 97 | self.cache.insert(name.to_owned(), idx); 98 | self.reverse_index.insert(idx, name.to_owned()); 99 | idx 100 | } 101 | 102 | pub fn add_edge(&mut self, parent: NodeIndex, child: NodeIndex, meta: IncludePosition) -> EdgeIndex { 103 | self.graph.add_edge(parent, child, meta) 104 | } 105 | 106 | pub fn remove_edge(&mut self, parent: NodeIndex, child: NodeIndex, position: IncludePosition) { 107 | self.graph 108 | .edges(parent) 109 | .find(|edge| self.graph.edge_endpoints(edge.id()).unwrap().1 == child && *edge.weight() == position) 110 | .map(|edge| edge.id()) 111 | .and_then(|edge| self.graph.remove_edge(edge)); 112 | } 113 | 114 | pub fn child_node_indexes(&self, node: NodeIndex) -> impl Iterator + '_ { 115 | self.graph.neighbors(node) 116 | } 117 | 118 | pub fn collect_root_ancestors(&self, node: NodeIndex) -> Vec { 119 | let mut visited = HashSet::new(); 120 | self.get_root_ancestors(node, node, &mut visited) 121 | } 122 | 123 | // TODO: impl Iterator 124 | fn parent_node_indexes(&self, node: NodeIndex) -> Vec { 125 | self.graph.neighbors_directed(node, Direction::Incoming).collect() 126 | } 127 | 128 | fn get_root_ancestors(&self, initial: NodeIndex, node: NodeIndex, visited: &mut HashSet) -> Vec { 129 | if node == initial && !visited.is_empty() { 130 | return vec![]; 131 | } 132 | 133 | let parents = self.parent_node_indexes(node); 134 | let mut collection = Vec::with_capacity(parents.len()); 135 | 136 | for ancestor in &parents { 137 | visited.insert(*ancestor); 138 | } 139 | 140 | for ancestor in &parents { 141 | let ancestors = self.parent_node_indexes(*ancestor); 142 | if !ancestors.is_empty() { 143 | collection.extend(self.get_root_ancestors(initial, *ancestor, visited)); 144 | } else { 145 | collection.push(*ancestor); 146 | } 147 | } 148 | 149 | collection 150 | } 151 | } 152 | 153 | #[cfg(test)] 154 | impl CachedStableGraph { 155 | fn parent_node_names(&self, node: NodeIndex) -> Vec { 156 | self.graph 157 | .neighbors_directed(node, Direction::Incoming) 158 | .map(|n| self.reverse_index.get(&n).unwrap().clone()) 159 | .collect() 160 | } 161 | 162 | fn child_node_names(&self, node: NodeIndex) -> Vec { 163 | self.graph 164 | .neighbors(node) 165 | .map(|n| self.reverse_index.get(&n).unwrap().clone()) 166 | .collect() 167 | } 168 | 169 | fn remove_node(&mut self, name: &Path) { 170 | let idx = self.cache.remove(name); 171 | if let Some(idx) = idx { 172 | self.graph.remove_node(idx); 173 | } 174 | } 175 | } 176 | 177 | #[cfg(test)] 178 | mod graph_test { 179 | use std::path::PathBuf; 180 | 181 | use petgraph::graph::NodeIndex; 182 | 183 | use crate::{graph::CachedStableGraph, IncludePosition}; 184 | 185 | #[test] 186 | #[logging_macro::log_scope] 187 | fn test_graph_two_connected_nodes() { 188 | let mut graph = CachedStableGraph::new(); 189 | 190 | let idx1 = graph.add_node(&PathBuf::from("sample")); 191 | let idx2 = graph.add_node(&PathBuf::from("banana")); 192 | graph.add_edge(idx1, idx2, IncludePosition { line: 3, start: 0, 
end: 0 }); 193 | 194 | let children = graph.child_node_names(idx1); 195 | assert_eq!(children.len(), 1); 196 | assert_eq!(children[0], Into::::into("banana".to_string())); 197 | 198 | let children: Vec = graph.child_node_indexes(idx1).collect(); 199 | assert_eq!(children.len(), 1); 200 | assert_eq!(children[0], idx2); 201 | 202 | let parents = graph.parent_node_names(idx1); 203 | assert_eq!(parents.len(), 0); 204 | 205 | let parents = graph.parent_node_names(idx2); 206 | assert_eq!(parents.len(), 1); 207 | assert_eq!(parents[0], Into::::into("sample".to_string())); 208 | 209 | let parents = graph.parent_node_indexes(idx2); 210 | assert_eq!(parents.len(), 1); 211 | assert_eq!(parents[0], idx1); 212 | 213 | let ancestors = graph.collect_root_ancestors(idx2); 214 | assert_eq!(ancestors.len(), 1); 215 | assert_eq!(ancestors[0], idx1); 216 | 217 | let ancestors = graph.collect_root_ancestors(idx1); 218 | assert_eq!(ancestors.len(), 0); 219 | 220 | graph.remove_node(&PathBuf::from("sample")); 221 | assert_eq!(graph.graph.node_count(), 1); 222 | assert!(graph.find_node(&PathBuf::from("sample")).is_none()); 223 | 224 | let neighbors = graph.child_node_names(idx2); 225 | assert_eq!(neighbors.len(), 0); 226 | } 227 | 228 | #[test] 229 | #[logging_macro::log_scope] 230 | fn test_double_import() { 231 | let mut graph = CachedStableGraph::new(); 232 | 233 | let idx0 = graph.add_node(&PathBuf::from("0")); 234 | let idx1 = graph.add_node(&PathBuf::from("1")); 235 | 236 | graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 }); 237 | graph.add_edge(idx0, idx1, IncludePosition { line: 4, start: 0, end: 0 }); 238 | 239 | // 0 240 | // / \ 241 | // 1 1 242 | 243 | assert_eq!(2, graph.get_child_positions(idx0, idx1).count()); 244 | 245 | let mut edge_metas = graph.get_child_positions(idx0, idx1); 246 | assert_eq!(Some(IncludePosition { line: 2, start: 0, end: 0 }), edge_metas.next()); 247 | assert_eq!(Some(IncludePosition { line: 4, start: 0, end: 0 }), edge_metas.next()); 248 | } 249 | 250 | #[test] 251 | #[logging_macro::log_scope] 252 | fn test_collect_root_ancestors() { 253 | { 254 | let mut graph = CachedStableGraph::new(); 255 | 256 | let idx0 = graph.add_node(&PathBuf::from("0")); 257 | let idx1 = graph.add_node(&PathBuf::from("1")); 258 | let idx2 = graph.add_node(&PathBuf::from("2")); 259 | let idx3 = graph.add_node(&PathBuf::from("3")); 260 | 261 | graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 }); 262 | graph.add_edge(idx1, idx2, IncludePosition { line: 3, start: 0, end: 0 }); 263 | graph.add_edge(idx3, idx1, IncludePosition { line: 4, start: 0, end: 0 }); 264 | 265 | // 0 3 266 | // |/ 267 | // 1 268 | // | 269 | // 2 270 | 271 | let roots = graph.collect_root_ancestors(idx2); 272 | assert_eq!(roots, vec![idx3, idx0]); 273 | 274 | let roots = graph.collect_root_ancestors(idx1); 275 | assert_eq!(roots, vec![idx3, idx0]); 276 | 277 | let roots = graph.collect_root_ancestors(idx0); 278 | assert_eq!(roots, vec![]); 279 | 280 | let roots = graph.collect_root_ancestors(idx3); 281 | assert_eq!(roots, vec![]); 282 | } 283 | { 284 | let mut graph = CachedStableGraph::new(); 285 | 286 | let idx0 = graph.add_node(&PathBuf::from("0")); 287 | let idx1 = graph.add_node(&PathBuf::from("1")); 288 | let idx2 = graph.add_node(&PathBuf::from("2")); 289 | let idx3 = graph.add_node(&PathBuf::from("3")); 290 | 291 | graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 }); 292 | graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 }); 293 | 
graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 }); 294 | 295 | // 0 296 | // / \ 297 | // 1 2 298 | // / 299 | // 3 300 | 301 | let roots = graph.collect_root_ancestors(idx3); 302 | assert_eq!(roots, vec![idx0]); 303 | 304 | let roots = graph.collect_root_ancestors(idx2); 305 | assert_eq!(roots, vec![idx0]); 306 | 307 | let roots = graph.collect_root_ancestors(idx1); 308 | assert_eq!(roots, vec![idx0]); 309 | 310 | let roots = graph.collect_root_ancestors(idx0); 311 | assert_eq!(roots, vec![]); 312 | } 313 | { 314 | let mut graph = CachedStableGraph::new(); 315 | 316 | let idx0 = graph.add_node(&PathBuf::from("0")); 317 | let idx1 = graph.add_node(&PathBuf::from("1")); 318 | let idx2 = graph.add_node(&PathBuf::from("2")); 319 | let idx3 = graph.add_node(&PathBuf::from("3")); 320 | 321 | graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 }); 322 | graph.add_edge(idx2, idx3, IncludePosition { line: 3, start: 0, end: 0 }); 323 | graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 }); 324 | 325 | // 0 326 | // \ 327 | // 2 1 328 | // \ / 329 | // 3 330 | 331 | let roots = graph.collect_root_ancestors(idx3); 332 | assert_eq!(roots, vec![idx0, idx2]); 333 | 334 | let roots = graph.collect_root_ancestors(idx2); 335 | assert_eq!(roots, vec![]); 336 | 337 | let roots = graph.collect_root_ancestors(idx1); 338 | assert_eq!(roots, vec![idx0]); 339 | 340 | let roots = graph.collect_root_ancestors(idx0); 341 | assert_eq!(roots, vec![]); 342 | } 343 | { 344 | let mut graph = CachedStableGraph::new(); 345 | 346 | let idx0 = graph.add_node(&PathBuf::from("0")); 347 | let idx1 = graph.add_node(&PathBuf::from("1")); 348 | let idx2 = graph.add_node(&PathBuf::from("2")); 349 | let idx3 = graph.add_node(&PathBuf::from("3")); 350 | 351 | graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 }); 352 | graph.add_edge(idx1, idx2, IncludePosition { line: 4, start: 0, end: 0 }); 353 | graph.add_edge(idx1, idx3, IncludePosition { line: 6, start: 0, end: 0 }); 354 | 355 | // 0 356 | // | 357 | // 1 358 | // / \ 359 | // 2 3 360 | 361 | let roots = graph.collect_root_ancestors(idx3); 362 | assert_eq!(roots, vec![idx0]); 363 | 364 | let roots = graph.collect_root_ancestors(idx2); 365 | assert_eq!(roots, vec![idx0]); 366 | 367 | let roots = graph.collect_root_ancestors(idx1); 368 | assert_eq!(roots, vec![idx0]); 369 | 370 | let roots = graph.collect_root_ancestors(idx0); 371 | assert_eq!(roots, vec![]); 372 | } 373 | } 374 | } 375 | -------------------------------------------------------------------------------- /server/main/src/linemap.rs: -------------------------------------------------------------------------------- 1 | use rust_lsp::lsp_types::Position; 2 | 3 | pub struct LineMap { 4 | positions: Vec, 5 | } 6 | 7 | impl LineMap { 8 | pub fn new(source: &str) -> Self { 9 | let mut positions = vec![0]; 10 | for (i, char) in source.char_indices() { 11 | if char == '\n' { 12 | positions.push(i + 1); 13 | } 14 | } 15 | 16 | LineMap { positions } 17 | } 18 | 19 | pub fn offset_for_position(&self, position: Position) -> usize { 20 | self.positions[position.line as usize] + (position.character as usize) 21 | } 22 | } 23 | 24 | #[cfg(test)] 25 | mod test { 26 | use rust_lsp::lsp_types::Position; 27 | 28 | use crate::linemap::LineMap; 29 | 30 | #[test] 31 | #[logging_macro::log_scope] 32 | fn test_linemap() { 33 | struct Test { 34 | string: &'static str, 35 | pos: Position, 36 | offset: usize, 37 | } 38 | 39 | let cases = vec![ 40 | Test { 41 | 
string: "sample\ntext", 42 | pos: Position { line: 1, character: 2 }, 43 | offset: 9, 44 | }, 45 | Test { 46 | string: "banana", 47 | pos: Position { line: 0, character: 0 }, 48 | offset: 0, 49 | }, 50 | Test { 51 | string: "banana", 52 | pos: Position { line: 0, character: 1 }, 53 | offset: 1, 54 | }, 55 | Test { 56 | string: "sample\ntext", 57 | pos: Position { line: 1, character: 0 }, 58 | offset: 7, 59 | }, 60 | Test { 61 | string: "sample\n\ttext", 62 | pos: Position { line: 1, character: 2 }, 63 | offset: 9, 64 | }, 65 | Test { 66 | string: "sample\r\ntext", 67 | pos: Position { line: 1, character: 0 }, 68 | offset: 8, 69 | }, 70 | ]; 71 | 72 | for case in cases { 73 | let linemap = LineMap::new(case.string); 74 | 75 | let offset = linemap.offset_for_position(case.pos); 76 | 77 | assert_eq!(offset, case.offset, "{:?}", case.string); 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /server/main/src/lsp_ext.rs: -------------------------------------------------------------------------------- 1 | use rust_lsp::lsp_types::notification::Notification; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | pub enum Status {} 5 | 6 | impl Notification for Status { 7 | type Params = StatusParams; 8 | const METHOD: &'static str = "mc-glsl/status"; 9 | } 10 | 11 | #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] 12 | pub struct StatusParams { 13 | pub status: String, 14 | pub message: Option, 15 | pub icon: Option, 16 | } 17 | -------------------------------------------------------------------------------- /server/main/src/main.rs: -------------------------------------------------------------------------------- 1 | #![feature(once_cell)] 2 | #![feature(option_get_or_insert_default)] 3 | 4 | use merge_views::FilialTuple; 5 | use rust_lsp::jsonrpc::{method_types::*, *}; 6 | use rust_lsp::lsp::*; 7 | use rust_lsp::lsp_types::{notification::*, *}; 8 | 9 | use petgraph::stable_graph::NodeIndex; 10 | use path_slash::PathExt; 11 | 12 | use serde::Deserialize; 13 | use serde_json::{from_value, Value}; 14 | 15 | use tree_sitter::Parser; 16 | use url_norm::FromUrl; 17 | 18 | use walkdir::WalkDir; 19 | 20 | use std::collections::{HashMap, HashSet}; 21 | use std::convert::TryFrom; 22 | use std::fmt::{Debug, Display, Formatter}; 23 | use std::fs; 24 | use std::io::{stdin, stdout, BufRead, BufReader}; 25 | use std::iter::{Extend, FromIterator}; 26 | use std::rc::Rc; 27 | use std::str::FromStr; 28 | 29 | use std::{ 30 | cell::RefCell, 31 | path::{Path, PathBuf}, 32 | }; 33 | 34 | use slog::Level; 35 | use slog_scope::{debug, error, info, warn}; 36 | 37 | use path_slash::PathBufExt; 38 | 39 | use anyhow::{anyhow, Result}; 40 | 41 | use regex::Regex; 42 | 43 | use lazy_static::lazy_static; 44 | 45 | mod commands; 46 | mod configuration; 47 | mod consts; 48 | mod dfs; 49 | mod diagnostics_parser; 50 | mod graph; 51 | mod linemap; 52 | mod lsp_ext; 53 | mod merge_views; 54 | mod navigation; 55 | mod opengl; 56 | mod source_mapper; 57 | mod url_norm; 58 | 59 | #[cfg(test)] 60 | mod test; 61 | 62 | pub fn is_top_level(path: &Path) -> bool { 63 | let path = path.to_slash().unwrap(); 64 | if !RE_WORLD_FOLDER.is_match(&path) { 65 | return false; 66 | } 67 | let parts: Vec<&str> = path.split("/").collect(); 68 | let len = parts.len(); 69 | (len == 3 || len == 2) && TOPLEVEL_FILES.contains(parts[len - 1]) 70 | } 71 | 72 | lazy_static! 
{ 73 | static ref RE_INCLUDE: Regex = Regex::new(r#"^(?:\s)*?(?:#include) "(.+)"\r?"#).unwrap(); 74 | static ref RE_WORLD_FOLDER: Regex = Regex::new(r#"^shaders(/world-?\d+)?"#).unwrap(); 75 | static ref TOPLEVEL_FILES: HashSet = { 76 | let mut set = HashSet::with_capacity(1716); 77 | for ext in ["fsh", "vsh", "gsh", "csh"] { 78 | set.insert(format!("composite.{}", ext)); 79 | set.insert(format!("deferred.{}", ext)); 80 | set.insert(format!("prepare.{}", ext)); 81 | set.insert(format!("shadowcomp.{}", ext)); 82 | for i in 1..=99 { 83 | set.insert(format!("composite{}.{}", i, ext)); 84 | set.insert(format!("deferred{}.{}", i, ext)); 85 | set.insert(format!("prepare{}.{}", i, ext)); 86 | set.insert(format!("shadowcomp{}.{}", i, ext)); 87 | } 88 | set.insert(format!("composite_pre.{}", ext)); 89 | set.insert(format!("deferred_pre.{}", ext)); 90 | set.insert(format!("final.{}", ext)); 91 | set.insert(format!("gbuffers_armor_glint.{}", ext)); 92 | set.insert(format!("gbuffers_basic.{}", ext)); 93 | set.insert(format!("gbuffers_beaconbeam.{}", ext)); 94 | set.insert(format!("gbuffers_block.{}", ext)); 95 | set.insert(format!("gbuffers_clouds.{}", ext)); 96 | set.insert(format!("gbuffers_damagedblock.{}", ext)); 97 | set.insert(format!("gbuffers_entities.{}", ext)); 98 | set.insert(format!("gbuffers_entities_glowing.{}", ext)); 99 | set.insert(format!("gbuffers_hand.{}", ext)); 100 | set.insert(format!("gbuffers_hand_water.{}", ext)); 101 | set.insert(format!("gbuffers_item.{}", ext)); 102 | set.insert(format!("gbuffers_line.{}", ext)); 103 | set.insert(format!("gbuffers_skybasic.{}", ext)); 104 | set.insert(format!("gbuffers_skytextured.{}", ext)); 105 | set.insert(format!("gbuffers_spidereyes.{}", ext)); 106 | set.insert(format!("gbuffers_terrain.{}", ext)); 107 | set.insert(format!("gbuffers_terrain_cutout.{}", ext)); 108 | set.insert(format!("gbuffers_terrain_cutout_mip.{}", ext)); 109 | set.insert(format!("gbuffers_terrain_solid.{}", ext)); 110 | set.insert(format!("gbuffers_textured.{}", ext)); 111 | set.insert(format!("gbuffers_textured_lit.{}", ext)); 112 | set.insert(format!("gbuffers_water.{}", ext)); 113 | set.insert(format!("gbuffers_weather.{}", ext)); 114 | set.insert(format!("shadow.{}", ext)); 115 | set.insert(format!("shadow_cutout.{}", ext)); 116 | set.insert(format!("shadow_solid.{}", ext)); 117 | } 118 | let base_char_num = 'a' as u8; 119 | for suffix_num in 0u8..=25u8 { 120 | let suffix_char = (base_char_num + suffix_num) as char; 121 | set.insert(format!("composite_{}.csh", suffix_char)); 122 | set.insert(format!("deferred_{}.csh", suffix_char)); 123 | set.insert(format!("prepare_{}.csh", suffix_char)); 124 | set.insert(format!("shadowcomp_{}.csh", suffix_char)); 125 | for i in 1..=99 { 126 | let total_suffix = format!("{}_{}", i, suffix_char); 127 | set.insert(format!("composite{}.csh", total_suffix)); 128 | set.insert(format!("deferred{}.csh", total_suffix)); 129 | set.insert(format!("prepare{}.csh", total_suffix)); 130 | set.insert(format!("shadowcomp{}.csh", total_suffix)); 131 | } 132 | } 133 | set 134 | }; 135 | } 136 | 137 | fn main() { 138 | let guard = logging::set_logger_with_level(Level::Info); 139 | 140 | let endpoint_output = LSPEndpoint::create_lsp_output_with_output_stream(stdout); 141 | 142 | let cache_graph = graph::CachedStableGraph::new(); 143 | 144 | let mut parser = Parser::new(); 145 | parser.set_language(tree_sitter_glsl::language()).unwrap(); 146 | 147 | let mut langserver = MinecraftShaderLanguageServer { 148 | endpoint: endpoint_output.clone(), 149 
| graph: Rc::new(RefCell::new(cache_graph)), 150 | root: "".into(), 151 | command_provider: None, 152 | opengl_context: Rc::new(opengl::OpenGlContext::new()), 153 | tree_sitter: Rc::new(RefCell::new(parser)), 154 | log_guard: Some(guard), 155 | }; 156 | 157 | langserver.command_provider = Some(commands::CustomCommandProvider::new(vec![ 158 | ( 159 | "graphDot", 160 | Box::new(commands::graph_dot::GraphDotCommand { 161 | graph: langserver.graph.clone(), 162 | }), 163 | ), 164 | ( 165 | "virtualMerge", 166 | Box::new(commands::merged_includes::VirtualMergedDocument { 167 | graph: langserver.graph.clone(), 168 | }), 169 | ), 170 | ( 171 | "parseTree", 172 | Box::new(commands::parse_tree::TreeSitterSExpr { 173 | tree_sitter: langserver.tree_sitter.clone(), 174 | }), 175 | ), 176 | ])); 177 | 178 | LSPEndpoint::run_server_from_input(&mut stdin().lock(), endpoint_output, langserver); 179 | } 180 | 181 | pub struct MinecraftShaderLanguageServer { 182 | endpoint: Endpoint, 183 | graph: Rc>, 184 | root: PathBuf, 185 | command_provider: Option, 186 | opengl_context: Rc, 187 | tree_sitter: Rc>, 188 | log_guard: Option, 189 | } 190 | 191 | #[derive(Clone, Copy, PartialEq, Eq, Hash)] 192 | pub struct IncludePosition { 193 | // the 0-indexed line on which the include lives. 194 | line: usize, 195 | // the 0-indexed char offset defining the start of the include path string. 196 | start: usize, 197 | // the 0-indexed char offset defining the end of the include path string. 198 | end: usize, 199 | } 200 | 201 | impl Debug for IncludePosition { 202 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { 203 | write!(f, "{{line: {}}}", self.line) 204 | } 205 | } 206 | 207 | impl Display for IncludePosition { 208 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { 209 | write!(f, "{{line: {}}}", self.line) 210 | } 211 | } 212 | 213 | #[derive(Debug)] 214 | pub enum TreeType { 215 | Fragment, 216 | Vertex, 217 | Geometry, 218 | Compute, 219 | } 220 | 221 | impl MinecraftShaderLanguageServer { 222 | pub fn error_not_available(data: DATA) -> MethodError { 223 | let msg = "Functionality not implemented.".to_string(); 224 | MethodError:: { 225 | code: 1, 226 | message: msg, 227 | data, 228 | } 229 | } 230 | 231 | fn build_initial_graph(&self) { 232 | info!("generating graph for current root"; "root" => self.root.to_str().unwrap()); 233 | 234 | // filter directories and files not ending in any of the 3 extensions 235 | WalkDir::new(&self.root) 236 | .into_iter() 237 | .filter_map(|entry| { 238 | if entry.is_err() { 239 | return None; 240 | } 241 | 242 | let entry = entry.unwrap(); 243 | let path = entry.path(); 244 | if path.is_dir() { 245 | return None; 246 | } 247 | 248 | let ext = match path.extension() { 249 | Some(e) => e, 250 | None => return None, 251 | }; 252 | 253 | // TODO: include user added extensions with a set 254 | if ext != "vsh" && ext != "fsh" && ext != "csh" && ext != "gsh" && ext != "glsl" && ext != "inc" { 255 | return None; 256 | } 257 | 258 | Some(entry.into_path()) 259 | }) 260 | .for_each(|path| { 261 | // iterate all valid found files, search for includes, add a node into the graph for each 262 | // file and add a file->includes KV into the map 263 | self.add_file_and_includes_to_graph(&path); 264 | }); 265 | 266 | info!("finished building project include graph"); 267 | } 268 | 269 | fn add_file_and_includes_to_graph(&self, path: &Path) { 270 | let includes = self.find_includes(path); 271 | 272 | let idx = self.graph.borrow_mut().add_node(path); 273 | 274 | 
debug!("adding includes for new file"; "file" => path.to_str().unwrap(), "includes" => format!("{:?}", includes)); 275 | for include in includes { 276 | self.add_include(include, idx); 277 | } 278 | } 279 | 280 | fn add_include(&self, include: (PathBuf, IncludePosition), node: NodeIndex) { 281 | let child = self.graph.borrow_mut().add_node(&include.0); 282 | self.graph.borrow_mut().add_edge(node, child, include.1); 283 | } 284 | 285 | pub fn find_includes(&self, file: &Path) -> Vec<(PathBuf, IncludePosition)> { 286 | let mut includes = Vec::default(); 287 | 288 | let buf = BufReader::new(std::fs::File::open(file).unwrap()); 289 | buf.lines() 290 | .enumerate() 291 | .filter_map(|line| match line.1 { 292 | Ok(t) => Some((line.0, t)), 293 | Err(_e) => None, 294 | }) 295 | .filter(|line| RE_INCLUDE.is_match(line.1.as_str())) 296 | .for_each(|line| { 297 | let cap = RE_INCLUDE.captures(line.1.as_str()).unwrap().get(1).unwrap(); 298 | 299 | let start = cap.start(); 300 | let end = cap.end(); 301 | let mut path: String = cap.as_str().into(); 302 | 303 | let full_include = if path.starts_with('/') { 304 | path = path.strip_prefix('/').unwrap().to_string(); 305 | self.root.join("shaders").join(PathBuf::from_slash(&path)) 306 | } else { 307 | file.parent().unwrap().join(PathBuf::from_slash(&path)) 308 | }; 309 | 310 | includes.push((full_include, IncludePosition { line: line.0, start, end })); 311 | }); 312 | 313 | includes 314 | } 315 | 316 | fn update_includes(&self, file: &Path) { 317 | let includes = self.find_includes(file); 318 | 319 | info!("includes found for file"; "file" => file.to_str().unwrap(), "includes" => format!("{:?}", includes)); 320 | 321 | let idx = match self.graph.borrow_mut().find_node(file) { 322 | None => return, 323 | Some(n) => n, 324 | }; 325 | 326 | let prev_children: HashSet<_> = HashSet::from_iter(self.graph.borrow().get_all_child_positions(idx).map(|tup| { 327 | (self.graph.borrow().get_node(tup.0), tup.1) 328 | })); 329 | let new_children: HashSet<_> = includes.iter().cloned().collect(); 330 | 331 | let to_be_added = new_children.difference(&prev_children); 332 | let to_be_removed = prev_children.difference(&new_children); 333 | 334 | debug!( 335 | "include sets diff'd"; 336 | "for removal" => format!("{:?}", to_be_removed), 337 | "for addition" => format!("{:?}", to_be_added) 338 | ); 339 | 340 | for removal in to_be_removed { 341 | let child = self.graph.borrow_mut().find_node(&removal.0).unwrap(); 342 | self.graph.borrow_mut().remove_edge(idx, child, removal.1); 343 | } 344 | 345 | for insertion in to_be_added { 346 | self.add_include(includes.iter().find(|f| f.0 == *insertion.0).unwrap().clone(), idx); 347 | } 348 | } 349 | 350 | pub fn lint(&self, uri: &Path) -> Result>> { 351 | // get all top level ancestors of this file 352 | let file_ancestors = match self.get_file_toplevel_ancestors(uri) { 353 | Ok(opt) => match opt { 354 | Some(ancestors) => ancestors, 355 | None => vec![], 356 | }, 357 | Err(e) => return Err(e), 358 | }; 359 | 360 | info!( 361 | "top-level file ancestors found"; 362 | "uri" => uri.to_str().unwrap(), 363 | "ancestors" => format!("{:?}", file_ancestors 364 | .iter() 365 | .map(|e| PathBuf::from_str( 366 | &self.graph.borrow().graph[*e].clone() 367 | ) 368 | .unwrap()) 369 | .collect::>()) 370 | ); 371 | 372 | // the set of all filepath->content. 
373 | let mut all_sources: HashMap = HashMap::new(); 374 | // the set of filepath->list of diagnostics to report 375 | let mut diagnostics: HashMap> = HashMap::new(); 376 | 377 | // we want to backfill the diagnostics map with all linked sources 378 | let back_fill = |all_sources: &HashMap, diagnostics: &mut HashMap>| { 379 | for path in all_sources.keys() { 380 | diagnostics.entry(Url::from_file_path(path).unwrap()).or_default(); 381 | } 382 | }; 383 | 384 | // if we are a top-level file (this has to be one of the set defined by Optifine, right?) 385 | if file_ancestors.is_empty() { 386 | // gather the list of all descendants 387 | let root = self.graph.borrow_mut().find_node(uri).unwrap(); 388 | let tree = match self.get_dfs_for_node(root) { 389 | Ok(tree) => tree, 390 | Err(e) => { 391 | diagnostics.insert(Url::from_file_path(uri).unwrap(), vec![e.into()]); 392 | return Ok(diagnostics); 393 | } 394 | }; 395 | 396 | all_sources.extend(self.load_sources(&tree)?); 397 | 398 | let mut source_mapper = source_mapper::SourceMapper::new(all_sources.len()); 399 | 400 | let view = { 401 | let graph = self.graph.borrow(); 402 | let merged_string = { 403 | merge_views::MergeViewBuilder::new(&tree, &all_sources, &graph, &mut source_mapper).build() 404 | }; 405 | merged_string 406 | }; 407 | 408 | let root_path = self.graph.borrow().get_node(root); 409 | let ext = match root_path.extension() { 410 | Some(ext) => ext.to_str().unwrap(), 411 | None => { 412 | back_fill(&all_sources, &mut diagnostics); 413 | return Ok(diagnostics); 414 | } 415 | }; 416 | 417 | if !is_top_level(root_path.strip_prefix(&self.root).unwrap()) { 418 | warn!("got a non-valid toplevel file"; "root_ancestor" => root_path.to_str().unwrap(), "stripped" => root_path.strip_prefix(&self.root).unwrap().to_str().unwrap()); 419 | back_fill(&all_sources, &mut diagnostics); 420 | return Ok(diagnostics); 421 | } 422 | 423 | let tree_type = if ext == "fsh" { 424 | TreeType::Fragment 425 | } else if ext == "vsh" { 426 | TreeType::Vertex 427 | } else if ext == "gsh" { 428 | TreeType::Geometry 429 | } else if ext == "csh" { 430 | TreeType::Compute 431 | } else { 432 | unreachable!(); 433 | }; 434 | 435 | let stdout = match self.compile_shader_source(&view, tree_type, &root_path) { 436 | Some(s) => s, 437 | None => { 438 | back_fill(&all_sources, &mut diagnostics); 439 | return Ok(diagnostics); 440 | } 441 | }; 442 | 443 | let diagnostics_parser = diagnostics_parser::DiagnosticsParser::new(self.opengl_context.as_ref()); 444 | 445 | diagnostics.extend(diagnostics_parser.parse_diagnostics_output(stdout, uri, &source_mapper, &self.graph.borrow())); 446 | } else { 447 | let mut all_trees: Vec<(TreeType, Vec)> = Vec::new(); 448 | 449 | for root in &file_ancestors { 450 | let nodes = match self.get_dfs_for_node(*root) { 451 | Ok(nodes) => nodes, 452 | Err(e) => { 453 | diagnostics.insert(Url::from_file_path(uri).unwrap(), vec![e.into()]); 454 | back_fill(&all_sources, &mut diagnostics); // TODO: confirm 455 | return Ok(diagnostics); 456 | } 457 | }; 458 | 459 | let root_path = self.graph.borrow().get_node(*root).clone(); 460 | let ext = match root_path.extension() { 461 | Some(ext) => ext.to_str().unwrap(), 462 | None => continue, 463 | }; 464 | 465 | if !is_top_level(root_path.strip_prefix(&self.root).unwrap()) { 466 | warn!("got a non-valid toplevel file"; "root_ancestor" => root_path.to_str().unwrap(), "stripped" => root_path.strip_prefix(&self.root).unwrap().to_str().unwrap()); 467 | continue; 468 | } 469 | 470 | let tree_type = if ext == "fsh" { 
471 | TreeType::Fragment 472 | } else if ext == "vsh" { 473 | TreeType::Vertex 474 | } else if ext == "gsh" { 475 | TreeType::Geometry 476 | } else if ext == "csh" { 477 | TreeType::Compute 478 | } else { 479 | unreachable!(); 480 | }; 481 | 482 | let sources = self.load_sources(&nodes)?; 483 | all_trees.push((tree_type, nodes)); 484 | all_sources.extend(sources); 485 | } 486 | 487 | for tree in all_trees { 488 | // bit over-zealous in allocation but better than having to resize 489 | let mut source_mapper = source_mapper::SourceMapper::new(all_sources.len()); 490 | let view = { 491 | let graph = self.graph.borrow(); 492 | let merged_string = { 493 | merge_views::MergeViewBuilder::new(&tree.1, &all_sources, &graph, &mut source_mapper).build() 494 | }; 495 | merged_string 496 | }; 497 | 498 | let root_path = self.graph.borrow().get_node(tree.1.first().unwrap().child); 499 | let stdout = match self.compile_shader_source(&view, tree.0, &root_path) { 500 | Some(s) => s, 501 | None => continue, 502 | }; 503 | 504 | let diagnostics_parser = diagnostics_parser::DiagnosticsParser::new(self.opengl_context.as_ref()); 505 | 506 | diagnostics.extend(diagnostics_parser.parse_diagnostics_output(stdout, uri, &source_mapper, &self.graph.borrow())); 507 | } 508 | }; 509 | 510 | back_fill(&all_sources, &mut diagnostics); 511 | Ok(diagnostics) 512 | } 513 | 514 | fn compile_shader_source(&self, source: &str, tree_type: TreeType, path: &Path) -> Option { 515 | let result = self.opengl_context.clone().validate(tree_type, source); 516 | match &result { 517 | Some(output) => { 518 | info!("compilation errors reported"; "errors" => format!("`{}`", output.replace('\n', "\\n")), "tree_root" => path.to_str().unwrap()) 519 | } 520 | None => info!("compilation reported no errors"; "tree_root" => path.to_str().unwrap()), 521 | }; 522 | result 523 | } 524 | 525 | pub fn get_dfs_for_node(&self, root: NodeIndex) -> Result, dfs::error::CycleError> { 526 | let graph_ref = self.graph.borrow(); 527 | 528 | let dfs = dfs::Dfs::new(&graph_ref, root); 529 | 530 | dfs.collect::>() 531 | } 532 | 533 | pub fn load_sources(&self, nodes: &[FilialTuple]) -> Result> { 534 | let mut sources = HashMap::new(); 535 | 536 | for node in nodes { 537 | let graph = self.graph.borrow(); 538 | let path = graph.get_node(node.child); 539 | 540 | if sources.contains_key(&path) { 541 | continue; 542 | } 543 | 544 | let source = match fs::read_to_string(&path) { 545 | Ok(s) => s, 546 | Err(e) => return Err(anyhow!("error reading {:?}: {}", path, e)), 547 | }; 548 | let source = source.replace("\r\n", "\n"); 549 | sources.insert(path.clone(), source); 550 | } 551 | 552 | Ok(sources) 553 | } 554 | 555 | fn get_file_toplevel_ancestors(&self, uri: &Path) -> Result>> { 556 | let curr_node = match self.graph.borrow_mut().find_node(uri) { 557 | Some(n) => n, 558 | None => return Err(anyhow!("node not found {:?}", uri)), 559 | }; 560 | let roots = self.graph.borrow().collect_root_ancestors(curr_node); 561 | if roots.is_empty() { 562 | return Ok(None); 563 | } 564 | Ok(Some(roots)) 565 | } 566 | 567 | pub fn publish_diagnostic(&self, diagnostics: HashMap>, document_version: Option) { 568 | // info!("DIAGNOSTICS:\n{:?}", diagnostics); 569 | for (uri, diagnostics) in diagnostics { 570 | self.endpoint 571 | .send_notification( 572 | PublishDiagnostics::METHOD, 573 | PublishDiagnosticsParams { 574 | uri, 575 | diagnostics, 576 | version: document_version, 577 | }, 578 | ) 579 | .expect("failed to publish diagnostics"); 580 | } 581 | } 582 | 583 | fn 
set_status(&self, status: impl Into, message: impl Into, icon: impl Into) { 584 | self.endpoint 585 | .send_notification( 586 | lsp_ext::Status::METHOD, 587 | lsp_ext::StatusParams { 588 | status: status.into(), 589 | message: Some(message.into()), 590 | icon: Some(icon.into()), 591 | }, 592 | ) 593 | .unwrap_or(()); 594 | } 595 | } 596 | 597 | impl LanguageServerHandling for MinecraftShaderLanguageServer { 598 | fn initialize(&mut self, params: InitializeParams, completable: MethodCompletable) { 599 | logging::slog_with_trace_id(|| { 600 | info!("starting server..."); 601 | 602 | let capabilities = ServerCapabilities { 603 | definition_provider: Some(OneOf::Left(true)), 604 | references_provider: Some(OneOf::Left(true)), 605 | document_symbol_provider: Some(OneOf::Left(true)), 606 | document_link_provider: Some(DocumentLinkOptions { 607 | resolve_provider: None, 608 | work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, 609 | }), 610 | execute_command_provider: Some(ExecuteCommandOptions { 611 | commands: vec!["graphDot".into()], 612 | work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, 613 | }), 614 | text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { 615 | open_close: Some(true), 616 | will_save: None, 617 | will_save_wait_until: None, 618 | change: Some(TextDocumentSyncKind::FULL), 619 | save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions { include_text: Some(true) })), 620 | })), 621 | ..ServerCapabilities::default() 622 | }; 623 | 624 | let root = match params.root_uri { 625 | Some(uri) => PathBuf::from_url(uri), 626 | None => { 627 | completable.complete(Err(MethodError { 628 | code: 42069, 629 | message: "Must be in workspace".into(), 630 | data: InitializeError { retry: false }, 631 | })); 632 | return; 633 | } 634 | }; 635 | 636 | completable.complete(Ok(InitializeResult { 637 | capabilities, 638 | server_info: None, 639 | })); 640 | 641 | self.set_status("loading", "Building dependency graph...", "$(loading~spin)"); 642 | 643 | self.root = root; 644 | 645 | 646 | self.build_initial_graph(); 647 | 648 | self.set_status("ready", "Project initialized", "$(check)"); 649 | }); 650 | } 651 | 652 | fn shutdown(&mut self, _: (), completable: LSCompletable<()>) { 653 | warn!("shutting down language server..."); 654 | completable.complete(Ok(())); 655 | } 656 | 657 | fn exit(&mut self, _: ()) { 658 | self.endpoint.request_shutdown(); 659 | } 660 | 661 | fn workspace_change_configuration(&mut self, params: DidChangeConfigurationParams) { 662 | logging::slog_with_trace_id(|| { 663 | #[derive(Deserialize)] 664 | struct Configuration { 665 | #[serde(alias = "logLevel")] 666 | log_level: String, 667 | } 668 | 669 | if let Some(settings) = params.settings.as_object().unwrap().get("mcglsl") { 670 | let config: Configuration = from_value(settings.to_owned()).unwrap(); 671 | 672 | info!("got updated configuration"; "config" => params.settings.as_object().unwrap().get("mcglsl").unwrap().to_string()); 673 | 674 | configuration::handle_log_level_change(config.log_level, |level| { 675 | self.log_guard = None; // set to None so Drop is invoked 676 | self.log_guard = Some(logging::set_logger_with_level(level)); 677 | }) 678 | } 679 | }); 680 | } 681 | 682 | fn did_open_text_document(&mut self, params: DidOpenTextDocumentParams) { 683 | logging::slog_with_trace_id(|| { 684 | //info!("opened doc {}", params.text_document.uri); 685 | let path = PathBuf::from_url(params.text_document.uri); 686 | if 
!path.starts_with(&self.root) { 687 | return; 688 | } 689 | 690 | if self.graph.borrow_mut().find_node(&path) == None { 691 | self.add_file_and_includes_to_graph(&path); 692 | } 693 | match self.lint(&path) { 694 | Ok(diagnostics) => self.publish_diagnostic(diagnostics, None), 695 | Err(e) => error!("error linting"; "error" => format!("{:?}", e), "path" => path.to_str().unwrap()), 696 | } 697 | }); 698 | } 699 | 700 | fn did_change_text_document(&mut self, _: DidChangeTextDocumentParams) {} 701 | 702 | fn did_close_text_document(&mut self, _: DidCloseTextDocumentParams) {} 703 | 704 | fn did_save_text_document(&mut self, params: DidSaveTextDocumentParams) { 705 | logging::slog_with_trace_id(|| { 706 | let path = PathBuf::from_url(params.text_document.uri); 707 | if !path.starts_with(&self.root) { 708 | return; 709 | } 710 | self.update_includes(&path); 711 | 712 | match self.lint(&path) { 713 | Ok(diagnostics) => self.publish_diagnostic(diagnostics, None), 714 | Err(e) => error!("error linting"; "error" => format!("{:?}", e), "path" => path.to_str().unwrap()), 715 | } 716 | }); 717 | } 718 | 719 | fn did_change_watched_files(&mut self, _: DidChangeWatchedFilesParams) {} 720 | 721 | fn completion(&mut self, _: TextDocumentPositionParams, completable: LSCompletable) { 722 | completable.complete(Err(Self::error_not_available(()))); 723 | } 724 | 725 | fn resolve_completion_item(&mut self, _: CompletionItem, completable: LSCompletable) { 726 | completable.complete(Err(Self::error_not_available(()))); 727 | } 728 | 729 | fn hover(&mut self, _: TextDocumentPositionParams, _: LSCompletable) { 730 | /* completable.complete(Ok(Hover{ 731 | contents: HoverContents::Markup(MarkupContent{ 732 | kind: MarkupKind::Markdown, 733 | value: String::from("# Hello World"), 734 | }), 735 | range: None, 736 | })); */ 737 | } 738 | 739 | fn execute_command(&mut self, params: ExecuteCommandParams, completable: LSCompletable>) { 740 | logging::slog_with_trace_id(|| { 741 | match self 742 | .command_provider 743 | .as_ref() 744 | .unwrap() 745 | .execute(¶ms.command, ¶ms.arguments, &self.root) 746 | { 747 | Ok(resp) => { 748 | info!("executed command successfully"; "command" => params.command.clone()); 749 | self.endpoint 750 | .send_notification( 751 | ShowMessage::METHOD, 752 | ShowMessageParams { 753 | typ: MessageType::INFO, 754 | message: format!("Command {} executed successfully.", params.command), 755 | }, 756 | ) 757 | .expect("failed to send popup/show message notification"); 758 | completable.complete(Ok(Some(resp))) 759 | } 760 | Err(err) => { 761 | error!("failed to execute command"; "command" => params.command.clone(), "error" => format!("{:?}", err)); 762 | self.endpoint 763 | .send_notification( 764 | ShowMessage::METHOD, 765 | ShowMessageParams { 766 | typ: MessageType::ERROR, 767 | message: format!("Failed to execute `{}`. 
Reason: {}", params.command, err), 768 | }, 769 | ) 770 | .expect("failed to send popup/show message notification"); 771 | completable.complete(Err(MethodError::new(32420, err.to_string(), ()))) 772 | } 773 | } 774 | }); 775 | } 776 | 777 | fn signature_help(&mut self, _: TextDocumentPositionParams, completable: LSCompletable) { 778 | completable.complete(Err(Self::error_not_available(()))); 779 | } 780 | 781 | fn goto_definition(&mut self, params: TextDocumentPositionParams, completable: LSCompletable>) { 782 | logging::slog_with_trace_id(|| { 783 | let path = PathBuf::from_url(params.text_document.uri); 784 | if !path.starts_with(&self.root) { 785 | return; 786 | } 787 | let parser = &mut self.tree_sitter.borrow_mut(); 788 | let parser_ctx = match navigation::ParserContext::new(parser, &path) { 789 | Ok(ctx) => ctx, 790 | Err(e) => { 791 | return completable.complete(Err(MethodError { 792 | code: 42069, 793 | message: format!("error building parser context: error={}, path={:?}", e, path), 794 | data: (), 795 | })) 796 | } 797 | }; 798 | 799 | match parser_ctx.find_definitions(&path, params.position) { 800 | Ok(locations) => completable.complete(Ok(locations.unwrap_or_default())), 801 | Err(e) => completable.complete(Err(MethodError { 802 | code: 42069, 803 | message: format!("error finding definitions: error={}, path={:?}", e, path), 804 | data: (), 805 | })), 806 | } 807 | }); 808 | } 809 | 810 | fn references(&mut self, params: ReferenceParams, completable: LSCompletable>) { 811 | logging::slog_with_trace_id(|| { 812 | let path = PathBuf::from_url(params.text_document_position.text_document.uri); 813 | if !path.starts_with(&self.root) { 814 | return; 815 | } 816 | let parser = &mut self.tree_sitter.borrow_mut(); 817 | let parser_ctx = match navigation::ParserContext::new(parser, &path) { 818 | Ok(ctx) => ctx, 819 | Err(e) => { 820 | return completable.complete(Err(MethodError { 821 | code: 42069, 822 | message: format!("error building parser context: error={}, path={:?}", e, path), 823 | data: (), 824 | })) 825 | } 826 | }; 827 | 828 | match parser_ctx.find_references(&path, params.text_document_position.position) { 829 | Ok(locations) => completable.complete(Ok(locations.unwrap_or_default())), 830 | Err(e) => completable.complete(Err(MethodError { 831 | code: 42069, 832 | message: format!("error finding definitions: error={}, path={:?}", e, path), 833 | data: (), 834 | })), 835 | } 836 | }); 837 | } 838 | 839 | fn document_highlight(&mut self, _: TextDocumentPositionParams, completable: LSCompletable>) { 840 | completable.complete(Err(Self::error_not_available(()))); 841 | } 842 | 843 | fn document_symbols(&mut self, params: DocumentSymbolParams, completable: LSCompletable) { 844 | logging::slog_with_trace_id(|| { 845 | let path = PathBuf::from_url(params.text_document.uri); 846 | if !path.starts_with(&self.root) { 847 | return; 848 | } 849 | let parser = &mut self.tree_sitter.borrow_mut(); 850 | let parser_ctx = match navigation::ParserContext::new(parser, &path) { 851 | Ok(ctx) => ctx, 852 | Err(e) => { 853 | return completable.complete(Err(MethodError { 854 | code: 42069, 855 | message: format!("error building parser context: error={}, path={:?}", e, path), 856 | data: (), 857 | })) 858 | } 859 | }; 860 | 861 | match parser_ctx.list_symbols(&path) { 862 | Ok(symbols) => completable.complete(Ok(DocumentSymbolResponse::from(symbols.unwrap_or_default()))), 863 | Err(e) => { 864 | return completable.complete(Err(MethodError { 865 | code: 42069, 866 | message: format!("error finding 
definitions: error={}, path={:?}", e, path), 867 | data: (), 868 | })) 869 | } 870 | } 871 | }); 872 | } 873 | 874 | fn workspace_symbols(&mut self, _: WorkspaceSymbolParams, completable: LSCompletable) { 875 | completable.complete(Err(Self::error_not_available(()))); 876 | } 877 | 878 | fn code_action(&mut self, _: CodeActionParams, completable: LSCompletable>) { 879 | completable.complete(Err(Self::error_not_available(()))); 880 | } 881 | 882 | fn code_lens(&mut self, _: CodeLensParams, completable: LSCompletable>) { 883 | completable.complete(Err(Self::error_not_available(()))); 884 | } 885 | 886 | fn code_lens_resolve(&mut self, _: CodeLens, completable: LSCompletable) { 887 | completable.complete(Err(Self::error_not_available(()))); 888 | } 889 | 890 | fn document_link(&mut self, params: DocumentLinkParams, completable: LSCompletable>) { 891 | logging::slog_with_trace_id(|| { 892 | // node for current document 893 | let curr_doc = PathBuf::from_url(params.text_document.uri); 894 | let node = match self.graph.borrow_mut().find_node(&curr_doc) { 895 | Some(n) => n, 896 | None => { 897 | warn!("document not found in graph"; "path" => curr_doc.to_str().unwrap()); 898 | completable.complete(Ok(vec![])); 899 | return; 900 | } 901 | }; 902 | 903 | let edges: Vec = self 904 | .graph 905 | .borrow() 906 | .child_node_indexes(node) 907 | .filter_map::, _>(|child| { 908 | let graph = self.graph.borrow(); 909 | graph.get_child_positions(node, child).map(|value| { 910 | let path = graph.get_node(child); 911 | let url = match Url::from_file_path(&path) { 912 | Ok(url) => url, 913 | Err(e) => { 914 | error!("error converting into url"; "path" => path.to_str().unwrap(), "error" => format!("{:?}", e)); 915 | return None; 916 | } 917 | }; 918 | 919 | Some(DocumentLink { 920 | range: Range::new( 921 | Position::new(u32::try_from(value.line).unwrap(), u32::try_from(value.start).unwrap()), 922 | Position::new(u32::try_from(value.line).unwrap(), u32::try_from(value.end).unwrap()), 923 | ), 924 | target: Some(url.clone()), 925 | tooltip: Some(url.path().to_string()), 926 | data: None, 927 | }) 928 | }).collect() 929 | }) 930 | .flatten() 931 | .collect(); 932 | debug!("document link results"; 933 | "links" => format!("{:?}", edges.iter().map(|e| (e.range, e.target.as_ref().unwrap().path())).collect::>()), 934 | "path" => curr_doc.to_str().unwrap(), 935 | ); 936 | completable.complete(Ok(edges)); 937 | }); 938 | } 939 | 940 | fn document_link_resolve(&mut self, _: DocumentLink, completable: LSCompletable) { 941 | completable.complete(Err(Self::error_not_available(()))); 942 | } 943 | 944 | fn formatting(&mut self, _: DocumentFormattingParams, completable: LSCompletable>) { 945 | completable.complete(Err(Self::error_not_available(()))); 946 | } 947 | 948 | fn range_formatting(&mut self, _: DocumentRangeFormattingParams, completable: LSCompletable>) { 949 | completable.complete(Err(Self::error_not_available(()))); 950 | } 951 | 952 | fn on_type_formatting(&mut self, _: DocumentOnTypeFormattingParams, completable: LSCompletable>) { 953 | completable.complete(Err(Self::error_not_available(()))); 954 | } 955 | 956 | fn rename(&mut self, _: RenameParams, completable: LSCompletable) { 957 | completable.complete(Err(Self::error_not_available(()))); 958 | } 959 | } 960 | -------------------------------------------------------------------------------- /server/main/src/merge_views.rs: -------------------------------------------------------------------------------- 1 | use std::cmp::min; 2 | use std::iter::Peekable; 3 | 
use std::{ 4 | collections::{HashMap, LinkedList, VecDeque}, 5 | path::{Path, PathBuf}, 6 | }; 7 | 8 | use core::slice::Iter; 9 | 10 | use petgraph::stable_graph::NodeIndex; 11 | use slog_scope::debug; 12 | 13 | use crate::graph::CachedStableGraph; 14 | use crate::source_mapper::SourceMapper; 15 | use crate::IncludePosition; 16 | 17 | /// FilialTuple represents a tuple (not really) of a child and any legitimate 18 | /// parent. Parent can be nullable in the case of the child being a top level 19 | /// node in the tree. 20 | #[derive(Hash, PartialEq, Eq, Debug, Clone, Copy)] 21 | pub struct FilialTuple { 22 | pub child: NodeIndex, 23 | pub parent: Option, 24 | } 25 | 26 | /// Merges the source strings according to the nodes comprising a tree of imports into a GLSL source string 27 | /// that can be handed off to the GLSL compiler. 28 | pub struct MergeViewBuilder<'a> { 29 | nodes: &'a [FilialTuple], 30 | nodes_peeker: Peekable>, 31 | 32 | sources: &'a HashMap, 33 | graph: &'a CachedStableGraph, 34 | source_mapper: &'a mut SourceMapper, 35 | 36 | // holds the offset into the child which has been added to the merge list for a parent. 37 | // A child can have multiple parents for a given tree, and be included multiple times 38 | // by the same parent, hence we have to track it for a ((child, parent), line) tuple 39 | // instead of just the child or (child, parent). 40 | last_offset_set: HashMap, 41 | // holds, for any given filial tuple, the iterator yielding all the positions at which the child 42 | // is included into the parent in line-sorted order. This is necessary for files that are imported 43 | // more than once into the same parent, so we can easily get the next include position. 44 | parent_child_edge_iterator: HashMap + 'a)>>, 45 | } 46 | 47 | impl<'a> MergeViewBuilder<'a> { 48 | pub fn new( 49 | nodes: &'a [FilialTuple], sources: &'a HashMap, graph: &'a CachedStableGraph, source_mapper: &'a mut SourceMapper, 50 | ) -> Self { 51 | MergeViewBuilder { 52 | nodes, 53 | nodes_peeker: nodes.iter().peekable(), 54 | sources, 55 | graph, 56 | source_mapper, 57 | last_offset_set: HashMap::new(), 58 | parent_child_edge_iterator: HashMap::new(), 59 | } 60 | } 61 | 62 | pub fn build(&mut self) -> String { 63 | // contains additionally inserted lines such as #line and other directives, preamble defines etc 64 | let mut extra_lines: Vec = Vec::new(); 65 | extra_lines.reserve((self.nodes.len() * 2) + 2); 66 | 67 | // list of source code views onto the below sources 68 | let mut merge_list: LinkedList<&'a str> = LinkedList::new(); 69 | 70 | // invariant: nodes_iter always has _at least_ one element. 
Can't save a not-file :B 71 | let first = self.nodes_peeker.next().unwrap().child; 72 | let first_path = self.graph.get_node(first); 73 | let first_source = self.sources.get(&first_path).unwrap(); 74 | 75 | // seed source_mapper with top-level file 76 | self.source_mapper.get_num(first); 77 | 78 | let version_line_offset = self.find_version_offset(first_source); 79 | let _version_char_offsets = self.char_offset_for_line(version_line_offset, first_source); 80 | // add_preamble( 81 | // version_line_offset, 82 | // version_char_offsets.1, 83 | // &first_path, 84 | // first, 85 | // first_source, 86 | // &mut merge_list, 87 | // &mut extra_lines, 88 | // source_mapper, 89 | // ); 90 | 91 | // last_offset_set.insert((first, None), version_char_offsets.1); 92 | self.set_last_offset_for_tuple(None, first, 0); 93 | 94 | // stack to keep track of the depth first traversal 95 | let mut stack = VecDeque::::new(); 96 | 97 | self.create_merge_views(&mut merge_list, &mut extra_lines, &mut stack); 98 | 99 | // now we add a view of the remainder of the root file 100 | 101 | let offset = self.get_last_offset_for_tuple(None, first).unwrap(); 102 | 103 | let len = first_source.len(); 104 | merge_list.push_back(&first_source[min(offset, len)..]); 105 | 106 | let total_len = merge_list.iter().fold(0, |a, b| a + b.len()); 107 | 108 | let mut merged = String::with_capacity(total_len); 109 | merged.extend(merge_list); 110 | 111 | merged 112 | } 113 | 114 | fn create_merge_views(&mut self, merge_list: &mut LinkedList<&'a str>, extra_lines: &mut Vec, stack: &mut VecDeque) { 115 | loop { 116 | let n = match self.nodes_peeker.next() { 117 | Some(n) => n, 118 | None => return, 119 | }; 120 | 121 | // invariant: never None as only the first element in `nodes` should have a None, which is popped off in the calling function 122 | let (parent, child) = (n.parent.unwrap(), n.child); 123 | // gets the next include position for the filial tuple, seeding if this is the first time querying this tuple 124 | let edge = self 125 | .parent_child_edge_iterator 126 | .entry(*n) 127 | .or_insert_with(|| { 128 | let child_positions = self.graph.get_child_positions(parent, child); 129 | Box::new(child_positions) 130 | }) 131 | .next() 132 | .unwrap(); 133 | let parent_path = self.graph.get_node(parent).clone(); 134 | let child_path = self.graph.get_node(child).clone(); 135 | 136 | let parent_source = self.sources.get(&parent_path).unwrap(); 137 | let (char_for_line, char_following_line) = self.char_offset_for_line(edge.line, parent_source); 138 | 139 | let offset = *self 140 | .set_last_offset_for_tuple(stack.back().copied(), parent, char_following_line) 141 | .get_or_insert(0); 142 | 143 | debug!("creating view to start child file"; 144 | "parent" => parent_path.to_str().unwrap(), "child" => child_path.to_str().unwrap(), 145 | "grandparent" => stack.back().copied().map(|g| self.graph.get_node(g).to_str().unwrap().to_string()), // self.graph.get_node().to_str().unwrap(), 146 | "last_parent_offset" => offset, "line" => edge.line, "char_for_line" => char_for_line, 147 | "char_following_line" => char_following_line, 148 | ); 149 | 150 | merge_list.push_back(&parent_source[offset..char_for_line]); 151 | self.add_opening_line_directive(&child_path, child, merge_list, extra_lines); 152 | 153 | match self.nodes_peeker.peek() { 154 | Some(next) => { 155 | let next = *next; 156 | // if the next pair's parent is not a child of the current pair, we dump the rest of this childs source 157 | if next.parent.unwrap() != child { 158 | let 
child_source = self.sources.get(&child_path).unwrap(); 159 | // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad 160 | let offset = { 161 | match child_source.ends_with('\n') { 162 | true => child_source.len() - 1, 163 | false => child_source.len(), 164 | } 165 | }; 166 | merge_list.push_back(&child_source[..offset]); 167 | self.set_last_offset_for_tuple(Some(parent), child, 0); 168 | // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line 169 | self.add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines); 170 | // if the next pair's parent is not the current pair's parent, we need to bubble up 171 | if stack.contains(&next.parent.unwrap()) { 172 | return; 173 | } 174 | continue; 175 | } 176 | 177 | stack.push_back(parent); 178 | self.create_merge_views(merge_list, extra_lines, stack); 179 | stack.pop_back(); 180 | 181 | let offset = self.get_last_offset_for_tuple(Some(parent), child).unwrap(); 182 | let child_source = self.sources.get(&child_path).unwrap(); 183 | // this evaluates to false once the file contents have been exhausted aka offset = child_source.len() + 1 184 | let end_offset = match child_source.ends_with('\n') { 185 | true => 1, 186 | false => 0, 187 | }; 188 | if offset < child_source.len() - end_offset { 189 | // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad 190 | merge_list.push_back(&child_source[offset..child_source.len() - end_offset]); 191 | self.set_last_offset_for_tuple(Some(parent), child, 0); 192 | } 193 | 194 | // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line 195 | self.add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines); 196 | 197 | // we need to check the next item at the point of original return further down the callstack 198 | if self.nodes_peeker.peek().is_some() && stack.contains(&self.nodes_peeker.peek().unwrap().parent.unwrap()) { 199 | return; 200 | } 201 | } 202 | None => { 203 | let child_source = self.sources.get(&child_path).unwrap(); 204 | // if ends in \n\n, we want to exclude the last \n for some reason. 
Ask optilad 205 | let offset = match child_source.ends_with('\n') { 206 | true => child_source.len() - 1, 207 | false => child_source.len(), 208 | }; 209 | merge_list.push_back(&child_source[..offset]); 210 | self.set_last_offset_for_tuple(Some(parent), child, 0); 211 | // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line 212 | self.add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines); 213 | } 214 | } 215 | } 216 | } 217 | 218 | fn set_last_offset_for_tuple(&mut self, parent: Option, child: NodeIndex, offset: usize) -> Option { 219 | debug!("inserting last offset"; 220 | "parent" => parent.map(|p| self.graph.get_node(p).to_str().unwrap().to_string()), 221 | "child" => self.graph.get_node(child).to_str().unwrap().to_string(), 222 | "offset" => offset); 223 | self.last_offset_set.insert(FilialTuple { child, parent }, offset) 224 | } 225 | 226 | fn get_last_offset_for_tuple(&self, parent: Option, child: NodeIndex) -> Option { 227 | self.last_offset_set.get(&FilialTuple { child, parent }).copied() 228 | } 229 | 230 | // returns the character offset + 1 of the end of line number `line` and the character 231 | // offset + 1 for the end of the line after the previous one 232 | fn char_offset_for_line(&self, line_num: usize, source: &str) -> (usize, usize) { 233 | let mut char_for_line: usize = 0; 234 | let mut char_following_line: usize = 0; 235 | for (n, line) in source.lines().enumerate() { 236 | if n == line_num { 237 | char_following_line += line.len() + 1; 238 | break; 239 | } 240 | char_for_line += line.len() + 1; 241 | char_following_line = char_for_line; 242 | } 243 | (char_for_line, char_following_line) 244 | } 245 | 246 | fn find_version_offset(&self, source: &str) -> usize { 247 | source 248 | .lines() 249 | .enumerate() 250 | .find(|(_, line)| line.starts_with("#version ")) 251 | .map_or(0, |(i, _)| i) 252 | } 253 | 254 | // fn add_preamble<'a>( 255 | // version_line_offset: usize, version_char_offset: usize, path: &Path, node: NodeIndex, source: &'a str, 256 | // merge_list: &mut LinkedList<&'a str>, extra_lines: &mut Vec, source_mapper: &mut SourceMapper, 257 | // ) { 258 | // // TODO: Optifine #define preabmle 259 | // merge_list.push_back(&source[..version_char_offset]); 260 | // let google_line_directive = format!( 261 | // "#extension GL_GOOGLE_cpp_style_line_directive : enable\n#line {} {} // {}\n", 262 | // // +2 because 0 indexed but #line is 1 indexed and references the *following* line 263 | // version_line_offset + 2, 264 | // source_mapper.get_num(node), 265 | // path.to_str().unwrap().replace('\\', "\\\\"), 266 | // ); 267 | // extra_lines.push(google_line_directive); 268 | // unsafe_get_and_insert(merge_list, extra_lines); 269 | // } 270 | 271 | fn add_opening_line_directive( 272 | &mut self, path: &Path, node: NodeIndex, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec, 273 | ) { 274 | let line_directive = format!( 275 | "#line 1 {} // {}\n", 276 | self.source_mapper.get_num(node), 277 | path.to_str().unwrap().replace('\\', "\\\\") 278 | ); 279 | extra_lines.push(line_directive); 280 | self.unsafe_get_and_insert(merge_list, extra_lines); 281 | } 282 | 283 | fn add_closing_line_directive( 284 | &mut self, line: usize, path: &Path, node: NodeIndex, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec, 285 | ) { 286 | // Optifine doesn't seem to add a leading newline if the previous line was a #line directive 287 | let line_directive = if let Some(l) = merge_list.back() { 288 | if 
l.trim().starts_with("#line") { 289 | format!( 290 | "#line {} {} // {}\n", 291 | line, 292 | self.source_mapper.get_num(node), 293 | path.to_str().unwrap().replace('\\', "\\\\") 294 | ) 295 | } else { 296 | format!( 297 | "\n#line {} {} // {}\n", 298 | line, 299 | self.source_mapper.get_num(node), 300 | path.to_str().unwrap().replace('\\', "\\\\") 301 | ) 302 | } 303 | } else { 304 | format!( 305 | "\n#line {} {} // {}\n", 306 | line, 307 | self.source_mapper.get_num(node), 308 | path.to_str().unwrap().replace('\\', "\\\\") 309 | ) 310 | }; 311 | 312 | extra_lines.push(line_directive); 313 | self.unsafe_get_and_insert(merge_list, extra_lines); 314 | } 315 | 316 | fn unsafe_get_and_insert(&self, merge_list: &mut LinkedList<&str>, extra_lines: &[String]) { 317 | // :^) 318 | unsafe { 319 | let vec_ptr_offset = extra_lines.as_ptr().add(extra_lines.len() - 1); 320 | merge_list.push_back(&vec_ptr_offset.as_ref().unwrap()[..]); 321 | } 322 | } 323 | } 324 | 325 | #[cfg(test)] 326 | mod merge_view_test { 327 | use std::fs; 328 | use std::path::PathBuf; 329 | 330 | use crate::merge_views::MergeViewBuilder; 331 | use crate::source_mapper::SourceMapper; 332 | use crate::test::{copy_to_and_set_root, new_temp_server}; 333 | use crate::IncludePosition; 334 | 335 | #[test] 336 | #[logging_macro::log_scope] 337 | fn test_generate_merge_list_01() { 338 | let mut server = new_temp_server(None); 339 | 340 | let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/01", &mut server); 341 | server.endpoint.request_shutdown(); 342 | 343 | let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh")); 344 | let common_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("common.glsl")); 345 | 346 | server 347 | .graph 348 | .borrow_mut() 349 | .add_edge(final_idx, common_idx, IncludePosition { line: 2, start: 0, end: 0 }); 350 | 351 | let nodes = server.get_dfs_for_node(final_idx).unwrap(); 352 | let sources = server.load_sources(&nodes).unwrap(); 353 | 354 | let graph_borrow = server.graph.borrow(); 355 | let mut source_mapper = SourceMapper::new(0); 356 | let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build(); 357 | 358 | let merge_file = tmp_path.join("shaders").join("final.fsh.merge"); 359 | 360 | let mut truth = fs::read_to_string(merge_file).unwrap(); 361 | // truth = truth.replacen( 362 | // "!!", 363 | // &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"), 364 | // 1, 365 | // ); 366 | truth = truth.replacen( 367 | "!!", 368 | &tmp_path.join("shaders").join("common.glsl").to_str().unwrap().replace('\\', "\\\\"), 369 | 1, 370 | ); 371 | truth = truth.replace( 372 | "!!", 373 | &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"), 374 | ); 375 | 376 | assert_eq!(result, truth); 377 | } 378 | 379 | #[test] 380 | #[logging_macro::log_scope] 381 | fn test_generate_merge_list_02() { 382 | let mut server = new_temp_server(None); 383 | 384 | let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/02", &mut server); 385 | server.endpoint.request_shutdown(); 386 | 387 | let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh")); 388 | let test_idx = server 389 | .graph 390 | .borrow_mut() 391 | .add_node(&tmp_path.join("shaders").join("utils").join("test.glsl")); 392 | let burger_idx = server 393 | .graph 394 | .borrow_mut() 395 | .add_node(&tmp_path.join("shaders").join("utils").join("burger.glsl")); 396 | let 
sample_idx = server 397 | .graph 398 | .borrow_mut() 399 | .add_node(&tmp_path.join("shaders").join("utils").join("sample.glsl")); 400 | 401 | server 402 | .graph 403 | .borrow_mut() 404 | .add_edge(final_idx, sample_idx, IncludePosition { line: 2, start: 0, end: 0 }); 405 | server 406 | .graph 407 | .borrow_mut() 408 | .add_edge(sample_idx, burger_idx, IncludePosition { line: 4, start: 0, end: 0 }); 409 | server 410 | .graph 411 | .borrow_mut() 412 | .add_edge(sample_idx, test_idx, IncludePosition { line: 6, start: 0, end: 0 }); 413 | 414 | let nodes = server.get_dfs_for_node(final_idx).unwrap(); 415 | let sources = server.load_sources(&nodes).unwrap(); 416 | 417 | let graph_borrow = server.graph.borrow(); 418 | let mut source_mapper = SourceMapper::new(0); 419 | let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build(); 420 | 421 | let merge_file = tmp_path.join("shaders").join("final.fsh.merge"); 422 | 423 | let mut truth = fs::read_to_string(merge_file).unwrap(); 424 | 425 | // truth = truth.replacen( 426 | // "!!", 427 | // &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"), 428 | // 1, 429 | // ); 430 | 431 | for file in &["sample.glsl", "burger.glsl", "sample.glsl", "test.glsl", "sample.glsl"] { 432 | let path = tmp_path.clone(); 433 | truth = truth.replacen( 434 | "!!", 435 | &path 436 | .join("shaders") 437 | .join("utils") 438 | .join(file) 439 | .to_str() 440 | .unwrap() 441 | .replace('\\', "\\\\"), 442 | 1, 443 | ); 444 | } 445 | truth = truth.replacen( 446 | "!!", 447 | &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"), 448 | 1, 449 | ); 450 | 451 | assert_eq!(result, truth); 452 | } 453 | 454 | #[test] 455 | #[logging_macro::log_scope] 456 | fn test_generate_merge_list_03() { 457 | let mut server = new_temp_server(None); 458 | 459 | let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/03", &mut server); 460 | server.endpoint.request_shutdown(); 461 | 462 | let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh")); 463 | let test_idx = server 464 | .graph 465 | .borrow_mut() 466 | .add_node(&tmp_path.join("shaders").join("utils").join("test.glsl")); 467 | let burger_idx = server 468 | .graph 469 | .borrow_mut() 470 | .add_node(&tmp_path.join("shaders").join("utils").join("burger.glsl")); 471 | let sample_idx = server 472 | .graph 473 | .borrow_mut() 474 | .add_node(&tmp_path.join("shaders").join("utils").join("sample.glsl")); 475 | 476 | server 477 | .graph 478 | .borrow_mut() 479 | .add_edge(final_idx, sample_idx, IncludePosition { line: 2, start: 0, end: 0 }); 480 | server 481 | .graph 482 | .borrow_mut() 483 | .add_edge(sample_idx, burger_idx, IncludePosition { line: 4, start: 0, end: 0 }); 484 | server 485 | .graph 486 | .borrow_mut() 487 | .add_edge(sample_idx, test_idx, IncludePosition { line: 6, start: 0, end: 0 }); 488 | 489 | let nodes = server.get_dfs_for_node(final_idx).unwrap(); 490 | let sources = server.load_sources(&nodes).unwrap(); 491 | 492 | let graph_borrow = server.graph.borrow(); 493 | let mut source_mapper = SourceMapper::new(0); 494 | let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build(); 495 | 496 | let merge_file = tmp_path.join("shaders").join("final.fsh.merge"); 497 | 498 | let mut truth = fs::read_to_string(merge_file).unwrap(); 499 | 500 | // truth = truth.replacen( 501 | // "!!", 502 | // 
&tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"), 503 | // 1, 504 | // ); 505 | 506 | for file in &["sample.glsl", "burger.glsl", "sample.glsl", "test.glsl", "sample.glsl"] { 507 | let path = tmp_path.clone(); 508 | truth = truth.replacen( 509 | "!!", 510 | &path 511 | .join("shaders") 512 | .join("utils") 513 | .join(file) 514 | .to_str() 515 | .unwrap() 516 | .replace('\\', "\\\\"), 517 | 1, 518 | ); 519 | } 520 | truth = truth.replacen( 521 | "!!", 522 | &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"), 523 | 1, 524 | ); 525 | 526 | assert_eq!(result, truth); 527 | } 528 | 529 | #[test] 530 | #[logging_macro::log_scope] 531 | fn test_generate_merge_list_04() { 532 | let mut server = new_temp_server(None); 533 | 534 | let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/04", &mut server); 535 | server.endpoint.request_shutdown(); 536 | 537 | let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh")); 538 | let utilities_idx = server 539 | .graph 540 | .borrow_mut() 541 | .add_node(&tmp_path.join("shaders").join("utils").join("utilities.glsl")); 542 | let stuff1_idx = server 543 | .graph 544 | .borrow_mut() 545 | .add_node(&tmp_path.join("shaders").join("utils").join("stuff1.glsl")); 546 | let stuff2_idx = server 547 | .graph 548 | .borrow_mut() 549 | .add_node(&tmp_path.join("shaders").join("utils").join("stuff2.glsl")); 550 | let matrices_idx = server 551 | .graph 552 | .borrow_mut() 553 | .add_node(&tmp_path.join("shaders").join("lib").join("matrices.glsl")); 554 | 555 | server 556 | .graph 557 | .borrow_mut() 558 | .add_edge(final_idx, utilities_idx, IncludePosition { line: 2, start: 0, end: 0 }); 559 | server 560 | .graph 561 | .borrow_mut() 562 | .add_edge(utilities_idx, stuff1_idx, IncludePosition { line: 0, start: 0, end: 0 }); 563 | server 564 | .graph 565 | .borrow_mut() 566 | .add_edge(utilities_idx, stuff2_idx, IncludePosition { line: 1, start: 0, end: 0 }); 567 | server 568 | .graph 569 | .borrow_mut() 570 | .add_edge(final_idx, matrices_idx, IncludePosition { line: 3, start: 0, end: 0 }); 571 | 572 | let nodes = server.get_dfs_for_node(final_idx).unwrap(); 573 | let sources = server.load_sources(&nodes).unwrap(); 574 | 575 | let graph_borrow = server.graph.borrow(); 576 | let mut source_mapper = SourceMapper::new(0); 577 | let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build(); 578 | 579 | let merge_file = tmp_path.join("shaders").join("final.fsh.merge"); 580 | 581 | let mut truth = fs::read_to_string(merge_file).unwrap(); 582 | 583 | for file in &[ 584 | // PathBuf::new().join("final.fsh").to_str().unwrap(), 585 | PathBuf::new().join("utils").join("utilities.glsl").to_str().unwrap(), 586 | PathBuf::new().join("utils").join("stuff1.glsl").to_str().unwrap(), 587 | PathBuf::new().join("utils").join("utilities.glsl").to_str().unwrap(), 588 | PathBuf::new().join("utils").join("stuff2.glsl").to_str().unwrap(), 589 | PathBuf::new().join("utils").join("utilities.glsl").to_str().unwrap(), 590 | PathBuf::new().join("final.fsh").to_str().unwrap(), 591 | PathBuf::new().join("lib").join("matrices.glsl").to_str().unwrap(), 592 | PathBuf::new().join("final.fsh").to_str().unwrap(), 593 | ] { 594 | let path = tmp_path.clone(); 595 | truth = truth.replacen("!!", &path.join("shaders").join(file).to_str().unwrap().replace('\\', "\\\\"), 1); 596 | } 597 | 598 | assert_eq!(result, truth); 599 | } 600 | 601 | #[test] 602 | 
#[logging_macro::log_scope] 603 | fn test_generate_merge_list_06() { 604 | let mut server = new_temp_server(None); 605 | 606 | let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/06", &mut server); 607 | server.endpoint.request_shutdown(); 608 | 609 | let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh")); 610 | let test_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("test.glsl")); 611 | 612 | server 613 | .graph 614 | .borrow_mut() 615 | .add_edge(final_idx, test_idx, IncludePosition { line: 3, start: 0, end: 0 }); 616 | server 617 | .graph 618 | .borrow_mut() 619 | .add_edge(final_idx, test_idx, IncludePosition { line: 5, start: 0, end: 0 }); 620 | 621 | let nodes = server.get_dfs_for_node(final_idx).unwrap(); 622 | let sources = server.load_sources(&nodes).unwrap(); 623 | 624 | let graph_borrow = server.graph.borrow(); 625 | let mut source_mapper = SourceMapper::new(0); 626 | let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build(); 627 | 628 | let merge_file = tmp_path.join("shaders").join("final.fsh.merge"); 629 | 630 | let mut truth = fs::read_to_string(merge_file).unwrap(); 631 | 632 | for file in &[ 633 | // PathBuf::new().join("final.fsh").to_str().unwrap(), 634 | PathBuf::new().join("test.glsl").to_str().unwrap(), 635 | PathBuf::new().join("final.fsh").to_str().unwrap(), 636 | PathBuf::new().join("test.glsl").to_str().unwrap(), 637 | PathBuf::new().join("final.fsh").to_str().unwrap(), 638 | ] { 639 | let path = tmp_path.clone(); 640 | truth = truth.replacen("!!", &path.join("shaders").join(file).to_str().unwrap().replace('\\', "\\\\"), 1); 641 | } 642 | 643 | assert_eq!(result, truth); 644 | } 645 | } 646 | -------------------------------------------------------------------------------- /server/main/src/navigation.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, fs::read_to_string, path::Path, vec}; 2 | 3 | use anyhow::Result; 4 | use rust_lsp::lsp_types::{DocumentSymbol, Location, Position, Range, SymbolKind}; 5 | use slog_scope::{debug, info, trace}; 6 | use tree_sitter::{Node, Parser, Point, Query, QueryCursor, Tree}; 7 | use url::Url; 8 | 9 | use crate::linemap::LineMap; 10 | 11 | #[derive(Clone, Debug, Hash, PartialEq, Eq, Default)] 12 | struct SymbolName(String); 13 | 14 | impl SymbolName { 15 | // construct a new SymbolName from a node and its node ID for overload disambiguating. 
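// For example (identifiers and bracketed node ids below are illustrative, not
// taken from the repository's test data): a field `pos` of `struct Light`
// becomes "Light[12]/pos[15]", a local `x` declared in the body of a function
// `shade` becomes "shade[30]/x[41]", and a top-level symbol is simply
// "name[id]". `parent()` strips the trailing "/name[id]" segment, which is how
// `list_symbols` later nests child symbols under their enclosing function or
// struct.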
16 | fn new(node: &Node, source: &str, node_id: usize) -> Self { 17 | let mut fqname = vec![format!("{}[{}]", node.utf8_text(source.as_bytes()).unwrap(), node_id)]; 18 | 19 | // first node will always have a parent 20 | let mut prev = *node; 21 | let mut node = node.parent().unwrap(); 22 | 23 | loop { 24 | match (node.kind(), prev.kind()) { 25 | ("function_definition", "compound_statement") => { 26 | let func_ident = node.child_by_field_name("declarator").unwrap().child(0).unwrap(); 27 | fqname.push(format!("{}[{}]", func_ident.utf8_text(source.as_bytes()).unwrap(), func_ident.id())); 28 | } 29 | ("struct_specifier", "field_declaration_list") => { 30 | let struct_ident = node.child_by_field_name("name").unwrap(); 31 | fqname.push(format!( 32 | "{}[{}]", 33 | struct_ident.utf8_text(source.as_bytes()).unwrap(), 34 | struct_ident.id() 35 | )); 36 | } 37 | _ => (), 38 | } 39 | 40 | prev = node; 41 | node = match node.parent() { 42 | Some(n) => n, 43 | None => break, 44 | }; 45 | } 46 | 47 | fqname.reverse(); 48 | SymbolName(fqname.join("/")) 49 | } 50 | 51 | fn parent(&self) -> Option { 52 | self.0.rsplit_once('/').map(|(left, _)| SymbolName(left.to_string())) 53 | } 54 | } 55 | 56 | impl slog::Value for SymbolName { 57 | fn serialize(&self, record: &slog::Record, key: slog::Key, serializer: &mut dyn slog::Serializer) -> slog::Result { 58 | self.0.serialize(record, key, serializer) 59 | } 60 | } 61 | 62 | macro_rules! find_function_def_str { 63 | () => { 64 | r#" 65 | ( 66 | (function_declarator 67 | (identifier) @function) 68 | (#match? @function "^{}$") 69 | ) 70 | "# 71 | }; 72 | } 73 | 74 | macro_rules! find_function_refs_str { 75 | () => { 76 | r#" 77 | ( 78 | (call_expression 79 | (identifier) @call) 80 | (#match? @call "^{}$") 81 | ) 82 | "# 83 | }; 84 | } 85 | 86 | macro_rules! find_variable_def_str { 87 | () => { 88 | r#" 89 | [ 90 | (init_declarator 91 | (identifier) @variable) 92 | 93 | (parameter_declaration 94 | (identifier) @variable) 95 | 96 | (declaration 97 | (identifier) @variable) 98 | 99 | (#match? @variable "^{}$") 100 | ] 101 | "# 102 | }; 103 | } 104 | 105 | const LIST_SYMBOLS_STR: &str = r#" 106 | ; global consts 107 | (declaration 108 | (type_qualifier) @const_qualifier 109 | (init_declarator 110 | (identifier) @const_ident)) 111 | (#match? 
@const_qualifier "^const") 112 | 113 | ; global uniforms, varyings, struct variables etc 114 | (translation_unit 115 | (declaration 116 | (identifier) @ident)) 117 | 118 | ; #defines 119 | (preproc_def 120 | (identifier) @define_ident) 121 | 122 | ; function definitions 123 | (function_declarator 124 | (identifier) @func_ident) 125 | 126 | ; struct definitions 127 | (struct_specifier 128 | (type_identifier) @struct_ident) 129 | 130 | ; struct fields 131 | (struct_specifier 132 | (field_declaration_list 133 | (field_declaration 134 | [ 135 | (field_identifier) @field_ident 136 | (array_declarator 137 | (field_identifier) @field_ident) 138 | ])) @field_list) 139 | "#; 140 | 141 | pub struct ParserContext<'a> { 142 | source: String, 143 | tree: Tree, 144 | linemap: LineMap, 145 | parser: &'a mut Parser, 146 | } 147 | 148 | impl<'a> ParserContext<'a> { 149 | pub fn new(parser: &'a mut Parser, path: &Path) -> Result { 150 | let source = read_to_string(path)?; 151 | 152 | let tree = parser.parse(&source, None).unwrap(); 153 | 154 | let linemap = LineMap::new(&source); 155 | 156 | Ok(ParserContext { 157 | source, 158 | tree, 159 | linemap, 160 | parser, 161 | }) 162 | } 163 | 164 | pub fn list_symbols(&self, _path: &Path) -> Result>> { 165 | let query = Query::new(tree_sitter_glsl::language(), LIST_SYMBOLS_STR)?; 166 | let mut query_cursor = QueryCursor::new(); 167 | 168 | let mut parent_child_vec: Vec<(Option, DocumentSymbol)> = vec![]; 169 | let mut fqname_to_index: HashMap = HashMap::new(); 170 | 171 | for (m, _) in query_cursor.captures(&query, self.root_node(), self.source.as_bytes()) { 172 | if m.captures.is_empty() { 173 | continue; 174 | } 175 | 176 | let mut capture_iter = m.captures.iter(); 177 | 178 | let capture = capture_iter.next().unwrap(); 179 | let capture_name = query.capture_names()[capture.index as usize].as_str(); 180 | 181 | trace!("next capture name"; "name" => capture_name, "capture" => format!("{:?}", capture)); 182 | 183 | let (kind, node) = match capture_name { 184 | "const_qualifier" => (SymbolKind::CONSTANT, capture_iter.next().unwrap().node), 185 | "ident" => (SymbolKind::VARIABLE, capture.node), 186 | "func_ident" => (SymbolKind::FUNCTION, capture.node), 187 | "define_ident" => (SymbolKind::STRING, capture.node), 188 | "struct_ident" => (SymbolKind::STRUCT, capture.node), 189 | "field_list" => (SymbolKind::FIELD, capture_iter.next().unwrap().node), 190 | _ => (SymbolKind::NULL, capture.node), 191 | }; 192 | 193 | let range = Range { 194 | start: Position { 195 | line: node.start_position().row as u32, 196 | character: node.start_position().column as u32, 197 | }, 198 | end: Position { 199 | line: node.end_position().row as u32, 200 | character: node.end_position().column as u32, 201 | }, 202 | }; 203 | 204 | let name = node.utf8_text(self.source.as_bytes()).unwrap().to_string(); 205 | 206 | let fqname = SymbolName::new(&node, self.source.as_str(), node.id()); 207 | 208 | debug!("found symbol"; "node_name" => &name, "kind" => format!("{:?}", kind), "fqname" => &fqname); 209 | 210 | let child_symbol = DocumentSymbol { 211 | name, 212 | detail: None, 213 | kind, 214 | tags: None, 215 | deprecated: None, 216 | range, 217 | selection_range: range, 218 | children: None, 219 | }; 220 | parent_child_vec.push((fqname.parent(), child_symbol)); 221 | trace!("inserting fqname"; "fqname" => &fqname, "index" => parent_child_vec.len() - 1); 222 | fqname_to_index.insert(fqname, parent_child_vec.len() - 1); 223 | } 224 | 225 | // let mut symbols = vec![]; 226 | for i in 
1..parent_child_vec.len() { 227 | let (left, right) = parent_child_vec.split_at_mut(i); 228 | let parent = &right[0].0; 229 | let child = &right[0].1; 230 | if let Some(parent) = parent { 231 | trace!("finding parent"; "parent_symbol_name" => &parent, "child" => format!("{:?}", child), "split_point" => i, "left_len" => left.len(), "right_len" => right.len()); 232 | let parent_index = fqname_to_index.get(parent).unwrap(); 233 | let parent_sym = &mut left[*parent_index]; 234 | parent_sym.1.children.get_or_insert_default().push(right[0].1.clone()) 235 | } 236 | } 237 | 238 | let symbols = parent_child_vec 239 | .iter() 240 | .filter(|tuple| tuple.0.is_none()) 241 | .map(|tuple| tuple.1.clone()) 242 | .collect(); 243 | 244 | Ok(Some(symbols)) 245 | } 246 | 247 | pub fn find_definitions(&self, path: &Path, point: Position) -> Result>> { 248 | let current_node = match self.find_node_at_point(point) { 249 | Some(node) => node, 250 | None => return Ok(None), 251 | }; 252 | 253 | let parent = match current_node.parent() { 254 | Some(parent) => parent, 255 | None => return Ok(None), 256 | }; 257 | 258 | debug!("matching location lookup method for parent-child tuple"; "parent" => parent.kind(), "child" => current_node.kind()); 259 | 260 | let locations = match (current_node.kind(), parent.kind()) { 261 | (_, "call_expression") => { 262 | let query_str = format!(find_function_def_str!(), current_node.utf8_text(self.source.as_bytes())?); 263 | self.simple_global_search(path, &query_str)? 264 | } 265 | ("identifier", "argument_list") 266 | | ("identifier", "field_expression") 267 | | ("identifier", "binary_expression") 268 | | ("identifier", "assignment_expression") => self.tree_climbing_search(path, current_node)?, 269 | _ => return Ok(None), 270 | }; 271 | 272 | info!("finished searching for definitions"; "count" => locations.len(), "definitions" => format!("{:?}", locations)); 273 | 274 | Ok(Some(locations)) 275 | } 276 | 277 | pub fn find_references(&self, path: &Path, point: Position) -> Result>> { 278 | let current_node = match self.find_node_at_point(point) { 279 | Some(node) => node, 280 | None => return Ok(None), 281 | }; 282 | 283 | let parent = match current_node.parent() { 284 | Some(parent) => parent, 285 | None => return Ok(None), 286 | }; 287 | 288 | let locations = match (current_node.kind(), parent.kind()) { 289 | (_, "function_declarator") => { 290 | let query_str = format!(find_function_refs_str!(), current_node.utf8_text(self.source.as_bytes())?); 291 | self.simple_global_search(path, &query_str)? 
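// As an illustration (the function name `getSunVector` is hypothetical), the
// query string built above from find_function_refs_str! expands to:
//     (
//       (call_expression
//         (identifier) @call)
//       (#match? @call "^getSunVector$")
//     )
// i.e. it matches every call expression whose callee identifier equals the
// name under the cursor exactly.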
292 | } 293 | _ => return Ok(None), 294 | }; 295 | 296 | info!("finished searching for references"; "count" => locations.len(), "references" => format!("{:?}", locations)); 297 | 298 | Ok(Some(locations)) 299 | } 300 | 301 | fn tree_climbing_search(&self, path: &Path, start_node: Node) -> Result> { 302 | let mut locations = vec![]; 303 | 304 | let node_text = start_node.utf8_text(self.source.as_bytes())?; 305 | 306 | let query_str = format!(find_variable_def_str!(), node_text); 307 | 308 | debug!("built query string"; "query" => &query_str); 309 | 310 | let mut parent = start_node.parent(); 311 | 312 | loop { 313 | if parent.is_none() { 314 | trace!("no more parent left, found nothing"); 315 | break; 316 | } 317 | 318 | let query = Query::new(tree_sitter_glsl::language(), &query_str)?; 319 | let mut query_cursor = QueryCursor::new(); 320 | 321 | trace!("running tree-sitter query for node"; "node" => format!("{:?}", parent.unwrap()), "node_text" => parent.unwrap().utf8_text(self.source.as_bytes()).unwrap()); 322 | 323 | for m in query_cursor.matches(&query, parent.unwrap(), self.source.as_bytes()) { 324 | for capture in m.captures { 325 | let start = capture.node.start_position(); 326 | let end = capture.node.end_position(); 327 | 328 | locations.push(Location { 329 | uri: Url::from_file_path(path).unwrap(), 330 | range: Range { 331 | start: Position { 332 | line: start.row as u32, 333 | character: start.column as u32, 334 | }, 335 | end: Position { 336 | line: end.row as u32, 337 | character: end.column as u32, 338 | }, 339 | }, 340 | }); 341 | } 342 | } 343 | 344 | if !locations.is_empty() { 345 | break; 346 | } 347 | 348 | parent = parent.unwrap().parent(); 349 | } 350 | 351 | Ok(locations) 352 | } 353 | 354 | fn simple_global_search(&self, path: &Path, query_str: &str) -> Result> { 355 | let query = Query::new(tree_sitter_glsl::language(), query_str)?; 356 | let mut query_cursor = QueryCursor::new(); 357 | 358 | let mut locations = vec![]; 359 | 360 | for m in query_cursor.matches(&query, self.root_node(), self.source.as_bytes()) { 361 | for capture in m.captures { 362 | let start = capture.node.start_position(); 363 | let end = capture.node.end_position(); 364 | 365 | locations.push(Location { 366 | uri: Url::from_file_path(path).unwrap(), 367 | range: Range { 368 | start: Position { 369 | line: start.row as u32, 370 | character: start.column as u32, 371 | }, 372 | end: Position { 373 | line: end.row as u32, 374 | character: end.column as u32, 375 | }, 376 | }, 377 | }); 378 | } 379 | } 380 | 381 | Ok(locations) 382 | } 383 | 384 | fn root_node(&self) -> Node { 385 | self.tree.root_node() 386 | } 387 | 388 | fn find_node_at_point(&self, pos: Position) -> Option { 389 | // if we're at the end of an ident, we need to look _back_ one char instead 390 | // for tree-sitter to find the right node. 
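// For example, with the cursor sitting immediately after `foo` in `foo(`, the
// byte at the cursor offset is '(' (not alphabetic), so the lookup range is
// widened one column to the left and tree-sitter resolves the `foo` identifier
// rather than the argument list that follows it.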
391 | let look_behind = { 392 | let offset = self.linemap.offset_for_position(pos); 393 | let char_at = self.source.as_bytes()[offset]; 394 | trace!("looking for non-alpha for point adjustment"; 395 | "offset" => offset, 396 | "char" => char_at as char, 397 | "point" => format!("{:?}", pos), 398 | "look_behind" => !char_at.is_ascii_alphabetic()); 399 | !char_at.is_ascii_alphabetic() 400 | }; 401 | 402 | let mut start = Point { 403 | row: pos.line as usize, 404 | column: pos.character as usize, 405 | }; 406 | let mut end = Point { 407 | row: pos.line as usize, 408 | column: pos.character as usize, 409 | }; 410 | 411 | if look_behind { 412 | start.column -= 1; 413 | } else { 414 | end.column += 1; 415 | } 416 | 417 | match self.root_node().named_descendant_for_point_range(start, end) { 418 | Some(node) => { 419 | debug!("found a node"; 420 | "node" => format!("{:?}", node), 421 | "text" => node.utf8_text(self.source.as_bytes()).unwrap(), 422 | "start" => format!("{}", start), 423 | "end" => format!("{}", end)); 424 | Some(node) 425 | } 426 | None => None, 427 | } 428 | } 429 | } 430 | -------------------------------------------------------------------------------- /server/main/src/opengl.rs: -------------------------------------------------------------------------------- 1 | use std::ffi::{CStr, CString}; 2 | use std::ptr; 3 | 4 | use slog_scope::info; 5 | 6 | #[cfg(test)] 7 | use mockall::automock; 8 | 9 | #[cfg_attr(test, automock)] 10 | pub trait ShaderValidator { 11 | fn validate(&self, tree_type: super::TreeType, source: &str) -> Option; 12 | fn vendor(&self) -> String; 13 | } 14 | 15 | pub struct OpenGlContext { 16 | _ctx: glutin::Context, 17 | } 18 | 19 | impl OpenGlContext { 20 | pub fn new() -> OpenGlContext { 21 | let events_loop = glutin::event_loop::EventLoop::new(); 22 | let gl_window = glutin::ContextBuilder::new() 23 | .build_headless(&*events_loop, glutin::dpi::PhysicalSize::new(1, 1)) 24 | .unwrap(); 25 | 26 | let gl_window = unsafe { 27 | let gl_window = gl_window.make_current().unwrap(); 28 | gl::load_with(|symbol| gl_window.get_proc_address(symbol) as *const _); 29 | gl_window 30 | }; 31 | 32 | let gl_ctx = OpenGlContext { _ctx: gl_window }; 33 | 34 | unsafe { 35 | info!( 36 | "OpenGL device"; 37 | "vendor" => gl_ctx.vendor(), 38 | "version" => String::from_utf8(CStr::from_ptr(gl::GetString(gl::VERSION) as *const _).to_bytes().to_vec()).unwrap(), 39 | "renderer" => String::from_utf8(CStr::from_ptr(gl::GetString(gl::RENDERER) as *const _).to_bytes().to_vec()).unwrap() 40 | ); 41 | } 42 | gl_ctx 43 | } 44 | 45 | unsafe fn compile_and_get_shader_log(&self, shader: gl::types::GLuint, source: &str) -> Option { 46 | let mut success = i32::from(gl::FALSE); 47 | let c_str_frag = CString::new(source).unwrap(); 48 | gl::ShaderSource(shader, 1, &c_str_frag.as_ptr(), ptr::null()); 49 | gl::CompileShader(shader); 50 | 51 | // Check for shader compilation errors 52 | gl::GetShaderiv(shader, gl::COMPILE_STATUS, &mut success); 53 | let result = if success != i32::from(gl::TRUE) { 54 | let mut info_len: gl::types::GLint = 0; 55 | gl::GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut info_len); 56 | let mut info = vec![0u8; info_len as usize]; 57 | gl::GetShaderInfoLog( 58 | shader, 59 | info_len as gl::types::GLsizei, 60 | ptr::null_mut(), 61 | info.as_mut_ptr() as *mut gl::types::GLchar, 62 | ); 63 | info.set_len((info_len - 1) as usize); // ignore null for str::from_utf8 64 | Some(String::from_utf8(info).unwrap()) 65 | } else { 66 | None 67 | }; 68 | gl::DeleteShader(shader); 69 | result 70 
| } 71 | } 72 | 73 | impl ShaderValidator for OpenGlContext { 74 | fn validate(&self, tree_type: super::TreeType, source: &str) -> Option { 75 | unsafe { 76 | match tree_type { 77 | crate::TreeType::Fragment => { 78 | // Fragment shader 79 | let fragment_shader = gl::CreateShader(gl::FRAGMENT_SHADER); 80 | self.compile_and_get_shader_log(fragment_shader, source) 81 | } 82 | crate::TreeType::Vertex => { 83 | // Vertex shader 84 | let vertex_shader = gl::CreateShader(gl::VERTEX_SHADER); 85 | self.compile_and_get_shader_log(vertex_shader, source) 86 | } 87 | crate::TreeType::Geometry => { 88 | // Geometry shader 89 | let geometry_shader = gl::CreateShader(gl::GEOMETRY_SHADER); 90 | self.compile_and_get_shader_log(geometry_shader, source) 91 | } 92 | crate::TreeType::Compute => { 93 | // Compute shader 94 | let compute_shader = gl::CreateShader(gl::COMPUTE_SHADER); 95 | self.compile_and_get_shader_log(compute_shader, source) 96 | } 97 | } 98 | } 99 | } 100 | 101 | fn vendor(&self) -> String { 102 | unsafe { String::from_utf8(CStr::from_ptr(gl::GetString(gl::VENDOR) as *const _).to_bytes().to_vec()).unwrap() } 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /server/main/src/source_mapper.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, fmt::Display}; 2 | 3 | use petgraph::graph::NodeIndex; 4 | 5 | #[derive(Clone, Copy, PartialEq, Eq, Hash)] 6 | pub struct SourceNum(usize); 7 | 8 | impl Display for SourceNum { 9 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 10 | f.write_str(format!("{}", self.0).as_str()) 11 | } 12 | } 13 | 14 | impl From for SourceNum { 15 | fn from(val: usize) -> Self { 16 | SourceNum(val) 17 | } 18 | } 19 | 20 | // Maps from a graph node index to a virtual OpenGL 21 | // source number (for when building the merged source view), 22 | // and in reverse (for when mapping from GLSL error source numbers to their source path). 
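// As a usage sketch (the node handles `node_a` and `node_b` are illustrative):
//     let mut mapper = SourceMapper::new(2);
//     let a = mapper.get_num(node_a);         // SourceNum(0), first node seen
//     let b = mapper.get_num(node_b);         // SourceNum(1)
//     let _ = mapper.get_num(node_a);         // SourceNum(0) again, the mapping is stable
//     assert_eq!(mapper.get_node(b), node_b); // and it can be reversed
// so a compiler info log that refers to source string 1 can be traced back to
// the include file behind `node_b`.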
23 | // What is a source number: https://community.khronos.org/t/what-is-source-string-number/70976 24 | pub struct SourceMapper { 25 | next: SourceNum, 26 | mapping: HashMap, 27 | reverse_mapping: Vec, 28 | } 29 | 30 | impl SourceMapper { 31 | pub fn new(capacity: usize) -> Self { 32 | SourceMapper { 33 | next: SourceNum(0), 34 | mapping: HashMap::with_capacity(capacity), 35 | reverse_mapping: Vec::with_capacity(capacity), 36 | } 37 | } 38 | 39 | pub fn get_num(&mut self, node: NodeIndex) -> SourceNum { 40 | let num = &*self.mapping.entry(node).or_insert_with(|| { 41 | let next = self.next; 42 | self.next.0 += 1; 43 | self.reverse_mapping.push(node); 44 | next 45 | }); 46 | *num 47 | } 48 | 49 | pub fn get_node(&self, num: SourceNum) -> NodeIndex { 50 | self.reverse_mapping[num.0] 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /server/main/src/test.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | use std::fs; 3 | use std::io; 4 | use std::io::Result; 5 | 6 | use pretty_assertions::assert_eq; 7 | 8 | use tempdir::TempDir; 9 | 10 | use fs_extra::{copy_items, dir}; 11 | 12 | use jsonrpc_common::*; 13 | use jsonrpc_response::*; 14 | 15 | struct StdoutNewline { 16 | s: Box, 17 | } 18 | 19 | impl io::Write for StdoutNewline { 20 | fn write(&mut self, buf: &[u8]) -> Result { 21 | let res = self.s.write(buf); 22 | if buf[buf.len() - 1] == b"}"[0] { 23 | #[allow(unused_variables)] 24 | let res = self.s.write(b"\n\n"); 25 | } 26 | res 27 | } 28 | 29 | fn flush(&mut self) -> Result<()> { 30 | self.s.flush() 31 | } 32 | } 33 | 34 | pub fn new_temp_server(opengl_context: Option>) -> MinecraftShaderLanguageServer { 35 | let endpoint = LSPEndpoint::create_lsp_output_with_output_stream(|| StdoutNewline { s: Box::new(io::sink()) }); 36 | 37 | let context = opengl_context.unwrap_or_else(|| Box::new(opengl::MockShaderValidator::new())); 38 | 39 | MinecraftShaderLanguageServer { 40 | endpoint, 41 | graph: Rc::new(RefCell::new(graph::CachedStableGraph::new())), 42 | root: "".into(), 43 | command_provider: None, 44 | opengl_context: context.into(), 45 | log_guard: None, 46 | tree_sitter: Rc::new(RefCell::new(Parser::new())), 47 | } 48 | } 49 | 50 | fn copy_files(files: &str, dest: &TempDir) { 51 | let opts = &dir::CopyOptions::new(); 52 | let files = fs::read_dir(files) 53 | .unwrap() 54 | .map(|e| String::from(e.unwrap().path().to_str().unwrap())) 55 | .collect::>(); 56 | copy_items(&files, dest.path().join("shaders"), opts).unwrap(); 57 | } 58 | 59 | pub fn copy_to_and_set_root(test_path: &str, server: &mut MinecraftShaderLanguageServer) -> (Rc, PathBuf) { 60 | let (_tmp_dir, tmp_path) = copy_to_tmp_dir(test_path); 61 | 62 | server.root = tmp_path.clone(); //format!("{}{}", "file://", tmp_path); 63 | 64 | (_tmp_dir, tmp_path) 65 | } 66 | 67 | fn copy_to_tmp_dir(test_path: &str) -> (Rc, PathBuf) { 68 | let tmp_dir = Rc::new(TempDir::new("mcshader").unwrap()); 69 | fs::create_dir(tmp_dir.path().join("shaders")).unwrap(); 70 | 71 | copy_files(test_path, &tmp_dir); 72 | 73 | let tmp_clone = tmp_dir.clone(); 74 | let tmp_path = tmp_clone.path().to_str().unwrap(); 75 | 76 | (tmp_dir, tmp_path.into()) 77 | } 78 | 79 | #[allow(deprecated)] 80 | #[test] 81 | #[logging_macro::log_scope] 82 | fn test_empty_initialize() { 83 | let mut server = new_temp_server(None); 84 | 85 | let tmp_dir = TempDir::new("mcshader").unwrap(); 86 | let tmp_path = tmp_dir.path(); 87 | 88 | let initialize_params = InitializeParams { 
89 | process_id: None, 90 | root_path: None, 91 | root_uri: Some(Url::from_directory_path(tmp_path).unwrap()), 92 | client_info: None, 93 | initialization_options: None, 94 | capabilities: ClientCapabilities { 95 | workspace: None, 96 | text_document: None, 97 | experimental: None, 98 | window: None, 99 | general: Option::None, 100 | }, 101 | trace: None, 102 | workspace_folders: None, 103 | locale: Option::None, 104 | }; 105 | 106 | let on_response = |resp: Option| { 107 | assert!(resp.is_some()); 108 | let respu = resp.unwrap(); 109 | match respu.result_or_error { 110 | ResponseResult::Result(_) => {} 111 | ResponseResult::Error(e) => { 112 | panic!("expected ResponseResult::Result(..), got {:?}", e) 113 | } 114 | } 115 | }; 116 | 117 | let completable = MethodCompletable::new(ResponseCompletable::new(Some(Id::Number(1)), Box::new(on_response))); 118 | server.initialize(initialize_params, completable); 119 | 120 | assert_eq!(server.root, tmp_path); 121 | 122 | assert_eq!(server.graph.borrow().graph.edge_count(), 0); 123 | assert_eq!(server.graph.borrow().graph.node_count(), 0); 124 | 125 | server.endpoint.request_shutdown(); 126 | } 127 | 128 | #[allow(deprecated)] 129 | #[test] 130 | #[logging_macro::log_scope] 131 | fn test_01_initialize() { 132 | let mut server = new_temp_server(None); 133 | 134 | let (_tmp_dir, tmp_path) = copy_to_tmp_dir("./testdata/01"); 135 | 136 | let initialize_params = InitializeParams { 137 | process_id: None, 138 | root_path: None, 139 | root_uri: Some(Url::from_directory_path(tmp_path.clone()).unwrap()), 140 | client_info: None, 141 | initialization_options: None, 142 | capabilities: ClientCapabilities { 143 | workspace: None, 144 | text_document: None, 145 | experimental: None, 146 | window: None, 147 | general: Option::None, 148 | }, 149 | trace: None, 150 | workspace_folders: None, 151 | locale: Option::None, 152 | }; 153 | 154 | let on_response = |resp: Option| { 155 | assert!(resp.is_some()); 156 | let respu = resp.unwrap(); 157 | match respu.result_or_error { 158 | ResponseResult::Result(_) => {} 159 | ResponseResult::Error(e) => { 160 | panic!("expected ResponseResult::Result(..), got {:?}", e) 161 | } 162 | } 163 | }; 164 | 165 | let completable = MethodCompletable::new(ResponseCompletable::new(Some(Id::Number(1)), Box::new(on_response))); 166 | server.initialize(initialize_params, completable); 167 | server.endpoint.request_shutdown(); 168 | 169 | // Assert there is one edge between two nodes 170 | assert_eq!(server.graph.borrow().graph.edge_count(), 1); 171 | 172 | let edge = server.graph.borrow().graph.edge_indices().next().unwrap(); 173 | let (node1, node2) = server.graph.borrow().graph.edge_endpoints(edge).unwrap(); 174 | 175 | // Assert the values of the two nodes in the tree 176 | assert_eq!( 177 | server.graph.borrow().graph[node1], 178 | //format!("{:?}/{}/{}", tmp_path, "shaders", "final.fsh") 179 | tmp_path.join("shaders").join("final.fsh").to_str().unwrap().to_string() 180 | ); 181 | assert_eq!( 182 | server.graph.borrow().graph[node2], 183 | //format!("{:?}/{}/{}", tmp_path, "shaders", "common.glsl") 184 | tmp_path.join("shaders").join("common.glsl").to_str().unwrap().to_string() 185 | ); 186 | 187 | assert_eq!(server.graph.borrow().graph.edge_weight(edge).unwrap().line, 2); 188 | } 189 | 190 | #[allow(deprecated)] 191 | #[test] 192 | #[logging_macro::log_scope] 193 | fn test_05_initialize() { 194 | let mut server = new_temp_server(None); 195 | 196 | let (_tmp_dir, tmp_path) = copy_to_tmp_dir("./testdata/05"); 197 | 198 | let 
initialize_params = InitializeParams { 199 | process_id: None, 200 | root_path: None, 201 | root_uri: Some(Url::from_directory_path(tmp_path.clone()).unwrap()), 202 | client_info: None, 203 | initialization_options: None, 204 | capabilities: ClientCapabilities { 205 | workspace: None, 206 | text_document: None, 207 | experimental: None, 208 | window: None, 209 | general: Option::None, 210 | }, 211 | trace: None, 212 | workspace_folders: None, 213 | locale: Option::None, 214 | }; 215 | 216 | let on_response = |resp: Option<Response>| { 217 | assert!(resp.is_some()); 218 | let respu = resp.unwrap(); 219 | match respu.result_or_error { 220 | ResponseResult::Result(_) => {} 221 | ResponseResult::Error(e) => { 222 | panic!("expected ResponseResult::Result(..), got {:?}", e) 223 | } 224 | } 225 | }; 226 | 227 | let completable = MethodCompletable::new(ResponseCompletable::new(Some(Id::Number(1)), Box::new(on_response))); 228 | server.initialize(initialize_params, completable); 229 | server.endpoint.request_shutdown(); 230 | 231 | // Assert there are three edges across the four nodes 232 | assert_eq!(server.graph.borrow().graph.edge_count(), 3); 233 | 234 | assert_eq!(server.graph.borrow().graph.node_count(), 4); 235 | 236 | let pairs: HashSet<(PathBuf, PathBuf)> = vec![ 237 | ( 238 | tmp_path.join("shaders").join("final.fsh").to_str().unwrap().to_string().into(), 239 | tmp_path.join("shaders").join("common.glsl").to_str().unwrap().to_string().into(), 240 | ), 241 | ( 242 | tmp_path.join("shaders").join("final.fsh").to_str().unwrap().to_string().into(), 243 | tmp_path 244 | .join("shaders") 245 | .join("test") 246 | .join("banana.glsl") 247 | .to_str() 248 | .unwrap() 249 | .to_string() 250 | .into(), 251 | ), 252 | ( 253 | tmp_path 254 | .join("shaders") 255 | .join("test") 256 | .join("banana.glsl") 257 | .to_str() 258 | .unwrap() 259 | .to_string() 260 | .into(), 261 | tmp_path 262 | .join("shaders") 263 | .join("test") 264 | .join("burger.glsl") 265 | .to_str() 266 | .unwrap() 267 | .to_string() 268 | .into(), 269 | ), 270 | ] 271 | .into_iter() 272 | .collect(); 273 | 274 | for edge in server.graph.borrow().graph.edge_indices() { 275 | let endpoints = server.graph.borrow().graph.edge_endpoints(edge).unwrap(); 276 | let first = server.graph.borrow().get_node(endpoints.0); 277 | let second = server.graph.borrow().get_node(endpoints.1); 278 | let contains = pairs.contains(&(first.clone(), second.clone())); 279 | assert!(contains, "doesn't contain ({:?}, {:?})", first, second); 280 | } 281 | } 282 | -------------------------------------------------------------------------------- /server/main/src/url_norm.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use slog_scope::trace; 4 | use anyhow::Result; 5 | use path_slash::PathBufExt; 6 | use url::Url; 7 | 8 | pub trait FromUrl { 9 | fn from_url(u: Url) -> Self; 10 | } 11 | 12 | pub trait FromJson { 13 | fn from_json(v: &serde_json::value::Value) -> Result<Self> 14 | where 15 | Self: Sized; 16 | } 17 | 18 | impl FromUrl for PathBuf { 19 | #[cfg(target_family = "windows")] 20 | fn from_url(u: Url) -> Self { 21 | let path = percent_encoding::percent_decode_str(u.path().strip_prefix('/').unwrap()) 22 | .decode_utf8() 23 | .unwrap(); 24 | 25 | trace!("converted win path from url"; "old" => u.as_str(), "new" => path.to_string()); 26 | 27 | PathBuf::from_slash(path) 28 | } 29 | 30 | #[cfg(target_family = "unix")] 31 | fn from_url(u: Url) -> Self { 32 | let path = 
percent_encoding::percent_decode_str(u.path()).decode_utf8().unwrap(); 33 | 34 | trace!("converted unix path from url"; "old" => u.as_str(), "new" => path.to_string()); 35 | 36 | PathBuf::from_slash(path) 37 | } 38 | } 39 | 40 | impl FromJson for PathBuf { 41 | #[cfg(target_family = "windows")] 42 | fn from_json(v: &serde_json::value::Value) -> Result<Self> 43 | where 44 | Self: Sized, 45 | { 46 | if !v.is_string() { 47 | return Err(anyhow::format_err!("cannot convert {:?} to PathBuf", v)); 48 | } 49 | let path = v.to_string(); 50 | let path = percent_encoding::percent_decode_str(path.trim_start_matches('"').trim_end_matches('"').strip_prefix('/').unwrap()) 51 | .decode_utf8()?; 52 | 53 | trace!("converted win path from json"; "old" => v.to_string(), "new" => path.to_string()); 54 | 55 | Ok(PathBuf::from_slash(path)) 56 | } 57 | 58 | #[cfg(target_family = "unix")] 59 | fn from_json(v: &serde_json::value::Value) -> Result<Self> 60 | where 61 | Self: Sized, 62 | { 63 | if !v.is_string() { 64 | return Err(anyhow::format_err!("cannot convert {:?} to PathBuf", v)); 65 | } 66 | let path = v.to_string(); 67 | let path = percent_encoding::percent_decode_str(path.trim_start_matches('"').trim_end_matches('"')).decode_utf8()?; 68 | 69 | trace!("converted unix path from json"; "old" => v.to_string(), "new" => path.to_string()); 70 | 71 | Ok(PathBuf::from_slash(path)) 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /server/main/testdata/01/common.glsl: -------------------------------------------------------------------------------- 1 | float test() { 2 | return 0.5; 3 | } -------------------------------------------------------------------------------- /server/main/testdata/01/final.fsh: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #include "/common.glsl" 4 | 5 | void main() { 6 | gl_FragColor[0] = vec4(0.0); 7 | } -------------------------------------------------------------------------------- /server/main/testdata/01/final.fsh.merge: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #line 1 1 // !! 4 | float test() { 5 | return 0.5; 6 | } 7 | #line 4 0 // !! 8 | 9 | void main() { 10 | gl_FragColor[0] = vec4(0.0); 11 | } -------------------------------------------------------------------------------- /server/main/testdata/02/final.fsh: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #include "/utils/sample.glsl" 4 | 5 | void main() { 6 | gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0); 7 | } 8 | -------------------------------------------------------------------------------- /server/main/testdata/02/final.fsh.merge: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #line 1 1 // !! 4 | int sample() { 5 | return 5; 6 | } 7 | 8 | #line 1 2 // !! 9 | void burger() { 10 | // sample text 11 | } 12 | #line 6 1 // !! 13 | 14 | #line 1 3 // !! 15 | float test() { 16 | return 3.0; 17 | } 18 | #line 8 1 // !! 19 | 20 | int sample_more() { 21 | return 5; 22 | } 23 | #line 4 0 // !! 
24 | 25 | void main() { 26 | gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0); 27 | } 28 | -------------------------------------------------------------------------------- /server/main/testdata/02/utils/burger.glsl: -------------------------------------------------------------------------------- 1 | void burger() { 2 | // sample text 3 | } -------------------------------------------------------------------------------- /server/main/testdata/02/utils/sample.glsl: -------------------------------------------------------------------------------- 1 | int sample() { 2 | return 5; 3 | } 4 | 5 | #include "/utils/burger.glsl" 6 | 7 | #include "/utils/test.glsl" 8 | 9 | int sample_more() { 10 | return 5; 11 | } -------------------------------------------------------------------------------- /server/main/testdata/02/utils/test.glsl: -------------------------------------------------------------------------------- 1 | float test() { 2 | return 3.0; 3 | } -------------------------------------------------------------------------------- /server/main/testdata/03/final.fsh: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #include "/utils/sample.glsl" 4 | 5 | void main() { 6 | gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0); 7 | } 8 | -------------------------------------------------------------------------------- /server/main/testdata/03/final.fsh.merge: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #line 1 1 // !! 4 | int sample() { 5 | return 5; 6 | } 7 | 8 | #line 1 2 // !! 9 | void burger() { 10 | // sample text 11 | } 12 | #line 6 1 // !! 13 | 14 | #line 1 3 // !! 15 | float test() { 16 | return 3.0; 17 | } 18 | #line 8 1 // !! 19 | #line 4 0 // !! 20 | 21 | void main() { 22 | gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0); 23 | } 24 | -------------------------------------------------------------------------------- /server/main/testdata/03/utils/burger.glsl: -------------------------------------------------------------------------------- 1 | void burger() { 2 | // sample text 3 | } -------------------------------------------------------------------------------- /server/main/testdata/03/utils/sample.glsl: -------------------------------------------------------------------------------- 1 | int sample() { 2 | return 5; 3 | } 4 | 5 | #include "/utils/burger.glsl" 6 | 7 | #include "/utils/test.glsl" -------------------------------------------------------------------------------- /server/main/testdata/03/utils/test.glsl: -------------------------------------------------------------------------------- 1 | float test() { 2 | return 3.0; 3 | } -------------------------------------------------------------------------------- /server/main/testdata/04/final.fsh: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #include "/utils/utilities.glsl" 4 | #include "/utils/matricies.glsl" 5 | 6 | void main() { 7 | 8 | } -------------------------------------------------------------------------------- /server/main/testdata/04/final.fsh.merge: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #line 1 1 // !! 4 | #line 1 2 // !! 5 | void stuff1() { 6 | 7 | } 8 | #line 2 1 // !! 9 | #line 1 3 // !! 10 | void stuff2() { 11 | 12 | } 13 | #line 3 1 // !! 14 | #line 4 0 // !! 15 | #line 1 4 // !! 16 | void matrix() { 17 | 18 | } 19 | #line 5 0 // !! 
20 | 21 | void main() { 22 | 23 | } -------------------------------------------------------------------------------- /server/main/testdata/04/lib/matrices.glsl: -------------------------------------------------------------------------------- 1 | void matrix() { 2 | 3 | } -------------------------------------------------------------------------------- /server/main/testdata/04/utils/stuff1.glsl: -------------------------------------------------------------------------------- 1 | void stuff1() { 2 | 3 | } -------------------------------------------------------------------------------- /server/main/testdata/04/utils/stuff2.glsl: -------------------------------------------------------------------------------- 1 | void stuff2() { 2 | 3 | } -------------------------------------------------------------------------------- /server/main/testdata/04/utils/utilities.glsl: -------------------------------------------------------------------------------- 1 | #include "/utils/stuff1.glsl" 2 | #include "/utils/stuff2.glsl" -------------------------------------------------------------------------------- /server/main/testdata/05/common.glsl: -------------------------------------------------------------------------------- 1 | float test() { 2 | return 0.5; 3 | } -------------------------------------------------------------------------------- /server/main/testdata/05/final.fsh: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #include "/common.glsl" 4 | #include "/test/banana.glsl" 5 | 6 | void main() { 7 | gl_FragColor = vec4(0.0); 8 | } -------------------------------------------------------------------------------- /server/main/testdata/05/final.fsh.merge: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #line 2 "!!" 4 | 5 | #line 1 "!!" 6 | float test() { 7 | return 0.5; 8 | } 9 | #line 4 "!!" 10 | #line 1 "!!" 11 | #line 1 "!!" 12 | void dont() { 13 | 14 | } 15 | #line 2 "!!" 16 | 17 | void ok() { 18 | 19 | } 20 | #line 5 "!!" 21 | 22 | void main() { 23 | gl_FragColor = vec4(0.0); 24 | } 25 | -------------------------------------------------------------------------------- /server/main/testdata/05/test/banana.glsl: -------------------------------------------------------------------------------- 1 | #include "burger.glsl" 2 | 3 | void ok() { 4 | 5 | } -------------------------------------------------------------------------------- /server/main/testdata/05/test/burger.glsl: -------------------------------------------------------------------------------- 1 | void dont() { 2 | 3 | } -------------------------------------------------------------------------------- /server/main/testdata/06/final.fsh: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #ifdef BANANA 4 | #include "test.glsl" 5 | #else 6 | #include "test.glsl" 7 | #endif 8 | 9 | void main() {} -------------------------------------------------------------------------------- /server/main/testdata/06/final.fsh.merge: -------------------------------------------------------------------------------- 1 | #version 120 2 | 3 | #ifdef BANANA 4 | #line 1 1 // !! 5 | int test() { 6 | return 1; 7 | } 8 | #line 5 0 // !! 9 | #else 10 | #line 1 1 // !! 11 | int test() { 12 | return 1; 13 | } 14 | #line 7 0 // !! 
15 | #endif 16 | 17 | void main() {} -------------------------------------------------------------------------------- /server/main/testdata/06/test.glsl: -------------------------------------------------------------------------------- 1 | int test() { 2 | return 1; 3 | } -------------------------------------------------------------------------------- /shaders.py: -------------------------------------------------------------------------------- 1 | with open('shaders.txt') as f: 2 | items = {} 3 | lines = filter(lambda s: s.startswith('uniform'), f.readlines()) 4 | for line in lines: 5 | err = False 6 | try: 7 | detail = int(' '.join(line.split()[3:])) 8 | err = True 9 | except: 10 | pass 11 | type = line.split()[1].rstrip('>').lstrip('<') 12 | detail = ' '.join(line.split()[3:]) if not err else '' 13 | label = line.split()[2].rstrip(';') 14 | if label in items: 15 | continue 16 | items[label] = True 17 | detail = ' ' + detail if not detail == '' else '' 18 | print('{\n\tlabel: \'%s\',\n\tdetail: \'<%s>%s\'\n},' % (label, type, detail)) -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "target": "es6", 5 | "outDir": "./client/out", 6 | "rootDir": "./client/src", 7 | "sourceMap": true, 8 | }, 9 | "include": [ 10 | "./client" 11 | ], 12 | "exclude": [ 13 | "node_modules", 14 | ".vscode-test" 15 | ], 16 | } --------------------------------------------------------------------------------