├── .github └── workflows │ └── build.yml ├── .gitignore ├── CODEOWNERS ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── SECURITY.md ├── build.rs ├── examples ├── REPO.bazel ├── edge_cases.bazelrc ├── file2.bazelrc ├── line-styles.bazelrc └── unformatted.bazelrc ├── rust-toolchains.toml ├── src ├── bazel_flags.rs ├── bazel_flags_proto.rs ├── bazel_version.rs ├── completion.rs ├── definition.rs ├── diagnostic.rs ├── file_utils.rs ├── formatting.rs ├── language_server.rs ├── lib.rs ├── line_index.rs ├── lsp_utils.rs ├── main.rs ├── parser.rs ├── semantic_token.rs └── tokenizer.rs └── vscode-extension ├── .eslintrc.js ├── bazelrc-language-configuration.json ├── build.js ├── package.json ├── pnpm-lock.yaml ├── src └── extension.ts └── tsconfig.json /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | # For the time being, only build binaries for releases. 5 | # Maybe we also want to build for pull requests as soon as this is open-sourced. 6 | pull_request: 7 | push: 8 | workflow_dispatch: 9 | release: 10 | types: [prereleased, released] 11 | 12 | permissions: 13 | # Write permissions needed for publishing the release 14 | contents: write 15 | 16 | concurrency: 17 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 18 | cancel-in-progress: true 19 | 20 | jobs: 21 | build_lsp_native: 22 | name: Build & Test Language Server 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | os: [ubuntu, windows, macos] 27 | runs-on: ${{ matrix.os }}-latest 28 | steps: 29 | - name: Checkout 30 | uses: actions/checkout@v3 31 | 32 | - name: Install Rust 33 | uses: dtolnay/rust-toolchain@stable 34 | 35 | # Inspired by https://ectobit.com/blog/speed-up-github-actions-rust-pipelines/ 36 | - name: Set up cargo cache 37 | uses: actions/cache@v3 38 | continue-on-error: false 39 | with: 40 | path: | 41 | ~/.cargo/bin/ 42 | ~/.cargo/registry/index/ 43 | ~/.cargo/registry/cache/ 44 | ~/.cargo/git/db/ 45 | bazel-flags-cache/ 46 | target/ 47 | key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} 48 | restore-keys: ${{ runner.os }}-cargo- 49 | 50 | - name: Lint 51 | # Only run the linter on Ubuntu to save some time 52 | if: ${{ runner.os == 'Linux' }} 53 | run: | 54 | cargo fmt --all -- --check 55 | cargo clippy -- -D warnings 56 | 57 | - name: Test Language Server 58 | run: cargo test 59 | 60 | - name: Build Language Server 61 | run: cargo build --release 62 | 63 | - name: Upload Workflow Artifact 64 | uses: actions/upload-artifact@v4 65 | with: 66 | name: bazelrc-lsp-${{ matrix.os }}${{ runner.os == 'Windows' && '.exe' || '' }} 67 | path: ./target/release/bazelrc-lsp${{ runner.os == 'Windows' && '.exe' || '' }} 68 | if-no-files-found: error 69 | 70 | - name: Upload Release Artifact 71 | if: ${{ github.event_name == 'release' }} 72 | shell: bash 73 | run: | 74 | upload_url=`echo '${{ github.event.release.upload_url }}' | cut -f1 -d"{"`; 75 | gh api --method POST \ 76 | -H "Accept: application/vnd.github+json" \ 77 | -H "X-GitHub-Api-Version: 2022-11-28" \ 78 | "${upload_url}?name=bazelrc-lsp-${{ matrix.os }}${{ runner.os == 'Windows' && '.exe' || '' }}" \ 79 | --input './target/release/bazelrc-lsp${{ runner.os == 'Windows' && '.exe' || '' }}' 80 | env: 81 | GH_TOKEN: ${{ github.token }} 82 | 83 | 84 | build_vscode_ext: 85 | name: Build VS Code extension 86 | strategy: 87 | fail-fast: false 88 | matrix: 89 | os: [ubuntu, windows, macos] 90 | runs-on: ${{ 
matrix.os }}-latest 91 | steps: 92 | - name: Checkout 93 | uses: actions/checkout@v3 94 | 95 | - name: Install Rust 96 | uses: dtolnay/rust-toolchain@stable 97 | 98 | # Inspired by https://ectobit.com/blog/speed-up-github-actions-rust-pipelines/ 99 | - name: Set up cargo cache 100 | uses: actions/cache@v3 101 | continue-on-error: false 102 | with: 103 | path: | 104 | ~/.cargo/bin/ 105 | ~/.cargo/registry/index/ 106 | ~/.cargo/registry/cache/ 107 | ~/.cargo/git/db/ 108 | target/ 109 | key: ${{ runner.os }}-cargo-vscode-${{ hashFiles('**/Cargo.lock') }} 110 | restore-keys: ${{ runner.os }}-cargo-vscode- 111 | 112 | - name: Install Node.js 113 | uses: actions/setup-node@v3 114 | 115 | - name: Install pnpm 116 | uses: pnpm/action-setup@v2 117 | with: 118 | version: latest 119 | run_install: false 120 | 121 | - name: Get pnpm store directory 122 | shell: bash 123 | run: | 124 | echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV 125 | 126 | - name: Setup pnpm cache 127 | uses: actions/cache@v3 128 | with: 129 | path: ${{ env.STORE_PATH }} 130 | key: ${{ runner.os }}-pnpm-cache-${{ hashFiles('**/pnpm-lock.yaml') }} 131 | restore-keys: | 132 | ${{ runner.os }}-pnpm-cache- 133 | 134 | - name: Install Javascript dependencies 135 | working-directory: vscode-extension 136 | run: pnpm install 137 | 138 | - name: Lint VSCode extension 139 | # Only run the linter on Ubuntu to save some time 140 | if: ${{ runner.os == 'Linux' }} 141 | working-directory: vscode-extension 142 | run: pnpm lint 143 | 144 | - name: Build Language Server 145 | run: cargo build --release 146 | 147 | - name: Package VS Code extension 148 | working-directory: vscode-extension 149 | run: pnpm package:release 150 | 151 | - name: Upload Workflow Artifact 152 | uses: actions/upload-artifact@v4 153 | with: 154 | name: bazelrc-lsp-${{ matrix.os }}-prerelease.vsix 155 | path: ./vscode-extension/bazelrc-lsp-*.vsix 156 | if-no-files-found: error 157 | 158 | - name: Upload Release Artifact 159 | if: ${{ github.event_name == 'release' }} 160 | shell: bash 161 | working-directory: ./vscode-extension/ 162 | run: | 163 | upload_url=`echo '${{ github.event.release.upload_url }}' | cut -f1 -d"{"`; 164 | filename=`echo bazelrc-lsp-*.vsix`; 165 | uploadname=${filename/lsp-/lsp-${{ matrix.os }}-}; 166 | gh api --method POST \ 167 | -H "Accept: application/vnd.github+json" \ 168 | -H "X-GitHub-Api-Version: 2022-11-28" \ 169 | "${upload_url}?name=$uploadname" \ 170 | --input "$filename" 171 | env: 172 | GH_TOKEN: ${{ github.token }} 173 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /vscode-extension/dist/ 3 | /bazel-flags-cache 4 | node_modules 5 | .pnpm-debug.log 6 | *.vsix 7 | 8 | .DS_Store 9 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Comment line immediately above ownership line is reserved for related other information. Please be careful while editing. 2 | #ECCN:Open Source 5D002 3 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Salesforce Open Source Community Code of Conduct 2 | 3 | ## About the Code of Conduct 4 | 5 | Equality is a core value at Salesforce. 
We believe a diverse and inclusive 6 | community fosters innovation and creativity, and are committed to building a 7 | culture where everyone feels included. 8 | 9 | Salesforce open-source projects are committed to providing a friendly, safe, and 10 | welcoming environment for all, regardless of gender identity and expression, 11 | sexual orientation, disability, physical appearance, body size, ethnicity, nationality, 12 | race, age, religion, level of experience, education, socioeconomic status, or 13 | other similar personal characteristics. 14 | 15 | The goal of this code of conduct is to specify a baseline standard of behavior so 16 | that people with different social values and communication styles can work 17 | together effectively, productively, and respectfully in our open source community. 18 | It also establishes a mechanism for reporting issues and resolving conflicts. 19 | 20 | All questions and reports of abusive, harassing, or otherwise unacceptable behavior 21 | in a Salesforce open-source project may be reported by contacting the Salesforce 22 | Open Source Conduct Committee at ossconduct@salesforce.com. 23 | 24 | ## Our Pledge 25 | 26 | In the interest of fostering an open and welcoming environment, we as 27 | contributors and maintainers pledge to making participation in our project and 28 | our community a harassment-free experience for everyone, regardless of gender 29 | identity and expression, sexual orientation, disability, physical appearance, 30 | body size, ethnicity, nationality, race, age, religion, level of experience, education, 31 | socioeconomic status, or other similar personal characteristics. 32 | 33 | ## Our Standards 34 | 35 | Examples of behavior that contributes to creating a positive environment 36 | include: 37 | 38 | * Using welcoming and inclusive language 39 | * Being respectful of differing viewpoints and experiences 40 | * Gracefully accepting constructive criticism 41 | * Focusing on what is best for the community 42 | * Showing empathy toward other community members 43 | 44 | Examples of unacceptable behavior by participants include: 45 | 46 | * The use of sexualized language or imagery and unwelcome sexual attention or 47 | advances 48 | * Personal attacks, insulting/derogatory comments, or trolling 49 | * Public or private harassment 50 | * Publishing, or threatening to publish, others' private information—such as 51 | a physical or electronic address—without explicit permission 52 | * Other conduct which could reasonably be considered inappropriate in a 53 | professional setting 54 | * Advocating for or encouraging any of the above behaviors 55 | 56 | ## Our Responsibilities 57 | 58 | Project maintainers are responsible for clarifying the standards of acceptable 59 | behavior and are expected to take appropriate and fair corrective action in 60 | response to any instances of unacceptable behavior. 61 | 62 | Project maintainers have the right and responsibility to remove, edit, or 63 | reject comments, commits, code, wiki edits, issues, and other contributions 64 | that are not aligned with this Code of Conduct, or to ban temporarily or 65 | permanently any contributor for other behaviors that they deem inappropriate, 66 | threatening, offensive, or harmful. 67 | 68 | ## Scope 69 | 70 | This Code of Conduct applies both within project spaces and in public spaces 71 | when an individual is representing the project or its community. 
Examples of 72 | representing a project or community include using an official project email 73 | address, posting via an official social media account, or acting as an appointed 74 | representative at an online or offline event. Representation of a project may be 75 | further defined and clarified by project maintainers. 76 | 77 | ## Enforcement 78 | 79 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 80 | reported by contacting the Salesforce Open Source Conduct Committee 81 | at ossconduct@salesforce.com. All complaints will be reviewed and investigated 82 | and will result in a response that is deemed necessary and appropriate to the 83 | circumstances. The committee is obligated to maintain confidentiality with 84 | regard to the reporter of an incident. Further details of specific enforcement 85 | policies may be posted separately. 86 | 87 | Project maintainers who do not follow or enforce the Code of Conduct in good 88 | faith may face temporary or permanent repercussions as determined by other 89 | members of the project's leadership and the Salesforce Open Source Conduct 90 | Committee. 91 | 92 | ## Attribution 93 | 94 | This Code of Conduct is adapted from the [Contributor Covenant][contributor-covenant-home], 95 | version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html. 96 | It includes adaptions and additions from [Go Community Code of Conduct][golang-coc], 97 | [CNCF Code of Conduct][cncf-coc], and [Microsoft Open Source Code of Conduct][microsoft-coc]. 98 | 99 | This Code of Conduct is licensed under the [Creative Commons Attribution 3.0 License][cc-by-3-us]. 100 | 101 | [contributor-covenant-home]: https://www.contributor-covenant.org (https://www.contributor-covenant.org/) 102 | [golang-coc]: https://golang.org/conduct 103 | [cncf-coc]: https://github.com/cncf/foundation/blob/master/code-of-conduct.md 104 | [microsoft-coc]: https://opensource.microsoft.com/codeofconduct/ 105 | [cc-by-3-us]: https://creativecommons.org/licenses/by/3.0/us/ 106 | 107 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guide For `bazelrc-lsp` 2 | 3 | This page lists the operational governance model of this project, as well as the recommendations and requirements for how to best contribute to `bazelrc-lsp`. We strive to obey these as best as possible. As always, thanks for contributing – we hope these guidelines make it easier and shed some light on our approach and processes. 4 | 5 | # Governance Model: Community Based 6 | 7 | The intent and goal of open sourcing this project is to increase the contributor and user base. The governance model is one where new project leads (`admins`) will be added to the project based on their contributions and efforts, a so-called "do-acracy" or "meritocracy" similar to that used by all Apache Software Foundation projects. 8 | 9 | # Issues, requests & ideas 10 | 11 | Most communication happens in GitHub issues. 12 | Use GitHub Issues page to submit issues, enhancement requests and discuss ideas. 13 | 14 | ### Bug Reports and Fixes 15 | - If you find a bug, please search for it in the [Issues](https://github.com/salesforce-misc/bazelrc-lsp/issues), and if it isn't already tracked, 16 | [create a new issue](https://github.com/salesforce-misc/bazelrc-lsp/issues/new). 
17 | - Issues that have already been identified as a bug (note: able to reproduce) will be labelled `bug`. 18 | - If you'd like to submit a fix for a bug, [send a Pull Request](#creating_a_pull_request) and mention the Issue number. 19 | - Include tests that isolate the bug and verify that it was fixed. 20 | 21 | ### New Features 22 | - If you'd like to add new functionality to this project, describe the problem you want to solve in a [new Issue](https://github.com/salesforce-misc/bazelrc-lsp/issues/new). 23 | - Issues that have been identified as a feature request will be labelled `enhancement`. 24 | - If you'd like to implement the new feature, please wait for feedback from the project 25 | maintainers before spending too much time writing the code. In some cases, `enhancement`s may 26 | not align well with the project objectives at the time. 27 | 28 | ### Tests, Documentation, Miscellaneous 29 | - If you'd like to improve the tests, make the documentation clearer, propose an 30 | alternative implementation of something that may have advantages over the way it's currently 31 | done, or make any other change, we would be happy to hear about it! 32 | - If it's a trivial change, go ahead and [send a Pull Request](#creating_a_pull_request) with the changes you have in mind. 33 | - If not, [open an Issue](https://github.com/salesforce-misc/bazelrc-lsp/issues/new) to discuss the idea first. 34 | 35 | If you're new to our project and looking for some way to make your first contribution, look for 36 | Issues labelled `good first contribution`. 37 | 38 | # Contribution Checklist 39 | 40 | - [x] Clean, simple, well-styled code 41 | - [x] Commits should be atomic and messages must be descriptive. Related issues should be mentioned by Issue number. 42 | - [x] Comments on complex blocks of code or algorithms (include references to sources). 43 | - [x] Tests 44 | - The test suite, if provided, must be complete and pass 45 | - Increase code coverage, do not decrease it. 46 | - [x] Dependencies 47 | - Minimize the number of dependencies. 48 | - Prefer Apache 2.0, BSD3, MIT, ISC, and MPL licenses. 49 | - [x] Reviews 50 | - Changes must be approved via peer code review 51 | 52 | # Creating a Pull Request 53 | 54 | 1. **Ensure the bug/feature was not already reported** by searching on GitHub under Issues. If none exists, create a new issue so that other contributors can keep track of what you are trying to add/fix and offer suggestions (or let you know if there is already an effort in progress). 55 | 2. **Fork** the repository and **clone** your fork to your machine. 56 | 3. **Create** a new branch to contain your work (e.g. `git checkout -b fix-issue-11`) 57 | 4. **Commit** changes to your own branch. 58 | 5. **Push** your work back up to your fork (e.g. `git push origin fix-issue-11`). 59 | 6. **Submit** a Pull Request against the `main` branch and refer to the issue(s) you are fixing. Try not to pollute your pull request with unintended changes. Keep it simple and small. 60 | 7. **Sign** the Salesforce CLA (you will be prompted to do so when submitting the Pull Request). A command-line sketch of this workflow is included near the end of this guide. 61 | 62 | > **NOTE**: Be sure to [sync your fork](https://help.github.com/articles/syncing-a-fork/) before making a pull request. 63 | 64 | 65 | # Code of Conduct 66 | Please follow our [Code of Conduct](CODE_OF_CONDUCT.md).
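
For reference, the pull request workflow described above boils down to roughly the following commands. This is only a sketch: the fork URL placeholder, branch name, issue number, and commit message are examples, not fixed conventions of this project.

```sh
# Fork salesforce-misc/bazelrc-lsp on GitHub first, then clone your fork
git clone git@github.com:<your-username>/bazelrc-lsp.git
cd bazelrc-lsp

# Create a dedicated branch for your change (name is just an example)
git checkout -b fix-issue-11

# ...make your changes, then commit and push them to your fork
git commit -am "Fix the parser crash described in #11"
git push origin fix-issue-11

# Finally, open a pull request against the `main` branch on GitHub
```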
67 | 68 | # License 69 | By contributing your code, you agree to license your contribution under the terms of our project [LICENSE](LICENSE.txt) and to sign the [Salesforce CLA](https://cla.salesforce.com/sign-cla) -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "bazelrc-lsp" 3 | version = "0.2.3" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | regex = "1.11.1" 10 | tokio = { version = "1.43.0", features = ["full"] } 11 | tower-lsp = { version = "0.20.0", features = ["proposed"] } 12 | serde = { version = "1.0", features = ["derive"] } 13 | chumsky = "0.10.1" 14 | dashmap = "6.1.0" 15 | ropey = "1.6.1" 16 | prost = "0.13.4" 17 | phf = { version = "0.11.3", features = ["macros"] } 18 | lz4_flex = "0.11" 19 | once_cell = "1.20.2" 20 | base64 = "0.22.1" 21 | clap = { version = "4.5.28", features = ["derive"] } 22 | serde_json = "1.0.136" 23 | walkdir = "2" 24 | 25 | [build-dependencies] 26 | prost = "0.13.4" 27 | base64 = "0.22.1" 28 | lz4_flex = { version = "0.11" } 29 | 30 | [profile.release] 31 | opt-level = 'z' # Optimize for size 32 | lto = true # Enable link-time optimization 33 | panic = 'abort' # Abort on panic 34 | strip = true # Strip symbols from binary* 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License Version 2.0 2 | 3 | Copyright (c) 2023 Salesforce, Inc. 4 | All rights reserved. 5 | 6 | Apache License 7 | Version 2.0, January 2004 8 | http://www.apache.org/licenses/ 9 | 10 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 11 | 12 | 1. Definitions. 13 | 14 | "License" shall mean the terms and conditions for use, reproduction, 15 | and distribution as defined by Sections 1 through 9 of this document. 16 | 17 | "Licensor" shall mean the copyright owner or entity authorized by 18 | the copyright owner that is granting the License. 19 | 20 | "Legal Entity" shall mean the union of the acting entity and all 21 | other entities that control, are controlled by, or are under common 22 | control with that entity. For the purposes of this definition, 23 | "control" means (i) the power, direct or indirect, to cause the 24 | direction or management of such entity, whether by contract or 25 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 26 | outstanding shares, or (iii) beneficial ownership of such entity. 27 | 28 | "You" (or "Your") shall mean an individual or Legal Entity 29 | exercising permissions granted by this License. 30 | 31 | "Source" form shall mean the preferred form for making modifications, 32 | including but not limited to software source code, documentation 33 | source, and configuration files. 34 | 35 | "Object" form shall mean any form resulting from mechanical 36 | transformation or translation of a Source form, including but 37 | not limited to compiled object code, generated documentation, 38 | and conversions to other media types. 39 | 40 | "Work" shall mean the work of authorship, whether in Source or 41 | Object form, made available under the License, as indicated by a 42 | copyright notice that is included in or attached to the work 43 | (an example is provided in the Appendix below). 
44 | 45 | "Derivative Works" shall mean any work, whether in Source or Object 46 | form, that is based on (or derived from) the Work and for which the 47 | editorial revisions, annotations, elaborations, or other modifications 48 | represent, as a whole, an original work of authorship. For the purposes 49 | of this License, Derivative Works shall not include works that remain 50 | separable from, or merely link (or bind by name) to the interfaces of, 51 | the Work and Derivative Works thereof. 52 | 53 | "Contribution" shall mean any work of authorship, including 54 | the original version of the Work and any modifications or additions 55 | to that Work or Derivative Works thereof, that is intentionally 56 | submitted to Licensor for inclusion in the Work by the copyright owner 57 | or by an individual or Legal Entity authorized to submit on behalf of 58 | the copyright owner. For the purposes of this definition, "submitted" 59 | means any form of electronic, verbal, or written communication sent 60 | to the Licensor or its representatives, including but not limited to 61 | communication on electronic mailing lists, source code control systems, 62 | and issue tracking systems that are managed by, or on behalf of, the 63 | Licensor for the purpose of discussing and improving the Work, but 64 | excluding communication that is conspicuously marked or otherwise 65 | designated in writing by the copyright owner as "Not a Contribution." 66 | 67 | "Contributor" shall mean Licensor and any individual or Legal Entity 68 | on behalf of whom a Contribution has been received by Licensor and 69 | subsequently incorporated within the Work. 70 | 71 | 2. Grant of Copyright License. Subject to the terms and conditions of 72 | this License, each Contributor hereby grants to You a perpetual, 73 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 74 | copyright license to reproduce, prepare Derivative Works of, 75 | publicly display, publicly perform, sublicense, and distribute the 76 | Work and such Derivative Works in Source or Object form. 77 | 78 | 3. Grant of Patent License. Subject to the terms and conditions of 79 | this License, each Contributor hereby grants to You a perpetual, 80 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 81 | (except as stated in this section) patent license to make, have made, 82 | use, offer to sell, sell, import, and otherwise transfer the Work, 83 | where such license applies only to those patent claims licensable 84 | by such Contributor that are necessarily infringed by their 85 | Contribution(s) alone or by combination of their Contribution(s) 86 | with the Work to which such Contribution(s) was submitted. If You 87 | institute patent litigation against any entity (including a 88 | cross-claim or counterclaim in a lawsuit) alleging that the Work 89 | or a Contribution incorporated within the Work constitutes direct 90 | or contributory patent infringement, then any patent licenses 91 | granted to You under this License for that Work shall terminate 92 | as of the date such litigation is filed. 93 | 94 | 4. Redistribution. 
You may reproduce and distribute copies of the 95 | Work or Derivative Works thereof in any medium, with or without 96 | modifications, and in Source or Object form, provided that You 97 | meet the following conditions: 98 | 99 | (a) You must give any other recipients of the Work or 100 | Derivative Works a copy of this License; and 101 | 102 | (b) You must cause any modified files to carry prominent notices 103 | stating that You changed the files; and 104 | 105 | (c) You must retain, in the Source form of any Derivative Works 106 | that You distribute, all copyright, patent, trademark, and 107 | attribution notices from the Source form of the Work, 108 | excluding those notices that do not pertain to any part of 109 | the Derivative Works; and 110 | 111 | (d) If the Work includes a "NOTICE" text file as part of its 112 | distribution, then any Derivative Works that You distribute must 113 | include a readable copy of the attribution notices contained 114 | within such NOTICE file, excluding those notices that do not 115 | pertain to any part of the Derivative Works, in at least one 116 | of the following places: within a NOTICE text file distributed 117 | as part of the Derivative Works; within the Source form or 118 | documentation, if provided along with the Derivative Works; or, 119 | within a display generated by the Derivative Works, if and 120 | wherever such third-party notices normally appear. The contents 121 | of the NOTICE file are for informational purposes only and 122 | do not modify the License. You may add Your own attribution 123 | notices within Derivative Works that You distribute, alongside 124 | or as an addendum to the NOTICE text from the Work, provided 125 | that such additional attribution notices cannot be construed 126 | as modifying the License. 127 | 128 | You may add Your own copyright statement to Your modifications and 129 | may provide additional or different license terms and conditions 130 | for use, reproduction, or distribution of Your modifications, or 131 | for any such Derivative Works as a whole, provided Your use, 132 | reproduction, and distribution of the Work otherwise complies with 133 | the conditions stated in this License. 134 | 135 | 5. Submission of Contributions. Unless You explicitly state otherwise, 136 | any Contribution intentionally submitted for inclusion in the Work 137 | by You to the Licensor shall be under the terms and conditions of 138 | this License, without any additional terms or conditions. 139 | Notwithstanding the above, nothing herein shall supersede or modify 140 | the terms of any separate license agreement you may have executed 141 | with Licensor regarding such Contributions. 142 | 143 | 6. Trademarks. This License does not grant permission to use the trade 144 | names, trademarks, service marks, or product names of the Licensor, 145 | except as required for reasonable and customary use in describing the 146 | origin of the Work and reproducing the content of the NOTICE file. 147 | 148 | 7. Disclaimer of Warranty. Unless required by applicable law or 149 | agreed to in writing, Licensor provides the Work (and each 150 | Contributor provides its Contributions) on an "AS IS" BASIS, 151 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 152 | implied, including, without limitation, any warranties or conditions 153 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 154 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 155 | appropriateness of using or redistributing the Work and assume any 156 | risks associated with Your exercise of permissions under this License. 157 | 158 | 8. Limitation of Liability. In no event and under no legal theory, 159 | whether in tort (including negligence), contract, or otherwise, 160 | unless required by applicable law (such as deliberate and grossly 161 | negligent acts) or agreed to in writing, shall any Contributor be 162 | liable to You for damages, including any direct, indirect, special, 163 | incidental, or consequential damages of any character arising as a 164 | result of this License or out of the use or inability to use the 165 | Work (including but not limited to damages for loss of goodwill, 166 | work stoppage, computer failure or malfunction, or any and all 167 | other commercial damages or losses), even if such Contributor 168 | has been advised of the possibility of such damages. 169 | 170 | 9. Accepting Warranty or Additional Liability. While redistributing 171 | the Work or Derivative Works thereof, You may choose to offer, 172 | and charge a fee for, acceptance of support, warranty, indemnity, 173 | or other liability obligations and/or rights consistent with this 174 | License. However, in accepting such obligations, You may act only 175 | on Your own behalf and on Your sole responsibility, not on behalf 176 | of any other Contributor, and only if You agree to indemnify, 177 | defend, and hold each Contributor harmless for any liability 178 | incurred by, or claims asserted against, such Contributor by reason 179 | of your accepting any such warranty or additional liability. 180 | 181 | END OF TERMS AND CONDITIONS 182 | 183 | APPENDIX: How to apply the Apache License to your work. 184 | 185 | To apply the Apache License to your work, attach the following 186 | boilerplate notice, with the fields enclosed by brackets "{}" 187 | replaced with your own identifying information. (Don't include 188 | the brackets!) The text should be enclosed in the appropriate 189 | comment syntax for the file format. We also recommend that a 190 | file or class name and description of purpose be included on the 191 | same "printed page" as the copyright notice for easier 192 | identification within third-party archives. 193 | 194 | Copyright {yyyy} {name of copyright owner} 195 | 196 | Licensed under the Apache License, Version 2.0 (the "License"); 197 | you may not use this file except in compliance with the License. 198 | You may obtain a copy of the License at 199 | 200 | http://www.apache.org/licenses/LICENSE-2.0 201 | 202 | Unless required by applicable law or agreed to in writing, software 203 | distributed under the License is distributed on an "AS IS" BASIS, 204 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 205 | See the License for the specific language governing permissions and 206 | limitations under the License. 207 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # bazelrc language server 2 | 3 | Code intelligence for `.bazelrc` config files. 4 | 5 | ## Installation & Configuration 6 | 7 | The language server from this repository can be used in a wide range editors (neovim, emacs, IntelliJ, ...). 8 | 9 | For **Visual Studio Code**, we offer a pre-packaged Visual Studio Code plugin: 10 | 11 | 1. 
Download the correct `*.vsix` package for your operating system from the [latest release](https://github.com/salesforce-misc/bazelrc-lsp/releases/) 12 | 2. Inside Visual Studio Code, press `Cmd` + `Shift` + `P` (`Ctrl` + `Shift` + `P` on Windows/Linux) to open the command picker 13 | 3. Choose the "Extensions: Install from VSIX..." command 14 | 4. Choose the downloaded `.vsix` file in the file picker 15 | 16 | I will leave it as an exercise to the reader to figure out how exactly 17 | to configure the language server for other editors. The `bazelrc-lsp help` 18 | command might be useful to get an overview of the available arguments. 19 | 20 | ### Configuring your Bazel version 21 | 22 | `bazelrc-lsp` comes packaged with documentation and metadata about the supported Bazel 23 | flags for a large range of Bazel versions. By default, the Bazel version is auto-detected 24 | using [Bazelisk's algorithm](https://github.com/bazelbuild/bazelisk/tree/master?tab=readme-ov-file#how-does-bazelisk-know-which-bazel-version-to-run), 25 | i.e. based on the `USE_BAZEL_VERSION` environment variable, the `.bazeliskrc` file, and the 26 | `.bazelversion` file. 27 | 28 | In case auto-detection does not work for your particular use case, you can also specify a version 29 | explicitly via the VS Code settings. 30 | 31 | Alternatively, the language server can also invoke Bazel to request the list of supported flags. 32 | To do so, point the environment variable `BAZELRC_LSP_RUN_BAZEL_PATH` to your Bazel binary. This 33 | is particularly useful for development versions and custom forks of Bazel. 34 | 35 | ## Development 36 | 37 | The source code for this extension lives at https://github.com/salesforce-misc/bazelrc-lsp. 38 | Contributions are welcome. Feel free to just open a pull request. 39 | Further down in this README you can also find a backlog of various ideas, in case you need some inspiration. 40 | In case you want to discuss any of those topics (or a topic of your own), please feel free to reach out via a GitHub issue. 41 | 42 | ### Building from source 43 | 44 | 1. `cd vscode-extension` 45 | 2. `pnpm i` 46 | 3. `pnpm package` 47 | 4. Install the "bazelrc-lsp-*.vsix" in VS Code 48 | 49 | ### Current State & Backlog 50 | 51 | The extension is complete enough for my personal needs and hopefully useful to you, too. 52 | 53 | Long-term, I am considering integrating this functionality into the official [VSCode Bazel extension](https://github.com/bazelbuild/vscode-bazel). This is also why this extension is not published to the VS Code Marketplace as a standalone extension.
54 | 55 | Backlog items: 56 | 57 | * ✔ Bazel version support 58 | * ✔ load flags from Bazel's flag dump 59 | * ✔ pack multiple flag versions & auto-detect version 60 | * ✔ run `bazel help flags-as-proto` at runtime based on environment variable 61 | * ✔ expose settings in VS-Code extension 62 | * ✔ Syntax highlighting / semantic tokens 63 | * Diagnose 64 | * ✔ unknown flags 65 | * ✔ allow custom setting flags (`--//my/package:setting` and `--no//my/package:setting`) 66 | * ✔ abbreviated flag names; prefer non-abbreviated flags 67 | * when using an old, deprecated name (blocked on [up-stream Bazel change](https://github.com/bazelbuild/bazel/pull/25169)) 68 | * ✔ diagnose deprecated flags 69 | * ✔ diagnose missing `import`ed files 70 | * ✔ configs on `startup`, `import`, `try-import` 71 | * ✔ empty config name 72 | * ✔ config name which doesn't match `[a-z_\-]+` (or similar) 73 | * repeated flags 74 | * offer fix-it: 75 | * to replace abbreviated flags by non-abbreviated flags 76 | * to remove deprecated no-op flags 77 | * to remove repeated flags 78 | * Hover 79 | * ✔ Show documentation of flags on hover 80 | * ✔ Correctly escape `<>` in Markdown (e.g. problematic in the documentation for `--config`) 81 | * Link to flag documentation in hovers 82 | * Expose default value, value description (blocked on [up-stream Bazel change](https://github.com/bazelbuild/bazel/pull/25169)) 83 | * ✔ Show documentation for commands on hover 84 | * Autocomplete 85 | * ✔ auto complete command names 86 | * ✔ flag names 87 | * flag values: 88 | * based on available setting values (needs Bazel-side changes) 89 | * based on previously observed values 90 | * config names 91 | * based on config names used elsewhere in the file / project 92 | * file names for `import` / `try-import` 93 | * Format / pretty print 94 | * improved formatting behavior 95 | * ✔ basic formatting support 96 | * ✔ always quote arguments to `import` / `try-import` 97 | * ✔ combine `--flag value` into `--flag=value` 98 | * ✔ "line reflowing" support (all on single line; one flag per line with `\` line continuations; one flag per command; ...) 99 | * ✔ compact multiple consecutive empty lines 100 | * break up multiline continuations in comments 101 | * more aggressive reformatting of comments / smarter detection of Markdown, ASCII art etc. 102 | * ✔ LSP integration 103 | * ✔ whole document formatting 104 | * ✔ range formatting 105 | * ✔ expose formatting through command line to enable integration into CI systems 106 | * ✔ stdin processing 107 | * ✔ inplace updates for files 108 | * ✔ `--check`-mode 109 | * ✔ support enumerating all bazelrc files from a folder 110 | * write documentation, including explanation of different styles 111 | * ✔ link file names for `import` & `try-import` 112 | * Rename functionality for config names 113 | * Go to Reference: 114 | * Other usages of config name 115 | * Find other usages of same flag 116 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | ## Security 2 | 3 | Please report any security issue to [security@salesforce.com](mailto:security@salesforce.com) 4 | as soon as it is discovered. This library limits its runtime dependencies in 5 | order to reduce the total cost of ownership as much as can be, but all consumers 6 | should remain vigilant and have their security stakeholders review all third-party 7 | products (3PP) like this one and their dependencies. 
8 | -------------------------------------------------------------------------------- /build.rs: -------------------------------------------------------------------------------- 1 | use base64::prelude::*; 2 | use prost::{bytes::Bytes, Message}; 3 | use std::{collections::HashMap, env, fs, io::Result, path::Path, process::Command}; 4 | 5 | include!("src/bazel_flags_proto.rs"); 6 | 7 | fn dump_flags(cache_dir: &Path, version: &str) -> Vec { 8 | let cache_path = cache_dir.join(format!("flags-dumps/{version}.data")); 9 | if cache_path.exists() { 10 | fs::read(cache_path).unwrap() 11 | } else { 12 | let mut bazelisk_cmd = if cfg!(windows) { 13 | // In Github Actions, bazelisk is available via powershell on Windows 14 | let mut cmd = Command::new("powershell.exe"); 15 | cmd.arg("-File"); 16 | cmd.arg("C:\\npm\\prefix\\bazelisk.ps1"); 17 | cmd 18 | } else { 19 | Command::new("bazelisk") 20 | }; 21 | let result = bazelisk_cmd 22 | .env("USE_BAZEL_VERSION", version) 23 | .arg("--ignore_all_rc_files") 24 | .arg("help") 25 | .arg("flags-as-proto") 26 | .output() 27 | .unwrap_or_else(|e| panic!("Failed to spawn Bazelisk for version {version}, {e}")); 28 | if !result.status.success() { 29 | panic!( 30 | "Failed to get flags for Bazel version {version}:\n===stdout===\n{stdout}\n===stderr===\n{stderr}", 31 | stdout = String::from_utf8_lossy(&result.stdout), 32 | stderr = String::from_utf8_lossy(&result.stderr) 33 | ); 34 | } 35 | let flags_binary = BASE64_STANDARD 36 | .decode(result.stdout) 37 | .expect("Failed to decode Bazelisk output as base64"); 38 | if let Some(parent) = cache_path.parent() { 39 | fs::create_dir_all(parent).unwrap_or_else(|e| { 40 | panic!( 41 | "Failed to create directory at {} for flags, {e}", 42 | parent.display() 43 | ) 44 | }); 45 | } 46 | fs::write(cache_path.clone(), &flags_binary).unwrap_or_else(|e| { 47 | panic!( 48 | "Failed to write flags to disk at {}, {e}", 49 | cache_path.display() 50 | ) 51 | }); 52 | flags_binary 53 | } 54 | } 55 | 56 | fn merge_flags_into( 57 | new_flags: Vec, 58 | flags: &mut HashMap>, 59 | bazel_version: &str, 60 | ) { 61 | new_flags.into_iter().for_each(|mut new_flag| { 62 | if let Some(existing_flags) = flags.get_mut(&new_flag.name) { 63 | let existing_flag_opt = existing_flags.iter_mut().find(|existing_flag| { 64 | existing_flag.has_negative_flag == new_flag.has_negative_flag 65 | && existing_flag.documentation == new_flag.documentation 66 | && existing_flag.commands == new_flag.commands 67 | && existing_flag.abbreviation == new_flag.abbreviation 68 | && existing_flag.allows_multiple == new_flag.allows_multiple 69 | && existing_flag.effect_tags == new_flag.effect_tags 70 | && existing_flag.metadata_tags == new_flag.metadata_tags 71 | && existing_flag.documentation_category == new_flag.documentation_category 72 | && existing_flag.requires_value == new_flag.requires_value 73 | }); 74 | if let Some(existing_flag) = existing_flag_opt { 75 | existing_flag.bazel_versions.push(bazel_version.to_string()); 76 | } else { 77 | new_flag.bazel_versions.push(bazel_version.to_string()); 78 | existing_flags.push(new_flag); 79 | } 80 | } else { 81 | new_flag.bazel_versions.push(bazel_version.to_string()); 82 | flags.insert(new_flag.name.clone(), vec![new_flag]); 83 | } 84 | }); 85 | } 86 | 87 | fn main() -> Result<()> { 88 | let versions = [ 89 | "7.0.0", 90 | "7.0.1", 91 | "7.0.2", 92 | "7.1.0", 93 | "7.1.1", 94 | "7.1.2", 95 | "7.2.0", 96 | "7.2.1", 97 | "7.3.0", 98 | "7.3.1", 99 | "7.3.2", 100 | "7.4.0", 101 | "7.4.1", 102 | "7.5.0", 103 | "7.6.0", 104 | 
"7.6.1", 105 | "8.0.0", 106 | "8.0.1", 107 | "8.1.0", 108 | "8.1.1", 109 | "8.2.0", 110 | "8.2.1", 111 | "9.0.0-pre.20250317.2", 112 | ]; 113 | 114 | let cache_dir = env::current_dir().unwrap().join("bazel-flags-cache"); 115 | if !cache_dir.exists() { 116 | fs::create_dir(&cache_dir).expect("Failed to create cached directory"); 117 | } 118 | 119 | let mut flags_by_name = HashMap::>::new(); 120 | for version in versions { 121 | let flags_proto: Vec = dump_flags(&cache_dir, version); 122 | let flags = FlagCollection::decode(Bytes::from(flags_proto)) 123 | .expect("Failed to decode protobuf flags"); 124 | merge_flags_into(flags.flag_infos, &mut flags_by_name, version); 125 | } 126 | 127 | // Hack to workaround https://github.com/salesforce-misc/bazelrc-lsp/issues/2 128 | // Bazel used to have two `--watchfs` flags: A startup-flag and a build flag. 129 | // The build flag is mising from the flag-dumps of older Bazel versions, and only 130 | // the startup flag was included, which is marked as deprecated. Newer Bazel versions 131 | // only report the non-deprecated build flag. 132 | // 133 | // We "back-port" this fix to earlier Bazel versions by patching the flags here. 134 | let watchfs_flags = flags_by_name.remove("watchfs").unwrap(); 135 | let (mut deprecated_watchfs, mut non_deprecated_watchfs): (Vec<_>, Vec<_>) = watchfs_flags 136 | .into_iter() 137 | .partition(|f| f.metadata_tags.contains(&"DEPRECATED".to_string())); 138 | if !non_deprecated_watchfs.is_empty() { 139 | for flag in &mut deprecated_watchfs { 140 | non_deprecated_watchfs[0] 141 | .bazel_versions 142 | .append(&mut flag.bazel_versions); 143 | } 144 | } 145 | flags_by_name.insert("watchfs".to_string(), non_deprecated_watchfs); 146 | 147 | // Write the combined flags into a file 148 | let flag_list = flags_by_name 149 | .into_iter() 150 | .flat_map(|e| e.1) 151 | .collect::>(); 152 | let combined_flag_collection = FlagCollection { 153 | flag_infos: flag_list, 154 | all_bazel_versions: Vec::from(versions.map(|f| f.to_string())), 155 | }; 156 | let combined_proto = combined_flag_collection.encode_to_vec(); 157 | let compressed = lz4_flex::compress_prepend_size(&combined_proto); 158 | let out_dir_env = env::var_os("OUT_DIR").unwrap(); 159 | let out_dir = Path::new(&out_dir_env); 160 | let result_path: std::path::PathBuf = out_dir.join("bazel-flags-combined.data.lz4"); 161 | fs::write(result_path, compressed).expect("Failed to write combined flags to disk"); 162 | 163 | Ok(()) 164 | } 165 | -------------------------------------------------------------------------------- /examples/REPO.bazel: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/salesforce-misc/bazelrc-lsp/e59cf79c468b0ec6db24417fc717dee926302d4c/examples/REPO.bazel -------------------------------------------------------------------------------- /examples/edge_cases.bazelrc: -------------------------------------------------------------------------------- 1 | # This file tries out all the edge cases for bazelrc parsing. 
2 | 3 | # A \ 4 | multine \ 5 | comment 6 | 7 | # A couple of valid flags; also including a shorthand option 8 | build --keep_going --remote_cache= -k 9 | 10 | # Various syntaxes for config names 11 | build:myconfig --keep_going 12 | buil"d:my"config --keep_going 13 | build: --keep_going 14 | build:my\ -config --keep_going 15 | 16 | # Configs on `import` and `startup` are not valid 17 | startup:myconfig --autodetect_server_javabase 18 | import:myconfig "my_file.bazelr" 19 | import "%workspace%/file2.bazelrc" 20 | import "%workspace%/no_such_file.bazelrc" 21 | 22 | # `common` and `always` are special cases for bazelrc files 23 | common --keep_going 24 | always --keep_going 25 | 26 | # Invalid command 27 | clear --flag 28 | 29 | # A couple of valid flags 30 | build --keep_going 31 | build --notest_keep_going 32 | build --disk_cache= 33 | 34 | # Invalid flag 35 | build --some_unknown_flag 36 | # Deprecated flag 37 | startup --batch 38 | 39 | # Valid, combined flag 40 | build --copt --std=c++20 41 | 42 | # Custom flag names. Those are not validated at all, currently. 43 | build --//my/package:setting=foobar 44 | build --no//my/package:bool_flag 45 | build --@dependency:my/package:bool_flag 46 | build --no@dependency:my/package:bool_flag 47 | 48 | # Multiline flag lists 49 | build:myconfig \ 50 | --remote_cache \ 51 | path_to_remote_cache \ 52 | --disk_cache= -------------------------------------------------------------------------------- /examples/file2.bazelrc: -------------------------------------------------------------------------------- 1 | # `common` and `always` are special cases for bazelrc files 2 | common --keep_going -------------------------------------------------------------------------------- /examples/line-styles.bazelrc: -------------------------------------------------------------------------------- 1 | # Some people prefer a single flag per line 2 | build:linux --cxxopt=-std=c++17 3 | build:linux --host_cxxopt=-std=c++17 4 | build:macos --cxxopt=-std=c++17 5 | build:macos --host_cxxopt=-std=c++17 6 | build:windows --cxxopt=/std:c++17 7 | build:windows --host_cxxopt=/std:c++17 8 | test:ci --keep_going 9 | 10 | # Others might prefer them all on a single line 11 | build:linux --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 12 | build:macos --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 13 | build:windows --cxxopt=/std:c++17 --host_cxxopt=/std:c++17 14 | test:ci --keep_going 15 | 16 | # Some folks even discovered line continuations. 17 | build:linux \ 18 | --cxxopt=-std=c++17 \ 19 | --host_cxxopt=-std=c++17 20 | build:macos \ 21 | --cxxopt=-std=c++17 \ 22 | --host_cxxopt=-std=c++17 23 | build:windows \ 24 | --cxxopt=/std:c++17 \ 25 | --host_cxxopt=/std:c++17 26 | # But single flags should still be printed on the same line 27 | test:ci --keep_going 28 | 29 | # But nobody likes trailing empty lines. So those 30 | # are stripped when re-formatting the file. 
31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /examples/unformatted.bazelrc: -------------------------------------------------------------------------------- 1 | # Weird / unnecessary escapes 2 | build --x=a\ bc"1 2 3" 3 | 4 | # Weird whitespaces / new lines 5 | build --x 1 -------------------------------------------------------------------------------- /rust-toolchains.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "1.84.1" 3 | -------------------------------------------------------------------------------- /src/bazel_flags.rs: -------------------------------------------------------------------------------- 1 | use base64::prelude::*; 2 | use phf::phf_map; 3 | use prost::Message; 4 | use std::{collections::HashMap, io::Cursor, process::Command}; 5 | 6 | use crate::bazel_flags_proto::{FlagCollection, FlagInfo}; 7 | 8 | pub static COMMAND_DOCS: phf::Map<&'static str, &'static str> = phf_map! { 9 | // The command line docs, taken from the `bazel help` 10 | "analyze-profile" => "Analyzes build profile data.", 11 | "aquery" => "Analyzes the given targets and queries the action graph.", 12 | "build" => "Builds the specified targets.", 13 | "canonicalize-flags" => "Canonicalizes a list of bazel options.", 14 | "clean" => "Removes output files and optionally stops the server.", 15 | "coverage" => "Generates code coverage report for specified test targets.", 16 | "cquery" => "Loads, analyzes, and queries the specified targets w/ configurations.", 17 | "dump" => "Dumps the internal state of the bazel server process.", 18 | "fetch" => "Fetches external repositories that are prerequisites to the targets.", 19 | "help" => "Prints help for commands, or the index.", 20 | "info" => "Displays runtime info about the bazel server.", 21 | "license" => "Prints the license of this software.", 22 | "mobile-install" => "Installs targets to mobile devices.", 23 | "mod" => "Queries the Bzlmod external dependency graph", 24 | "print_action" => "Prints the command line args for compiling a file.", 25 | "query" => "Executes a dependency graph query.", 26 | "run" => "Runs the specified target.", 27 | "shutdown" => "Stops the bazel server.", 28 | "sync" => "Syncs all repositories specified in the workspace file", 29 | "test" => "Builds and runs the specified test targets.", 30 | "vendor" => "Fetches external repositories into a specific folder specified by the flag --vendor_dir.", 31 | "version" => "Prints version information for bazel.", 32 | // bazelrc specific commands. Taken from https://bazel.build/run/bazelrc 33 | "startup" => "Startup options, which go before the command, and are described in `bazel help startup_options`.", 34 | "common" => "Options that should be applied to all Bazel commands that support them. If a command does not support an option specified in this way, the option is ignored so long as it is valid for some other Bazel command. Note that this only applies to option names: If the current command accepts an option with the specified name, but doesn't support the specified value, it will fail.", 35 | "always" => "Options that apply to all Bazel commands. If a command does not support an option specified in this way, it will fail.", 36 | // Import. Documentation written by myself 37 | "import" => "Imports the given file. Fails if the file is not found.", 38 | "try-import" => "Tries to import the given file. 
Does not fail if the file is not found.", 39 | }; 40 | 41 | #[derive(Debug)] 42 | pub struct BazelFlags { 43 | pub commands: Vec, 44 | pub flags: Vec, 45 | pub flags_by_commands: HashMap>, 46 | pub flags_by_name: HashMap, 47 | pub flags_by_abbreviation: HashMap, 48 | } 49 | 50 | #[derive(Clone, Debug, PartialEq, Eq)] 51 | pub enum FlagLookupType { 52 | Normal, 53 | Abbreviation, 54 | OldName, 55 | } 56 | 57 | impl BazelFlags { 58 | pub fn from_flags(flags: Vec, bazel_version: Option<&str>) -> BazelFlags { 59 | // Index the flags from the protobuf description 60 | let mut flags_by_commands = HashMap::>::new(); 61 | let mut flags_by_name = HashMap::::new(); 62 | let mut flags_by_abbreviation = HashMap::::new(); 63 | for (i, f) in flags.iter().enumerate() { 64 | if bazel_version.is_some() 65 | && !f.bazel_versions.iter().any(|v| v == bazel_version.unwrap()) 66 | { 67 | continue; 68 | } 69 | flags_by_name.insert(f.name.clone(), i); 70 | if let Some(old_name) = &f.old_name { 71 | flags_by_name.insert(old_name.clone(), i); 72 | } 73 | for c in &f.commands { 74 | let list = flags_by_commands.entry(c.clone()).or_default(); 75 | list.push(i); 76 | } 77 | if let Some(abbreviation) = &f.abbreviation { 78 | flags_by_abbreviation.insert(abbreviation.clone(), i); 79 | } 80 | } 81 | 82 | // The `common` option is the union of all other options 83 | let mut common_flags = flags_by_commands 84 | .values() 85 | .flatten() 86 | .copied() 87 | .collect::>(); 88 | common_flags.sort(); 89 | common_flags.dedup(); 90 | flags_by_commands.insert("common".to_string(), common_flags.clone()); 91 | 92 | // For safe usage, the `always` option should be the intersection of all other options. 93 | // Using an option not supported by all commands would otherwise make some commands 94 | // unusable. But there are no options which are valid for *all* commands. 95 | // Hence, I am using the union of all flags. 
96 | flags_by_commands.insert("always".to_string(), common_flags); 97 | 98 | // Determine the list of supported commands 99 | let mut commands = flags_by_commands.keys().cloned().collect::>(); 100 | commands.extend(["import".to_string(), "try-import".to_string()]); 101 | 102 | BazelFlags { 103 | commands, 104 | flags, 105 | flags_by_commands, 106 | flags_by_name, 107 | flags_by_abbreviation, 108 | } 109 | } 110 | 111 | pub fn get_by_invocation(&self, s: &str) -> Option<(FlagLookupType, &FlagInfo)> { 112 | let stripped = s.strip_suffix('=').unwrap_or(s); 113 | // Long names 114 | if let Some(long_name) = stripped.strip_prefix("--") { 115 | if long_name.starts_with('-') { 116 | return None; 117 | } 118 | // Strip the `no` prefix, if any 119 | let stripped_no = long_name.strip_prefix("no").unwrap_or(long_name); 120 | return self.flags_by_name.get(stripped_no).map(|i| { 121 | let flag = self.flags.get(*i).unwrap(); 122 | let old_name = 123 | flag.old_name.is_some() && flag.old_name.as_ref().unwrap() == stripped_no; 124 | let lookup_mode = if old_name { 125 | FlagLookupType::OldName 126 | } else { 127 | FlagLookupType::Normal 128 | }; 129 | (lookup_mode, flag) 130 | }); 131 | } 132 | // Short names 133 | if let Some(abbreviation) = stripped.strip_prefix('-') { 134 | if abbreviation.starts_with('-') { 135 | return None; 136 | } 137 | return self 138 | .flags_by_abbreviation 139 | .get(abbreviation) 140 | .map(|i| (FlagLookupType::Abbreviation, self.flags.get(*i).unwrap())); 141 | } 142 | None 143 | } 144 | } 145 | 146 | pub fn load_packaged_bazel_flag_collection() -> FlagCollection { 147 | let bazel_flags_proto: &[u8] = 148 | include_bytes!(concat!(env!("OUT_DIR"), "/bazel-flags-combined.data.lz4")); 149 | let decompressed = lz4_flex::decompress_size_prepended(bazel_flags_proto).unwrap(); 150 | FlagCollection::decode(&mut Cursor::new(decompressed)).unwrap() 151 | } 152 | 153 | pub fn load_packaged_bazel_flags(bazel_version: &str) -> BazelFlags { 154 | BazelFlags::from_flags( 155 | load_packaged_bazel_flag_collection().flag_infos, 156 | Some(bazel_version), 157 | ) 158 | } 159 | 160 | pub fn load_bazel_flags_from_command(bazel_command: &str) -> Result { 161 | let result = Command::new(bazel_command) 162 | // Disable bazelrc loading. Otherwise, with an invalid bazelrc, the `bazel help` 163 | // command might fail. 
164 | .arg("--ignore_all_rc_files") 165 | .arg("help") 166 | .arg("flags-as-proto") 167 | .output() 168 | .map_err(|err| err.to_string())?; 169 | if !result.status.success() { 170 | let msg = format!( 171 | "exited with code {code}:\n===stdout===\n{stdout}\n===stderr===\n{stderr}", 172 | code = result.status.code().unwrap(), 173 | stdout = String::from_utf8_lossy(&result.stdout), 174 | stderr = String::from_utf8_lossy(&result.stderr) 175 | ); 176 | return Err(msg); 177 | } 178 | let flags_binary = BASE64_STANDARD.decode(&result.stdout).map_err(|_err| { 179 | format!( 180 | "failed to base64-decode output as base64: {}", 181 | String::from_utf8_lossy(&result.stdout) 182 | ) 183 | })?; 184 | let flags = FlagCollection::decode(&mut Cursor::new(flags_binary)) 185 | .map_err(|_err| "failed to decode protobuf flags")?; 186 | 187 | Ok(BazelFlags::from_flags(flags.flag_infos, None)) 188 | } 189 | 190 | fn escape_markdown(str: &str) -> String { 191 | let mut res = String::with_capacity(str.len()); 192 | for c in str.chars() { 193 | match c { 194 | '\\' => res.push_str("\\\\"), 195 | '`' => res.push_str("\\`"), 196 | '*' => res.push_str("\\*"), 197 | '_' => res.push_str("\\_"), 198 | '#' => res.push_str("\\#"), 199 | '+' => res.push_str("\\+"), 200 | '-' => res.push_str("\\-"), 201 | '.' => res.push_str("\\."), 202 | '!' => res.push_str("\\!"), 203 | '~' => res.push_str("\\~"), 204 | '{' => res.push_str("\\{"), 205 | '}' => res.push_str("\\}"), 206 | '[' => res.push_str("\\["), 207 | ']' => res.push_str("\\]"), 208 | '(' => res.push_str("\\("), 209 | ')' => res.push_str("\\)"), 210 | '<' => res.push_str("\\<"), 211 | '>' => res.push_str("\\>"), 212 | _ => res.push(c), 213 | } 214 | } 215 | res 216 | } 217 | 218 | // Combines flags names with their values based on the `requires_value` metadata 219 | pub fn combine_key_value_flags(lines: &mut [crate::parser::Line], bazel_flags: &BazelFlags) { 220 | use crate::parser::Flag; 221 | use crate::tokenizer::Spanned; 222 | for l in lines { 223 | let mut new_flags = Vec::::with_capacity(l.flags.len()); 224 | let mut i: usize = 0; 225 | while i < l.flags.len() { 226 | let flag = &l.flags[i]; 227 | new_flags.push( 228 | || -> Option> { 229 | let flag_name = &flag.name.as_ref()?.0; 230 | let (lookup_type, info) = bazel_flags.get_by_invocation(flag_name)?; 231 | if flag.value.is_some() || lookup_type == FlagLookupType::Abbreviation { 232 | // If we already have an associated value or if the flag was referred to 233 | // using it's abbreviated name, we don't combine the flag. 234 | // Note that the `-c=opt` would be invalid, only `-c opt` is valid. 
235 | return flag.value.clone(); 236 | } else if info.requires_value() { 237 | // Combine with the next flag 238 | let next_flag = &l.flags.get(i + 1)?; 239 | i += 1; 240 | if let Some(next_name) = &next_flag.name { 241 | if let Some(next_value) = &next_flag.value { 242 | let combined_str = next_name.0.clone() + "=" + &next_value.0; 243 | let combined_span = crate::tokenizer::Span { 244 | start: next_name.1.start, 245 | end: next_value.1.end, 246 | }; 247 | return Some((combined_str, combined_span)); 248 | } else { 249 | return Some(next_name.clone()); 250 | } 251 | } else if let Some(next_value) = &next_flag.value { 252 | return Some(next_value.clone()); 253 | } 254 | } 255 | None 256 | }() 257 | .map(|new_value| Flag { 258 | name: flag.name.clone(), 259 | value: Some(new_value), 260 | }) 261 | .unwrap_or_else(|| flag.clone()), 262 | ); 263 | i += 1; 264 | } 265 | l.flags = new_flags; 266 | } 267 | } 268 | 269 | impl FlagInfo { 270 | pub fn is_deprecated(&self) -> bool { 271 | self.metadata_tags.iter().any(|t| t == "DEPRECATED") 272 | } 273 | 274 | pub fn is_noop(&self) -> bool { 275 | self.effect_tags.iter().any(|t| t == "NO_OP") 276 | } 277 | 278 | pub fn supports_command(&self, command: &str) -> bool { 279 | command == "common" || command == "always" || self.commands.iter().any(|c| c == command) 280 | } 281 | 282 | pub fn get_documentation_markdown(&self) -> String { 283 | let mut result = String::new(); 284 | 285 | // First line: Flag name and short hand (if any) 286 | result += format!("`--{}`", self.name).as_str(); 287 | if let Some(abbr) = &self.abbreviation { 288 | result += format!(" [`-{}`]", abbr).as_str(); 289 | } 290 | if self.has_negative_flag() { 291 | result += format!(", `--no{}`", self.name).as_str(); 292 | } 293 | // Followed by the documentation text 294 | if let Some(doc) = &self.documentation { 295 | result += "\n\n"; 296 | result += &escape_markdown(&doc.as_str().replace("%{product}", "Bazel")); 297 | } 298 | // And a list of tags 299 | result += "\n\n"; 300 | if !self.effect_tags.is_empty() { 301 | result += "Effect tags: "; 302 | result += self 303 | .effect_tags 304 | .iter() 305 | .map(|t| t.to_lowercase()) 306 | .collect::>() 307 | .join(", ") 308 | .as_str(); 309 | result += "\n"; 310 | } 311 | if !self.metadata_tags.is_empty() { 312 | result += "Tags: "; 313 | result += self 314 | .metadata_tags 315 | .iter() 316 | .map(|t| t.to_lowercase()) 317 | .collect::>() 318 | .join(", ") 319 | .as_str(); 320 | result += "\n"; 321 | } 322 | if let Some(catgegory) = &self.documentation_category { 323 | result += format!("Category: {}\n", catgegory.to_lowercase()).as_str(); 324 | } 325 | 326 | result 327 | } 328 | } 329 | 330 | #[test] 331 | fn test_flags() { 332 | let flags = load_packaged_bazel_flags("7.1.0"); 333 | let commands = flags.flags_by_commands.keys().cloned().collect::>(); 334 | assert!(commands.contains(&"build".to_string())); 335 | assert!(commands.contains(&"clean".to_string())); 336 | assert!(commands.contains(&"test".to_string())); 337 | assert!(commands.contains(&"common".to_string())); 338 | 339 | // Can lookup a flag by its invocation 340 | let preemptible_info = flags.get_by_invocation("--preemptible"); 341 | assert_eq!( 342 | preemptible_info 343 | .unwrap() 344 | .1 345 | .commands 346 | .iter() 347 | .map(|n| n.to_string()) 348 | .collect::>(), 349 | vec!("startup") 350 | ); 351 | 352 | // Supports both short and long forms 353 | assert_eq!( 354 | flags.get_by_invocation("-k").unwrap().0, 355 | FlagLookupType::Abbreviation 356 | ); 357 | 
assert_eq!( 358 | flags.get_by_invocation("--keep_going").unwrap().0, 359 | FlagLookupType::Normal 360 | ); 361 | assert_eq!( 362 | flags.get_by_invocation("-k").unwrap().1, 363 | flags.get_by_invocation("--keep_going").unwrap().1 364 | ); 365 | 366 | // The `remote_cache` is valid for at least one command. Hence, it should be in `common`. 367 | assert!(flags 368 | .flags_by_commands 369 | .get("common") 370 | .unwrap() 371 | .iter() 372 | .any(|id| { flags.flags[*id].name == "remote_cache" })); 373 | assert!(flags 374 | .flags_by_commands 375 | .get("always") 376 | .unwrap() 377 | .iter() 378 | .any(|id| flags.flags[*id].name == "remote_cache")); 379 | } 380 | 381 | // Test that different flags are available in different Bazel versions 382 | #[test] 383 | fn test_flag_versions() { 384 | let bazel7_flags = load_packaged_bazel_flags("7.0.0"); 385 | let bazel8_flags = load_packaged_bazel_flags("8.0.0"); 386 | let bazel9_flags = load_packaged_bazel_flags("9.0.0"); 387 | 388 | // `python3_path` was removed in Bazel 8 389 | assert!(bazel7_flags.flags_by_name.contains_key("python3_path")); 390 | assert!(!bazel8_flags.flags_by_name.contains_key("python3_path")); 391 | assert!(!bazel9_flags.flags_by_name.contains_key("python3_path")); 392 | } 393 | -------------------------------------------------------------------------------- /src/bazel_flags_proto.rs: -------------------------------------------------------------------------------- 1 | // Based on https://github.com/bazelbuild/bazel/blob/master/src/main/protobuf/bazel_flags.proto 2 | // Originally generated via prost-build. 3 | // This file contains additional modifications to store 4 | // supported Bazel version ranges. 5 | 6 | #[derive(Clone, PartialEq, ::prost::Message)] 7 | pub struct FlagInfo { 8 | /// Name of the flag, without leading dashes. 9 | #[prost(string, required, tag = "1")] 10 | pub name: ::prost::alloc::string::String, 11 | /// True if --noname exists, too. 12 | #[prost(bool, optional, tag = "2", default = "false")] 13 | pub has_negative_flag: ::core::option::Option<bool>, 14 | /// Help text of the flag. 15 | #[prost(string, optional, tag = "3")] 16 | pub documentation: ::core::option::Option<::prost::alloc::string::String>, 17 | /// List of supported Bazel commands, e.g. \['build', 'test'\] 18 | #[prost(string, repeated, tag = "4")] 19 | pub commands: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, 20 | /// Flag name abbreviation, without leading dash. 21 | #[prost(string, optional, tag = "5")] 22 | pub abbreviation: ::core::option::Option<::prost::alloc::string::String>, 23 | /// True if a flag is allowed to occur multiple times in a single arg list. 24 | #[prost(bool, optional, tag = "6", default = "false")] 25 | pub allows_multiple: ::core::option::Option<bool>, 26 | /// The effect tags associated with the flag 27 | #[prost(string, repeated, tag = "7")] 28 | pub effect_tags: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, 29 | /// The metadata tags associated with the flag 30 | #[prost(string, repeated, tag = "8")] 31 | pub metadata_tags: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, 32 | /// The documentation category assigned to this flag 33 | #[prost(string, optional, tag = "9")] 34 | pub documentation_category: ::core::option::Option<::prost::alloc::string::String>, 35 | /// Whether the flag requires a value. 36 | /// If false, value-less invocations are acceptable, e.g. --subcommands, 37 | /// but if true a value must be present for all instantiations of the flag, 38 | /// e.g. --jobs=100.
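/// The language server relies on this to decide whether a separate `--flag value`
/// pair in a bazelrc line should be combined into `--flag=value` (see `combine_key_value_flags`).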
39 | #[prost(bool, optional, tag = "10")] 40 | pub requires_value: ::core::option::Option<bool>, 41 | // The old, deprecated name for this option, without leading dashes. 42 | // TODO: Fix the tag number after the upstream Bazel change got merged. 43 | // See https://github.com/bazelbuild/bazel/pull/25169 44 | #[prost(string, optional, tag = "99998")] 45 | pub old_name: Option<::prost::alloc::string::String>, 46 | // The deprecation warning for this option, if one is present. 47 | // TODO: Fix the tag number after the upstream Bazel change got merged. 48 | // See https://github.com/bazelbuild/bazel/pull/25169 49 | #[prost(string, optional, tag = "99999")] 50 | pub deprecation_warning: Option<::prost::alloc::string::String>, 51 | 52 | /// EXTENSION: List of Bazel versions this flag applies to 53 | #[prost(string, repeated, tag = "999")] 54 | pub bazel_versions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, 55 | } 56 | 57 | #[derive(Clone, PartialEq, ::prost::Message)] 58 | pub struct FlagCollection { 59 | #[prost(message, repeated, tag = "1")] 60 | pub flag_infos: ::prost::alloc::vec::Vec<FlagInfo>, 61 | /// EXTENSION: List of Bazel versions indexed in this collection 62 | #[prost(string, repeated, tag = "999")] 63 | pub all_bazel_versions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, 64 | } 65 | -------------------------------------------------------------------------------- /src/bazel_version.rs: -------------------------------------------------------------------------------- 1 | use once_cell::sync::Lazy; 2 | 3 | use crate::{bazel_flags::load_packaged_bazel_flag_collection, file_utils::get_workspace_path}; 4 | use std::{env, fs, path::Path}; 5 | 6 | #[derive(Debug, PartialEq)] 7 | struct BazelVersion { 8 | major: i16, 9 | minor: i16, 10 | patch: i16, 11 | fork_owner: Option<String>, 12 | pre_release: Option<String>, 13 | } 14 | 15 | type BazelVersionTuple = (i16, i16, i16, Option<String>, Option<String>); 16 | 17 | impl BazelVersion { 18 | fn as_tuple(&self) -> BazelVersionTuple { 19 | ( 20 | self.major, 21 | self.minor, 22 | self.patch, 23 | self.fork_owner.clone(), 24 | self.pre_release.clone(), 25 | ) 26 | } 27 | } 28 | 29 | // Parse a Bazel version string into a `BazelVersion` 30 | // Use "99", i.e. the highest possible version, if a part is missing 31 | // Parses pre-release versions, e.g. "pre.20240925.4" from "8.0.0-pre.20240925.4" 32 | // Parses forked versions and stores the fork owner, e.g.
"GitHubUser" from "GitHubUser/8.0.0" 33 | fn parse_bazel_version(full_version_str: &str) -> Option { 34 | let (fork_owner, version_str) = match full_version_str.split_once('/') { 35 | Some((fork_owner, version_str)) => (Some(fork_owner.to_string()), version_str), 36 | None => (None, full_version_str), 37 | }; 38 | let (version_str, pre_release) = match version_str.split_once('-') { 39 | Some((version_str, pre_release)) => (version_str, Some(pre_release.to_string())), 40 | None => (version_str, None), 41 | }; 42 | let mut parts = version_str.split('.'); 43 | let major = parts.next()?.parse::().ok()?; 44 | let minor_str = parts.next().unwrap_or(""); 45 | if minor_str == "*" || minor_str == "+" { 46 | return Some(BazelVersion { 47 | major, 48 | minor: 99, 49 | patch: 0, 50 | fork_owner, 51 | pre_release, 52 | }); 53 | } 54 | let minor = minor_str.parse::().unwrap_or(0); 55 | let patch_digits = parts 56 | .next() 57 | .unwrap_or("") 58 | .chars() 59 | .take_while(|c| c.is_ascii_digit()) 60 | .collect::(); 61 | let patch = patch_digits.parse::().unwrap_or(99); 62 | Some(BazelVersion { 63 | major, 64 | minor, 65 | patch, 66 | fork_owner, 67 | pre_release, 68 | }) 69 | } 70 | 71 | // Find the closest available Bazel version 72 | pub fn find_closest_version( 73 | available_version_strs: &[String], 74 | version_hint_str: &str, 75 | ) -> (String, Option) { 76 | let mut available_versions = available_version_strs 77 | .iter() 78 | .map(|s| (parse_bazel_version(s).unwrap().as_tuple(), s)) 79 | .collect::>(); 80 | available_versions.sort(); 81 | let bazel_version = if let Some(version_hint) = parse_bazel_version(version_hint_str) { 82 | let match_idx = available_versions.partition_point(|e| e.0 <= version_hint.as_tuple()); 83 | available_versions[match_idx.saturating_sub(1)].1.clone() 84 | } else { 85 | available_versions.last().unwrap().1.clone() 86 | }; 87 | 88 | if bazel_version == version_hint_str { 89 | (bazel_version, None) 90 | } else { 91 | let message = format!( 92 | "Using flags from Bazel {} because flags for version {} are not available", 93 | bazel_version, version_hint_str 94 | ); 95 | (bazel_version, Some(message)) 96 | } 97 | } 98 | 99 | // Use the Bazelisk logic to figure out the Bazel version 100 | // Ref: https://github.com/bazelbuild/bazelisk/blob/1f9a1aca958cdb50b4adb84b15cdda55a600ed31/README.md?plain=1#L45-L47 101 | pub fn determine_bazelisk_version(path: &Path) -> Option { 102 | if let Ok(version_str) = env::var("USE_BAZEL_VERSION") { 103 | return Some(version_str.trim().to_string()); 104 | } 105 | let workspace_root = get_workspace_path(path)?; 106 | if let Ok(bazeliskrc) = fs::read_to_string(workspace_root.join(".bazeliskrc")) { 107 | for line in bazeliskrc.split('\n') { 108 | if line.starts_with("USE_BAZEL_VERSION=") { 109 | let version_str = &line.split_once('=').unwrap().1; 110 | return Some(version_str.trim().to_string()); 111 | } 112 | } 113 | } 114 | if let Ok(bazelversion) = fs::read_to_string(workspace_root.join(".bazelversion")) { 115 | return Some(bazelversion.trim().to_string()); 116 | } 117 | 118 | None 119 | } 120 | 121 | pub static AVAILABLE_BAZEL_VERSIONS: Lazy> = 122 | Lazy::new(|| load_packaged_bazel_flag_collection().all_bazel_versions); 123 | 124 | #[test] 125 | fn test_parse_bazel_version() { 126 | assert_eq!( 127 | parse_bazel_version("7.1.2"), 128 | Some(BazelVersion { 129 | major: 7, 130 | minor: 1, 131 | patch: 2, 132 | fork_owner: None, 133 | pre_release: None 134 | }) 135 | ); 136 | assert_eq!( 137 | parse_bazel_version("7.*"), 138 | Some(BazelVersion 
{ 139 | major: 7, 140 | minor: 99, 141 | patch: 0, 142 | fork_owner: None, 143 | pre_release: None 144 | }) 145 | ); 146 | assert_eq!( 147 | parse_bazel_version("7.+"), 148 | Some(BazelVersion { 149 | major: 7, 150 | minor: 99, 151 | patch: 0, 152 | fork_owner: None, 153 | pre_release: None 154 | }) 155 | ); 156 | assert_eq!( 157 | parse_bazel_version("7."), 158 | Some(BazelVersion { 159 | major: 7, 160 | minor: 0, 161 | patch: 99, 162 | fork_owner: None, 163 | pre_release: None 164 | }) 165 | ); 166 | assert_eq!( 167 | parse_bazel_version("7"), 168 | Some(BazelVersion { 169 | major: 7, 170 | minor: 0, 171 | patch: 99, 172 | fork_owner: None, 173 | pre_release: None 174 | }) 175 | ); 176 | assert_eq!( 177 | parse_bazel_version("8.1.1rc3"), 178 | Some(BazelVersion { 179 | major: 8, 180 | minor: 1, 181 | patch: 1, 182 | fork_owner: None, 183 | pre_release: None 184 | }) 185 | ); 186 | assert_eq!( 187 | parse_bazel_version("9.0.0-pre.20210317.1"), 188 | Some(BazelVersion { 189 | major: 9, 190 | minor: 0, 191 | patch: 0, 192 | fork_owner: None, 193 | pre_release: Some("pre.20210317.1".to_string()) 194 | }) 195 | ); 196 | assert_eq!( 197 | parse_bazel_version("GitHubUser/8.0.0"), 198 | Some(BazelVersion { 199 | major: 8, 200 | minor: 0, 201 | patch: 0, 202 | fork_owner: Some("GitHubUser".to_string()), 203 | pre_release: None 204 | }) 205 | ); 206 | assert_eq!( 207 | parse_bazel_version("GitHubUser/9.1.2-pre.20210317.1"), 208 | Some(BazelVersion { 209 | major: 9, 210 | minor: 1, 211 | patch: 2, 212 | fork_owner: Some("GitHubUser".to_string()), 213 | pre_release: Some("pre.20210317.1".to_string()) 214 | }) 215 | ); 216 | } 217 | 218 | #[test] 219 | fn test_find_closest_version() { 220 | let versions = [ 221 | "7.0.0", 222 | "7.0.1", 223 | "7.0.2", 224 | "7.1.0", 225 | "7.1.1", 226 | "7.1.2", 227 | "7.2.0", 228 | "8.0.0", 229 | "8.0.1", 230 | "9.0.0-pre.20250121.1", 231 | ]; 232 | let version_strings = versions.map(|s| s.to_string()); 233 | // Versions with an exact match 234 | assert_eq!(find_closest_version(&version_strings, "7.1.1").0, "7.1.1"); 235 | assert_eq!(find_closest_version(&version_strings, "7.2.0").0, "7.2.0"); 236 | // An outdated version for which we no longer provide flags data 237 | assert_eq!(find_closest_version(&version_strings, "5.0.0").0, "7.0.0"); 238 | assert_eq!(find_closest_version(&version_strings, "5.1.1").0, "7.0.0"); 239 | // Release candidate versions 240 | assert_eq!( 241 | find_closest_version(&version_strings, "7.1.1rc2").0, 242 | "7.1.1" 243 | ); 244 | assert_eq!( 245 | find_closest_version(&version_strings, "7.1.2rc2").0, 246 | "7.1.2" 247 | ); 248 | assert_eq!( 249 | find_closest_version(&version_strings, "7.1.2-pre.123434").0, 250 | "7.1.2" 251 | ); 252 | // A newer patch version for which we don't have flags, yet 253 | assert_eq!(find_closest_version(&version_strings, "7.1.4").0, "7.1.2"); 254 | assert_eq!(find_closest_version(&version_strings, "7.2.3").0, "7.2.0"); 255 | assert_eq!(find_closest_version(&version_strings, "8.0.2").0, "8.0.1"); 256 | // A newer version, where we only have a pre-release version 257 | assert_eq!( 258 | find_closest_version(&version_strings, "9.1.2").0, 259 | "9.0.0-pre.20250121.1" 260 | ); 261 | // A partial version specification 262 | assert_eq!(find_closest_version(&version_strings, "7.*").0, "7.2.0"); 263 | assert_eq!(find_closest_version(&version_strings, "7.+").0, "7.2.0"); 264 | assert_eq!(find_closest_version(&version_strings, "7.1").0, "7.1.2"); 265 | assert_eq!( 266 | find_closest_version(&version_strings, "latest").0, 
267 | "9.0.0-pre.20250121.1" 268 | ); 269 | assert_eq!( 270 | find_closest_version(&version_strings, "latest-1").0, 271 | "9.0.0-pre.20250121.1" 272 | ); 273 | } 274 | -------------------------------------------------------------------------------- /src/completion.rs: -------------------------------------------------------------------------------- 1 | use ropey::Rope; 2 | use tower_lsp::lsp_types::{ 3 | CompletionItem, CompletionItemTag, CompletionTextEdit, Documentation, MarkupContent, 4 | MarkupKind, Range, TextEdit, 5 | }; 6 | 7 | use crate::{ 8 | bazel_flags::{BazelFlags, COMMAND_DOCS}, 9 | bazel_flags_proto::FlagInfo, 10 | line_index::{IndexEntryKind, IndexedLines}, 11 | lsp_utils::range_to_lsp, 12 | tokenizer::Span, 13 | }; 14 | 15 | fn complete_bazel_command(bazel_flags: &BazelFlags) -> Vec { 16 | bazel_flags 17 | .commands 18 | .iter() 19 | .map(|cmd| CompletionItem { 20 | label: cmd.clone(), 21 | commit_characters: Some(vec![':'.to_string()]), 22 | documentation: get_command_documentation(cmd), 23 | ..Default::default() 24 | }) 25 | .collect::>() 26 | } 27 | 28 | fn complete_bazel_flag( 29 | bazel_flags: &BazelFlags, 30 | command: &str, 31 | range: Range, 32 | ) -> Vec { 33 | let exisiting_flags = bazel_flags.flags_by_commands.get(command); 34 | 35 | if exisiting_flags.is_none() { 36 | return vec![]; 37 | } 38 | 39 | let relevant_flags = exisiting_flags 40 | .unwrap() 41 | .iter() 42 | .map(|i| &bazel_flags.flags[*i]) 43 | // Hide undocumented flags 44 | .filter(|f| f.documentation_category != Some("UNDOCUMENTED".to_string())); 45 | 46 | let create_completion_item = 47 | |label: String, new_text: String, flag: &FlagInfo, commit_characters: Vec| { 48 | let tags = if flag.is_deprecated() || flag.is_noop() { 49 | Some(vec![CompletionItemTag::DEPRECATED]) 50 | } else { 51 | None 52 | }; 53 | CompletionItem { 54 | label, 55 | documentation: get_flag_documentation(flag), 56 | filter_text: Some(new_text.clone()), 57 | text_edit: Some(CompletionTextEdit::Edit(TextEdit { range, new_text })), 58 | commit_characters: Some(commit_characters), 59 | tags, 60 | deprecated: Some(flag.is_deprecated()), 61 | ..Default::default() 62 | } 63 | }; 64 | 65 | // The Bazel flags themselves... 66 | let mut completion_items: Vec = Vec::::new(); 67 | completion_items.extend(relevant_flags.clone().map(|flag| { 68 | let new_text = format!("--{}", flag.name); 69 | create_completion_item(flag.name.clone(), new_text, flag, vec!["=".to_string()]) 70 | })); 71 | 72 | // ... and their negations 73 | completion_items.extend( 74 | relevant_flags 75 | .filter(|flag| flag.has_negative_flag()) 76 | .map(|flag| { 77 | let label = format!("no{}", flag.name.clone()); 78 | let new_text = format!("--no{}", flag.name); 79 | create_completion_item(label, new_text, flag, vec![]) 80 | }), 81 | ); 82 | 83 | completion_items 84 | } 85 | 86 | pub fn get_completion_items( 87 | bazel_flags: &BazelFlags, 88 | rope: &Rope, 89 | index: &IndexedLines, 90 | pos: usize, 91 | ) -> Vec { 92 | // For completion, the indices point between characters and not 93 | // at characters. We are generally interested in the token so far 94 | // *before* the cursor. Hence, we lookup `pos - 1` and not `pos`. 
95 | let lookup_pos = if pos == 0 { 0 } else { pos - 1 }; 96 | if let Some(entry) = index.find_symbol_at_position(lookup_pos) { 97 | let line = index.lines.get(entry.line_nr).unwrap(); 98 | // Complete the item which the user is currently typing 99 | match entry.kind { 100 | IndexEntryKind::Command => complete_bazel_command(bazel_flags), 101 | IndexEntryKind::Config => vec![], 102 | IndexEntryKind::FlagName(_) => { 103 | if let Some(cmd) = &line.command { 104 | complete_bazel_flag( 105 | bazel_flags, 106 | &cmd.0, 107 | range_to_lsp(rope, &entry.span).unwrap(), 108 | ) 109 | } else { 110 | // A flag should never be on a line without a command 111 | // Don't auto-complete in this case, to not worsen 112 | // any mistakes already made. 113 | vec![] 114 | } 115 | } 116 | IndexEntryKind::FlagValue(_) => vec![], 117 | } 118 | } else if let Some(line) = index.find_line_at_position(lookup_pos) { 119 | // Not within any item, but on an existing line. 120 | if let Some(cmd) = &line.command { 121 | complete_bazel_flag( 122 | bazel_flags, 123 | &cmd.0, 124 | range_to_lsp( 125 | rope, 126 | &Span { 127 | start: pos, 128 | end: pos, 129 | }, 130 | ) 131 | .unwrap(), 132 | ) 133 | } else { 134 | vec![] 135 | } 136 | } else { 137 | // Outside any existing line, i.e. on a completely empty line 138 | // Complete the bazel command since that has to be at the beginning 139 | // of every line 140 | complete_bazel_command(bazel_flags) 141 | } 142 | } 143 | 144 | fn get_flag_documentation(flag: &crate::bazel_flags_proto::FlagInfo) -> Option { 145 | let mc = MarkupContent { 146 | kind: MarkupKind::Markdown, 147 | value: flag.get_documentation_markdown(), 148 | }; 149 | Some(Documentation::MarkupContent(mc)) 150 | } 151 | 152 | fn get_command_documentation(command: &str) -> Option { 153 | COMMAND_DOCS.get(command).map(|docs| { 154 | Documentation::MarkupContent(MarkupContent { 155 | kind: MarkupKind::Markdown, 156 | value: docs.to_string(), 157 | }) 158 | }) 159 | } 160 | -------------------------------------------------------------------------------- /src/definition.rs: -------------------------------------------------------------------------------- 1 | use std::path::Path; 2 | 3 | use tower_lsp::lsp_types::*; 4 | 5 | use crate::{file_utils::resolve_bazelrc_path, line_index::IndexEntryKind, parser::Line}; 6 | 7 | pub fn get_definitions( 8 | file_path: &Path, 9 | kind: &IndexEntryKind, 10 | line: &Line, 11 | ) -> Option { 12 | match kind { 13 | IndexEntryKind::FlagValue(flag_nr) => { 14 | let flag = &line.flags[*flag_nr]; 15 | let command_name = &line.command.as_ref()?.0; 16 | if line.flags.len() != 1 { 17 | return None; 18 | } 19 | if *command_name != "import" && *command_name != "try-import" { 20 | return None; 21 | } 22 | 23 | let flag_value = &flag.value.as_ref()?.0; 24 | let path = resolve_bazelrc_path(file_path, flag_value)?; 25 | let url = Url::from_file_path(path).ok()?; 26 | Some(GotoDefinitionResponse::Scalar(Location { 27 | uri: url, 28 | range: Range { 29 | start: Position { 30 | line: 0, 31 | character: 0, 32 | }, 33 | end: Position { 34 | line: 0, 35 | character: 0, 36 | }, 37 | }, 38 | })) 39 | } 40 | _ => None, 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/diagnostic.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Write as _; 2 | use std::{ops::Deref, path::Path}; 3 | 4 | use chumsky::error::Rich; 5 | use regex::Regex; 6 | use ropey::Rope; 7 | use tower_lsp::lsp_types::{Diagnostic, 
DiagnosticSeverity, DiagnosticTag}; 8 | 9 | use crate::tokenizer::Span; 10 | use crate::{ 11 | bazel_flags::{combine_key_value_flags, BazelFlags, FlagLookupType}, 12 | file_utils::resolve_bazelrc_path, 13 | lsp_utils::range_to_lsp, 14 | parser::{parse_from_str, Line, ParserResult}, 15 | }; 16 | 17 | pub fn diagnostics_from_parser<'a>( 18 | rope: &'a Rope, 19 | errors: &'a [Rich<'a, char>], 20 | ) -> impl Iterator + 'a { 21 | errors.iter().filter_map(move |item| { 22 | let (message, err_span) = match item.reason() { 23 | chumsky::error::RichReason::ExpectedFound { expected, found } => { 24 | let mut s = String::new(); 25 | if let Some(found) = found { 26 | write!(s, "Found {}", found.deref()).unwrap(); 27 | } else { 28 | write!(&mut s, "Unexpected end of input").unwrap(); 29 | } 30 | write!(&mut s, ", expected ").unwrap(); 31 | match &expected[..] { 32 | [] => { 33 | write!(s, "something else").unwrap(); 34 | } 35 | [expected] => { 36 | write!(s, "{}", expected).unwrap(); 37 | } 38 | _ => { 39 | for expected in &expected[..expected.len() - 1] { 40 | write!(s, "{}", expected).unwrap(); 41 | write!(s, ", ").unwrap(); 42 | } 43 | write!(s, "or ").unwrap(); 44 | write!(s, "{}", expected.last().unwrap()).unwrap(); 45 | } 46 | } 47 | (s, item.span()) 48 | } 49 | chumsky::error::RichReason::Custom(msg) => (msg.to_string(), item.span()), 50 | }; 51 | 52 | let span = &Span { 53 | start: err_span.start, 54 | end: err_span.end, 55 | }; 56 | || -> Option { 57 | Some(Diagnostic::new_simple(range_to_lsp(rope, span)?, message)) 58 | }() 59 | }) 60 | } 61 | 62 | const SKIPPED_PREFIXES: [&str; 4] = ["--//", "--no//", "--@", "--no@"]; 63 | 64 | fn diagnostics_for_flags(rope: &Rope, line: &Line, bazel_flags: &BazelFlags) -> Vec { 65 | let mut diagnostics: Vec = Vec::::new(); 66 | let command = &line.command.as_ref().unwrap().0; 67 | for flag in &line.flags { 68 | if let Some(name) = &flag.name { 69 | if SKIPPED_PREFIXES 70 | .iter() 71 | .any(|prefix| name.0.starts_with(prefix)) 72 | { 73 | // Don't diagnose custom settings at all 74 | } else if let Some((lookup_type, flag_description)) = 75 | bazel_flags.get_by_invocation(&name.0) 76 | { 77 | // Diagnose flags used on the wrong command 78 | if !flag_description.supports_command(command) { 79 | diagnostics.push(Diagnostic::new_simple( 80 | range_to_lsp(rope, &name.1).unwrap(), 81 | format!("The flag {:?} is not supported for {:?}. 
It is supported for {:?} commands, though.", name.0, command, flag_description.commands), 82 | )) 83 | } 84 | // Diagnose deprecated options 85 | if flag_description.is_deprecated() { 86 | diagnostics.push(Diagnostic { 87 | range: range_to_lsp(rope, &name.1).unwrap(), 88 | message: format!("The flag {:?} is deprecated.", name.0), 89 | severity: Some(DiagnosticSeverity::WARNING), 90 | tags: Some(vec![DiagnosticTag::DEPRECATED]), 91 | ..Default::default() 92 | }); 93 | } else if flag_description.is_noop() { 94 | diagnostics.push(Diagnostic { 95 | range: range_to_lsp(rope, &name.1).unwrap(), 96 | message: format!("The flag {:?} is a no-op.", name.0), 97 | severity: Some(DiagnosticSeverity::WARNING), 98 | ..Default::default() 99 | }); 100 | } else if lookup_type == FlagLookupType::OldName { 101 | diagnostics.push(Diagnostic { 102 | range: range_to_lsp(rope, &name.1).unwrap(), 103 | message: format!( 104 | "The flag {:?} was renamed to \"--{}\".", 105 | name.0, flag_description.name 106 | ), 107 | tags: Some(vec![DiagnosticTag::DEPRECATED]), 108 | severity: Some(DiagnosticSeverity::WARNING), 109 | ..Default::default() 110 | }); 111 | } else if lookup_type == FlagLookupType::Abbreviation { 112 | diagnostics.push(Diagnostic { 113 | range: range_to_lsp(rope, &name.1).unwrap(), 114 | message: format!( 115 | "Use the full name {:?} instead of its abbreviation.", 116 | flag_description.name 117 | ), 118 | severity: Some(DiagnosticSeverity::WARNING), 119 | ..Default::default() 120 | }); 121 | } 122 | } else { 123 | // Diagnose unknown flags 124 | diagnostics.push(Diagnostic::new_simple( 125 | range_to_lsp(rope, &name.1).unwrap(), 126 | format!("Unknown flag {:?}", name.0), 127 | )) 128 | } 129 | } 130 | } 131 | diagnostics 132 | } 133 | 134 | fn diagnostics_for_imports(rope: &Rope, line: &Line, base_path: Option<&Path>) -> Vec { 135 | let mut diagnostics: Vec = Vec::::new(); 136 | let command = line.command.as_ref().unwrap(); 137 | if line.flags.is_empty() { 138 | diagnostics.push(Diagnostic::new_simple( 139 | range_to_lsp(rope, &command.1).unwrap(), 140 | "Missing file path".to_string(), 141 | )) 142 | } else if line.flags.len() > 1 { 143 | diagnostics.push(Diagnostic::new_simple( 144 | range_to_lsp(rope, &command.1).unwrap(), 145 | format!( 146 | "`{}` expects a single file name, but received multiple arguments", 147 | command.0 148 | ), 149 | )) 150 | } else { 151 | let flag = &line.flags[0]; 152 | if flag.name.is_some() { 153 | diagnostics.push(Diagnostic::new_simple( 154 | range_to_lsp(rope, &command.1).unwrap(), 155 | format!("`{}` expects a file name, not a flag name", command.0), 156 | )) 157 | } 158 | if let Some(act_base_path) = base_path { 159 | if let Some(value) = flag.value.as_ref() { 160 | let severity = if command.0 == "try-import" { 161 | DiagnosticSeverity::WARNING 162 | } else { 163 | DiagnosticSeverity::ERROR 164 | }; 165 | let opt_path = resolve_bazelrc_path(act_base_path, &value.0); 166 | if let Some(path) = opt_path { 167 | if !path.exists() { 168 | diagnostics.push(Diagnostic { 169 | range: range_to_lsp(rope, &value.1).unwrap(), 170 | message: "Imported file does not exist".to_string(), 171 | severity: Some(severity), 172 | ..Default::default() 173 | }) 174 | } else if !path.is_file() { 175 | diagnostics.push(Diagnostic { 176 | range: range_to_lsp(rope, &value.1).unwrap(), 177 | message: "Imported path exists, but is not a file".to_string(), 178 | severity: Some(severity), 179 | ..Default::default() 180 | }) 181 | } 182 | } else { 183 | diagnostics.push(Diagnostic { 184 | 
range: range_to_lsp(rope, &value.1).unwrap(), 185 | message: "Unable to resolve file name".to_string(), 186 | severity: Some(severity), 187 | ..Default::default() 188 | }) 189 | } 190 | } 191 | } 192 | } 193 | diagnostics 194 | } 195 | 196 | pub fn diagnostics_from_rcconfig( 197 | rope: &Rope, 198 | lines: &[Line], 199 | bazel_flags: &BazelFlags, 200 | file_path: Option<&Path>, 201 | ) -> Vec { 202 | let config_regex = Regex::new(r"^[a-z_][a-z0-9]*(?:[-_][a-z0-9]+)*$").unwrap(); 203 | let mut diagnostics: Vec = Vec::::new(); 204 | 205 | for l in lines { 206 | // Command-specific diagnostics 207 | if let Some((command, span)) = &l.command { 208 | if command == "import" || command == "try-import" { 209 | diagnostics.extend(diagnostics_for_imports(rope, l, file_path)) 210 | } else if bazel_flags.flags_by_commands.contains_key(command) { 211 | diagnostics.extend(diagnostics_for_flags(rope, l, bazel_flags)) 212 | } else { 213 | diagnostics.push(Diagnostic::new_simple( 214 | range_to_lsp(rope, span).unwrap(), 215 | format!("Unknown command {:?}", command), 216 | )); 217 | } 218 | } else if !l.flags.is_empty() { 219 | diagnostics.push(Diagnostic::new_simple( 220 | range_to_lsp(rope, &l.span).unwrap(), 221 | "Missing command".to_string(), 222 | )); 223 | } 224 | 225 | // Diagnostics for config names 226 | if let Some((config_name, span)) = &l.config { 227 | if config_name.is_empty() { 228 | // Empty config names make no sense 229 | diagnostics.push(Diagnostic::new_simple( 230 | range_to_lsp(rope, span).unwrap(), 231 | "Empty configuration names are pointless".to_string(), 232 | )); 233 | } else if !config_regex.is_match(config_name) { 234 | // Overly complex config names 235 | diagnostics.push(Diagnostic::new_simple( 236 | range_to_lsp(rope, span).unwrap(), 237 | "Overly complicated config name. 
Config names should consist only of lower-case ASCII characters.".to_string(), 238 | )); 239 | } 240 | if let Some((command, _)) = &l.command { 241 | if ["startup", "import", "try-import"].contains(&command.as_str()) { 242 | diagnostics.push(Diagnostic::new_simple( 243 | range_to_lsp(rope, span).unwrap(), 244 | format!( 245 | "Configuration names not supported on {:?} commands", 246 | command 247 | ), 248 | )); 249 | } 250 | } 251 | } 252 | } 253 | diagnostics 254 | } 255 | 256 | pub fn diagnostics_from_string( 257 | str: &str, 258 | bazel_flags: &BazelFlags, 259 | file_path: Option<&Path>, 260 | ) -> Vec { 261 | let rope = Rope::from_str(str); 262 | let ParserResult { 263 | tokens: _, 264 | mut lines, 265 | errors, 266 | } = parse_from_str(str); 267 | combine_key_value_flags(&mut lines, bazel_flags); 268 | 269 | let mut diagnostics: Vec = Vec::::new(); 270 | diagnostics.extend(diagnostics_from_parser(&rope, &errors)); 271 | diagnostics.extend(diagnostics_from_rcconfig( 272 | &rope, 273 | &lines, 274 | bazel_flags, 275 | file_path, 276 | )); 277 | diagnostics 278 | } 279 | 280 | #[cfg(test)] 281 | fn test_diagnose_string(str: &str) -> Vec { 282 | use crate::bazel_flags::load_packaged_bazel_flags; 283 | 284 | let bazel_flags = load_packaged_bazel_flags("8.0.0"); 285 | return diagnostics_from_string(str, &bazel_flags, None) 286 | .iter_mut() 287 | .map(|d| std::mem::take(&mut d.message)) 288 | .collect::>(); 289 | } 290 | 291 | #[test] 292 | fn test_diagnose_commands() { 293 | // Nothing wrong with this `build` command 294 | assert_eq!( 295 | test_diagnose_string("build --remote_upload_local_results=false"), 296 | Vec::<&str>::new() 297 | ); 298 | // The command should be named `build`, not `built` 299 | assert_eq!( 300 | test_diagnose_string("built --remote_upload_local_results=false"), 301 | vec!["Unknown command \"built\""] 302 | ); 303 | // Completely missing command 304 | assert_eq!( 305 | test_diagnose_string("--remote_upload_local_results=false"), 306 | vec!["Missing command"] 307 | ); 308 | // Completely missing command 309 | assert_eq!( 310 | test_diagnose_string(":opt --remote_upload_local_results=false"), 311 | vec!["Missing command"] 312 | ); 313 | } 314 | 315 | #[test] 316 | fn test_diagnose_config_names() { 317 | // Diagnose empty config names 318 | assert_eq!( 319 | test_diagnose_string("build: --disk_cache="), 320 | vec!["Empty configuration names are pointless"] 321 | ); 322 | 323 | // Diagnose config names on commands which don't support configs 324 | assert_eq!( 325 | test_diagnose_string("startup:opt --digest_function=blake3"), 326 | vec!["Configuration names not supported on \"startup\" commands"] 327 | ); 328 | assert_eq!( 329 | test_diagnose_string("import:opt \"x.bazelrc\""), 330 | vec!["Configuration names not supported on \"import\" commands"] 331 | ); 332 | assert_eq!( 333 | test_diagnose_string("try-import:opt \"x.bazelrc\""), 334 | vec!["Configuration names not supported on \"try-import\" commands"] 335 | ); 336 | 337 | // Diagnose overly complicated config names 338 | let config_name_diag = "Overly complicated config name. 
Config names should consist only of lower-case ASCII characters."; 339 | assert_eq!( 340 | test_diagnose_string("common:Uncached --disk_cache="), 341 | vec![config_name_diag] 342 | ); 343 | assert_eq!( 344 | test_diagnose_string("common:-opt --disk_cache="), 345 | vec![config_name_diag] 346 | ); 347 | assert_eq!( 348 | test_diagnose_string("common:opt- --disk_cache="), 349 | vec![config_name_diag] 350 | ); 351 | assert_eq!( 352 | test_diagnose_string("common:2opt --disk_cache="), 353 | vec![config_name_diag] 354 | ); 355 | assert_eq!( 356 | test_diagnose_string("common:opt2 --disk_cache="), 357 | Vec::::new() 358 | ); 359 | assert_eq!( 360 | test_diagnose_string("common:opt-2 --disk_cache="), 361 | Vec::::new() 362 | ); 363 | assert_eq!( 364 | test_diagnose_string("common:opt--2 --disk_cache="), 365 | vec![config_name_diag] 366 | ); 367 | // The Bazel documentation recommends to prefix all user-specific settings with an `_`. 368 | // As such, config names prefixed that way shouldn't be diagnosed as errors. 369 | assert_eq!( 370 | test_diagnose_string("common:_personal --disk_cache="), 371 | Vec::::new() 372 | ); 373 | } 374 | 375 | #[test] 376 | fn test_diagnose_flags() { 377 | // Diagnose unknown flags 378 | assert_eq!( 379 | test_diagnose_string("build --unknown_flag"), 380 | vec!["Unknown flag \"--unknown_flag\""] 381 | ); 382 | // Diagnose flags which are applied for the wrong command 383 | assert_eq!( 384 | test_diagnose_string("startup --disk_cache="), 385 | vec!["The flag \"--disk_cache\" is not supported for \"startup\". It is supported for [\"analyze-profile\", \"aquery\", \"build\", \"canonicalize-flags\", \"clean\", \"config\", \"coverage\", \"cquery\", \"dump\", \"fetch\", \"help\", \"info\", \"license\", \"mobile-install\", \"mod\", \"print_action\", \"query\", \"run\", \"shutdown\", \"sync\", \"test\", \"vendor\", \"version\"] commands, though."] 386 | ); 387 | // Diagnose deprecated flags 388 | assert_eq!( 389 | test_diagnose_string("common --legacy_whole_archive"), 390 | vec!["The flag \"--legacy_whole_archive\" is deprecated."] 391 | ); 392 | // Diagnose no_op flags 393 | assert_eq!( 394 | test_diagnose_string("common --incompatible_override_toolchain_transition"), 395 | vec!["The flag \"--incompatible_override_toolchain_transition\" is a no-op."] 396 | ); 397 | // Diagnose abbreviated flag names 398 | assert_eq!( 399 | test_diagnose_string("build -k"), 400 | vec!["Use the full name \"keep_going\" instead of its abbreviation."] 401 | ); 402 | 403 | // Don't diagnose custom flags 404 | assert_eq!( 405 | test_diagnose_string( 406 | "build --//my/package:setting=foobar 407 | build --no//my/package:bool_flag 408 | build --@dependency:my/package:bool_flag 409 | build --no@dependency:my/package:bool_flag" 410 | ), 411 | Vec::::new() 412 | ); 413 | } 414 | 415 | #[test] 416 | fn test_diagnose_combined_flags() { 417 | // The `--copt` flag expects an argument and hence consumes the 418 | // following `--std=c++20`. `--std=c++20` should not raise 419 | // an error about an unrecognized Bazel flag. 420 | assert_eq!( 421 | test_diagnose_string("build --copt --std=c++20"), 422 | Vec::<&str>::new() 423 | ); 424 | // On the other hand, `--keep_going` only takes an optional value. 425 | // Hence, the `true` is interpreted as a separate flag, which then triggers 426 | // an error. 
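// In this test input the token following `--keep_going` is `--foobar`, so it stays a
// separate flag and is diagnosed as an unknown flag.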
427 | assert_eq!( 428 | test_diagnose_string("build --keep_going --foobar"), 429 | vec!["Unknown flag \"--foobar\""] 430 | ); 431 | } 432 | 433 | #[test] 434 | fn test_diagnose_import() { 435 | assert_eq!(test_diagnose_string("import"), vec!["Missing file path"]); 436 | assert_eq!( 437 | test_diagnose_string("try-import"), 438 | vec!["Missing file path"] 439 | ); 440 | assert_eq!( 441 | test_diagnose_string("import --a"), 442 | vec!["`import` expects a file name, not a flag name"] 443 | ); 444 | assert_eq!( 445 | test_diagnose_string("import a b"), 446 | vec!["`import` expects a single file name, but received multiple arguments"] 447 | ); 448 | } 449 | -------------------------------------------------------------------------------- /src/file_utils.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | 3 | fn find_file_in_parent_dirs(dir: &Path, file_names: &[&str]) -> Option { 4 | let mut path_buf = PathBuf::from(dir); 5 | loop { 6 | if path_buf.is_dir() { 7 | for file_name in file_names { 8 | if path_buf.join(file_name).is_file() { 9 | return Some(path_buf); 10 | } 11 | } 12 | } 13 | if !path_buf.pop() { 14 | break; 15 | } 16 | } 17 | None 18 | } 19 | 20 | pub fn get_workspace_path(path: &Path) -> Option { 21 | // See https://github.com/bazelbuild/bazel/blob/20c49b49d6d616aeb97d30454656ebbf9cbacd21/src/main/cpp/workspace_layout.cc#L35 22 | const ROOT_FILE_NAME: [&str; 4] = 23 | ["MODULE.bazel", "REPO.bazel", "WORKSPACE.bazel", "WORKSPACE"]; 24 | find_file_in_parent_dirs(path, &ROOT_FILE_NAME) 25 | } 26 | 27 | pub fn resolve_bazelrc_path(file_path: &Path, raw_path: &str) -> Option { 28 | let mut path = raw_path.to_string(); 29 | if path.contains("%workspace%") { 30 | path = path.replace("%workspace%", get_workspace_path(file_path)?.to_str()?); 31 | } 32 | Some(file_path.join(Path::new(&path))) 33 | } 34 | -------------------------------------------------------------------------------- /src/formatting.rs: -------------------------------------------------------------------------------- 1 | use ropey::Rope; 2 | use serde::{Deserialize, Serialize}; 3 | use tower_lsp::lsp_types::TextEdit; 4 | 5 | use crate::{ 6 | bazel_flags::BazelFlags, 7 | lsp_utils::range_to_lsp, 8 | parser::{parse_from_str, Line, ParserResult}, 9 | tokenizer::Span, 10 | }; 11 | 12 | pub fn format_token_into(out: &mut String, tok: &str) { 13 | if tok.is_empty() { 14 | out.push_str("\"\"") 15 | } else if tok 16 | .chars() 17 | .all(|c| (c.is_alphanumeric() || c.is_ascii_punctuation()) && c != '"' && c != '\\') 18 | { 19 | out.push_str(tok); 20 | } else { 21 | out.push('"'); 22 | for c in tok.chars() { 23 | match c { 24 | '\\' => out.push_str("\\\\"), 25 | '\"' => out.push_str("\\\""), 26 | _ => out.push(c), 27 | } 28 | } 29 | out.push('"'); 30 | } 31 | } 32 | 33 | pub fn format_token(tok: &str) -> String { 34 | let mut out = String::with_capacity(2 + tok.len()); 35 | format_token_into(&mut out, tok); 36 | out 37 | } 38 | 39 | pub fn format_line_into(out: &mut String, line: &Line, mut use_line_continuations: bool) { 40 | // Format the command + config 41 | let mut non_empty = false; 42 | if let Some(command) = &line.command { 43 | format_token_into(out, &command.0); 44 | non_empty = true; 45 | } 46 | if let Some(config) = &line.config { 47 | out.push(':'); 48 | format_token_into(out, &config.0); 49 | non_empty = true; 50 | } 51 | 52 | use_line_continuations = 53 | use_line_continuations && line.flags.len() >= 2 && line.comment.is_none(); 54 | 55 | // Format 
the flags 56 | for flag in &line.flags { 57 | if non_empty { 58 | if use_line_continuations { 59 | out.push_str(" \\\n "); 60 | } else { 61 | out.push(' '); 62 | } 63 | } 64 | non_empty = true; 65 | 66 | if let Some(name) = &flag.name { 67 | format_token_into(out, &name.0); 68 | if let Some(value) = &flag.value { 69 | out.push('='); 70 | if !value.0.is_empty() { 71 | format_token_into(out, &value.0); 72 | } 73 | } 74 | } else if let Some(value) = &flag.value { 75 | format_token_into(out, &value.0); 76 | } 77 | } 78 | 79 | // Format the comments 80 | if let Some(comment) = &line.comment { 81 | if non_empty { 82 | out.push(' '); 83 | } 84 | 85 | let could_be_ascii_art = 86 | line.command.is_none() && line.config.is_none() && line.flags.is_empty(); 87 | let stripped_comment = if could_be_ascii_art { 88 | comment.0.trim_end().to_string() 89 | } else { 90 | " ".to_string() + comment.0.trim() 91 | }; 92 | let comment_contents = stripped_comment.replace('\n', "\\\n"); 93 | out.push('#'); 94 | out.push_str(&comment_contents); 95 | } 96 | out.push('\n') 97 | } 98 | 99 | pub fn format_line(line: &Line, use_line_continuations: bool) -> String { 100 | let mut out = String::with_capacity(line.span.end - line.span.start); 101 | format_line_into(&mut out, line, use_line_continuations); 102 | out 103 | } 104 | 105 | // Should lines be combined / split when formatting bazelrc files? 106 | #[derive(PartialEq, Eq, Default, Debug, Clone, Copy, Serialize, Deserialize)] 107 | #[serde(rename_all = "camelCase")] 108 | pub enum FormatLineFlow { 109 | // Do not reflow lines 110 | #[default] 111 | Keep, 112 | // Combine subsequent commands and use `\\` line continuations 113 | LineContinuations, 114 | // Put each flag on a separate line 115 | SeparateLines, 116 | // Put all flags on a single line 117 | SingleLine, 118 | } 119 | 120 | pub fn reflow_lines(lines: &[Line], line_flow: FormatLineFlow) -> Vec { 121 | let mut result1 = Vec::::with_capacity(lines.len()); 122 | match line_flow { 123 | FormatLineFlow::Keep => result1.extend(lines.iter().cloned()), 124 | FormatLineFlow::SingleLine | FormatLineFlow::LineContinuations => { 125 | for l in lines { 126 | // Check if we should merge with the previous line 127 | if let Some(prev_line) = result1.last_mut() { 128 | if l.command.as_ref().map(|c| &c.0) == prev_line.command.as_ref().map(|c| &c.0) 129 | && l.config.as_ref().map(|c| &c.0) 130 | == prev_line.config.as_ref().map(|c| &c.0) 131 | && l.command 132 | .as_ref() 133 | .map(|c| c.0 != "import" && c.0 != "try-import") 134 | .unwrap_or(true) 135 | && l.comment.is_none() 136 | && prev_line.comment.is_none() 137 | { 138 | // Merge with previous 139 | prev_line.flags.extend(l.flags.iter().cloned()); 140 | prev_line.span.end = l.span.end; 141 | continue; 142 | } 143 | } 144 | result1.push(l.clone()); 145 | } 146 | } 147 | FormatLineFlow::SeparateLines => { 148 | for l in lines { 149 | if l.flags.is_empty() { 150 | result1.push(l.clone()); 151 | } 152 | for (i, flag) in l.flags.iter().enumerate() { 153 | let comment = if i == 0 { l.comment.clone() } else { None }; 154 | let span = if i == 0 { 155 | l.span.clone() 156 | } else { 157 | Span { 158 | start: l.span.end, 159 | end: l.span.end, 160 | } 161 | }; 162 | result1.push(Line { 163 | command: l.command.clone(), 164 | config: l.config.clone(), 165 | flags: vec![flag.clone()], 166 | comment, 167 | span, 168 | }) 169 | } 170 | } 171 | } 172 | } 173 | let mut result2 = Vec::::with_capacity(result1.len()); 174 | let is_line_empty = |l: &Line| { 175 | l.command.is_none() && 
l.config.is_none() && l.flags.is_empty() && l.comment.is_none() 176 | }; 177 | for l in result1.into_iter() { 178 | // Copy over all non-empty lines 179 | if !is_line_empty(&l) { 180 | result2.push(l); 181 | continue; 182 | } 183 | if let Some(prev_line) = result2.last_mut() { 184 | if is_line_empty(prev_line) { 185 | // Merge with previous line if it is also empty 186 | prev_line.span.end = l.span.end; 187 | } else { 188 | result2.push(l); 189 | } 190 | } 191 | } 192 | // We don't want to have an empty line at the end of the file 193 | if result2.last().map(is_line_empty).unwrap_or(false) { 194 | let removed_line = result2.pop().unwrap(); 195 | if let Some(last_line) = result2.last_mut() { 196 | last_line.span.end = removed_line.span.end; 197 | } 198 | } 199 | result2 200 | } 201 | 202 | // Gets the LSP edits for reformatting a line range 203 | pub fn get_text_edits_for_lines( 204 | lines: &[Line], 205 | rope: &Rope, 206 | line_flow: FormatLineFlow, 207 | ) -> Vec { 208 | reflow_lines(lines, line_flow) 209 | .iter() 210 | .filter_map(|line| { 211 | let use_line_continuations = line_flow == FormatLineFlow::LineContinuations; 212 | let formatted = format_line(line, use_line_continuations); 213 | if formatted != rope.slice(line.span.clone()) { 214 | Some(TextEdit { 215 | range: range_to_lsp(rope, &line.span)?, 216 | new_text: formatted, 217 | }) 218 | } else { 219 | None 220 | } 221 | }) 222 | .collect::>() 223 | } 224 | 225 | // Parse and pretty-print the given string 226 | pub fn pretty_print( 227 | str: &str, 228 | bazel_flags: &BazelFlags, 229 | line_flow: FormatLineFlow, 230 | ) -> Result> { 231 | let ParserResult { 232 | tokens: _, 233 | mut lines, 234 | errors, 235 | } = parse_from_str(str); 236 | if !errors.is_empty() { 237 | return Err(errors 238 | .into_iter() 239 | .map(|e| format!("{}", e)) 240 | .collect::>()); 241 | } 242 | crate::bazel_flags::combine_key_value_flags(&mut lines, bazel_flags); 243 | lines = reflow_lines(&lines, line_flow); 244 | let use_line_continuations = line_flow == FormatLineFlow::LineContinuations; 245 | let mut out = String::with_capacity(str.len()); 246 | for line in lines { 247 | format_line_into(&mut out, &line, use_line_continuations); 248 | } 249 | Ok(out) 250 | } 251 | 252 | #[cfg(test)] 253 | use crate::bazel_flags::load_packaged_bazel_flags; 254 | 255 | #[test] 256 | fn test_format_token() { 257 | // No escaping for common, unescaped versions 258 | assert_eq!(format_token("abc123"), "abc123"); 259 | assert_eq!(format_token("--my_flag"), "--my_flag"); 260 | assert_eq!(format_token("--my_flag=123"), "--my_flag=123"); 261 | assert_eq!(format_token("//my/package:target"), "//my/package:target"); 262 | assert_eq!(format_token("@a://b/c:*"), "@a://b/c:*"); 263 | // Also, non-ASCII characters are formatted without quoting 264 | assert_eq!(format_token("Täst"), "Täst"); 265 | // Whitespaces need to be escaped 266 | assert_eq!(format_token("--my_flag= "), "\"--my_flag= \""); 267 | assert_eq!(format_token("--my_flag= x"), "\"--my_flag= x\""); 268 | assert_eq!(format_token("a b c"), "\"a b c\""); 269 | // Escaping of quotes and backslashes 270 | assert_eq!(format_token("a\"b"), "\"a\\\"b\""); 271 | assert_eq!(format_token("a\\b"), "\"a\\\\b\""); 272 | } 273 | 274 | #[test] 275 | fn test_pretty_print_command() { 276 | let flags = load_packaged_bazel_flags("7.4.0"); 277 | let lf = FormatLineFlow::Keep; 278 | 279 | // Command & config names 280 | assert_eq!(pretty_print("build", &flags, lf).unwrap(), "build\n"); 281 | assert_eq!( 282 | pretty_print("build:opt", 
&flags, lf).unwrap(), 283 | "build:opt\n" 284 | ); 285 | assert_eq!( 286 | pretty_print("build:o\\ p\\ t", &flags, lf).unwrap(), 287 | "build:\"o p t\"\n" 288 | ); 289 | assert_eq!( 290 | pretty_print("buil\" d:o p\"\\ t", &flags, lf).unwrap(), 291 | "\"buil d\":\"o p t\"\n" 292 | ); 293 | // Invalid command & config names, but should still work 294 | assert_eq!(pretty_print(":opt", &flags, lf).unwrap(), ":opt\n"); 295 | } 296 | 297 | #[test] 298 | fn test_pretty_print_flags() { 299 | let flags = load_packaged_bazel_flags("7.4.0"); 300 | let lf = FormatLineFlow::Keep; 301 | 302 | // Flags (also works without a command, although that is strictly speaking invalid) 303 | assert_eq!(pretty_print("--x", &flags, lf).unwrap(), "--x\n"); 304 | assert_eq!( 305 | pretty_print("--x=abc123", &flags, lf).unwrap(), 306 | "--x=abc123\n" 307 | ); 308 | // Normalizes quoting and whitespaces 309 | assert_eq!( 310 | pretty_print("-\"-x=abc12\"3", &flags, lf).unwrap(), 311 | "--x=abc123\n" 312 | ); 313 | assert_eq!( 314 | pretty_print("--\\x=a\\bc", &flags, lf).unwrap(), 315 | "--x=abc\n" 316 | ); 317 | assert_eq!( 318 | pretty_print("--x=a\\ bc\"1 2 3\"", &flags, lf).unwrap(), 319 | "--x=\"a bc1 2 3\"\n" 320 | ); 321 | assert_eq!( 322 | pretty_print("--x\\ =a\\ b", &flags, lf).unwrap(), 323 | "\"--x \"=\"a b\"\n" 324 | ); 325 | // Normalizes empty strings 326 | assert_eq!(pretty_print("--x=\"\"", &flags, lf).unwrap(), "--x=\n"); 327 | // Removes whitespaces between flags 328 | assert_eq!( 329 | pretty_print("--x=1 --y=2", &flags, lf).unwrap(), 330 | "--x=1 --y=2\n" 331 | ); 332 | } 333 | 334 | #[test] 335 | fn test_pretty_print_combined_flags() { 336 | let flags = load_packaged_bazel_flags("7.4.0"); 337 | let lf = FormatLineFlow::Keep; 338 | 339 | // The `--copt` flag expects an argument and hence consumes the 340 | // following `--std=c++20`. `--std=c++20` should not raise 341 | // an error about an unrecognized Bazel flag. 342 | assert_eq!( 343 | pretty_print("build --copt --std=c++20", &flags, lf).unwrap(), 344 | "build --copt=--std=c++20\n" 345 | ); 346 | // On the other hand, `--keep_going` only takes an optional value. 347 | // Hence, the `true` is interpreted as a separate flag, which then triggers 348 | // an error. 349 | assert_eq!( 350 | pretty_print("build --keep_going --foobar", &flags, lf).unwrap(), 351 | "build --keep_going --foobar\n" 352 | ); 353 | 354 | // Leaves abbreviated flag names alone. `-cdbg` would not be valid. 355 | assert_eq!( 356 | pretty_print("build -c dbg", &flags, lf).unwrap(), 357 | "build -c dbg\n" 358 | ); 359 | 360 | // Handles empty parameters correctly 361 | assert_eq!( 362 | pretty_print("build --x \"\"", &flags, lf).unwrap(), 363 | "build --x \"\"\n" 364 | ); 365 | assert_eq!( 366 | pretty_print("build --x=\"\"", &flags, lf).unwrap(), 367 | "build --x=\n" 368 | ); 369 | } 370 | 371 | #[test] 372 | fn test_pretty_print_comments() { 373 | // TODO 374 | } 375 | 376 | #[test] 377 | fn test_pretty_print_whitespace() { 378 | let flags = load_packaged_bazel_flags("7.4.0"); 379 | let lf = FormatLineFlow::Keep; 380 | 381 | // Removes unnecessary whitespace 382 | assert_eq!(pretty_print(" build ", &flags, lf).unwrap(), "build\n"); 383 | assert_eq!( 384 | pretty_print(" build --x=1 --y", &flags, lf).unwrap(), 385 | "build --x=1 --y\n" 386 | ); 387 | assert_eq!( 388 | pretty_print(" build --x=1 # My comment ", &flags, lf).unwrap(), 389 | "build --x=1 # My comment\n" 390 | ); 391 | // We keep whitespace if there are no commands / flags on the line. 
392 | // The line might be part of an ASCII art and we don't want to destroy that 393 | assert_eq!( 394 | pretty_print("# My comment ", &flags, lf).unwrap(), 395 | "# My comment\n" 396 | ); 397 | } 398 | 399 | #[test] 400 | fn test_pretty_print_newlines() { 401 | let flags = load_packaged_bazel_flags("7.4.0"); 402 | let lf = FormatLineFlow::Keep; 403 | 404 | // We add a final new line, if it is missing 405 | assert_eq!(pretty_print("build", &flags, lf).unwrap(), "build\n"); 406 | 407 | // We keep empty lines 408 | assert_eq!( 409 | pretty_print("build\n\nbuild\n", &flags, lf).unwrap(), 410 | "build\n\nbuild\n" 411 | ); 412 | 413 | // Multiple empty lines are combined into a single empty line 414 | assert_eq!( 415 | pretty_print("build\n\n\n\n\nbuild\n", &flags, lf).unwrap(), 416 | "build\n\nbuild\n" 417 | ); 418 | 419 | // Empty lines at the end of the file are removed 420 | assert_eq!(pretty_print("build\n\n\n", &flags, lf).unwrap(), "build\n"); 421 | 422 | // Comments are kept on separate lines 423 | assert_eq!( 424 | pretty_print("build\n#a\ntest", &flags, lf).unwrap(), 425 | "build\n#a\ntest\n" 426 | ); 427 | } 428 | 429 | #[test] 430 | fn test_pretty_print_line_styles() { 431 | let flags = load_packaged_bazel_flags("7.4.0"); 432 | 433 | let input = "build:c1 --a=b\n\ 434 | build:c1 --c=d\n\ 435 | build:c2 --e=f --g=h\n\ 436 | build:c3 --xyz"; 437 | 438 | assert_eq!( 439 | pretty_print(input, &flags, FormatLineFlow::LineContinuations).unwrap(), 440 | "build:c1 \\\n --a=b \\\n --c=d\n\ 441 | build:c2 \\\n --e=f \\\n --g=h\n\ 442 | build:c3 --xyz\n" 443 | ); 444 | 445 | assert_eq!( 446 | pretty_print(input, &flags, FormatLineFlow::SeparateLines).unwrap(), 447 | "build:c1 --a=b\n\ 448 | build:c1 --c=d\n\ 449 | build:c2 --e=f\n\ 450 | build:c2 --g=h\n\ 451 | build:c3 --xyz\n" 452 | ); 453 | 454 | assert_eq!( 455 | pretty_print(input, &flags, FormatLineFlow::SingleLine).unwrap(), 456 | "build:c1 --a=b --c=d\n\ 457 | build:c2 --e=f --g=h\n\ 458 | build:c3 --xyz\n" 459 | ); 460 | 461 | assert_eq!( 462 | pretty_print( 463 | "import \"a.bazelrc\"\nimport \"b.bazelrc\"", 464 | &flags, 465 | FormatLineFlow::SingleLine 466 | ) 467 | .unwrap(), 468 | "import a.bazelrc\n\ 469 | import b.bazelrc\n" 470 | ); 471 | } 472 | -------------------------------------------------------------------------------- /src/language_server.rs: -------------------------------------------------------------------------------- 1 | use crate::bazel_flags::{combine_key_value_flags, BazelFlags, COMMAND_DOCS}; 2 | use crate::completion::get_completion_items; 3 | use crate::definition::get_definitions; 4 | use crate::diagnostic::{diagnostics_from_parser, diagnostics_from_rcconfig}; 5 | use crate::file_utils::resolve_bazelrc_path; 6 | use crate::formatting::{get_text_edits_for_lines, FormatLineFlow}; 7 | use crate::line_index::{IndexEntry, IndexEntryKind, IndexedLines}; 8 | use crate::lsp_utils::{lsp_pos_to_offset, range_to_lsp}; 9 | use crate::parser::{parse_from_str, Line, ParserResult}; 10 | use crate::semantic_token::{ 11 | convert_to_lsp_tokens, semantic_tokens_from_lines, RCSemanticToken, LEGEND_TYPE, 12 | }; 13 | use dashmap::DashMap; 14 | use ropey::Rope; 15 | use serde::{Deserialize, Serialize}; 16 | use tower_lsp::jsonrpc::{Error, Result}; 17 | use tower_lsp::lsp_types::*; 18 | use tower_lsp::{Client, LanguageServer}; 19 | 20 | struct TextDocumentItem { 21 | uri: Url, 22 | text: String, 23 | version: i32, 24 | } 25 | 26 | #[derive(Debug)] 27 | pub struct AnalyzedDocument { 28 | rope: Rope, 29 | semantic_tokens: Vec, 30 
| indexed_lines: IndexedLines,
31 |     has_parser_errors: bool,
32 | }
33 |
34 | #[derive(Deserialize, Serialize, Debug)]
35 | #[serde(rename_all = "camelCase")]
36 | pub struct Settings {
37 |     #[serde(default)]
38 |     pub format_lines: FormatLineFlow,
39 | }
40 |
41 | #[derive(Debug)]
42 | pub struct Backend {
43 |     pub client: Client,
44 |     pub document_map: DashMap<String, AnalyzedDocument>,
45 |     pub bazel_flags: BazelFlags,
46 |     pub settings: std::sync::RwLock<Settings>,
47 |     // An optional message which should be displayed to the user on startup
48 |     pub startup_warning: Option<String>,
49 | }
50 |
51 | impl Backend {
52 |     async fn on_change(&self, params: TextDocumentItem) {
53 |         let rope = ropey::Rope::from_str(&params.text);
54 |         let src = rope.to_string();
55 |
56 |         let file_path_buf = params.uri.to_file_path().ok();
57 |         let file_path = file_path_buf.as_deref();
58 |
59 |         let ParserResult {
60 |             tokens: _,
61 |             mut lines,
62 |             errors,
63 |         } = parse_from_str(&src);
64 |         combine_key_value_flags(&mut lines, &self.bazel_flags);
65 |         let semantic_tokens = semantic_tokens_from_lines(&lines);
66 |         let indexed_lines = IndexedLines::from_lines(lines);
67 |
68 |         let mut diagnostics: Vec<Diagnostic> = Vec::<Diagnostic>::new();
69 |         diagnostics.extend(diagnostics_from_parser(&rope, &errors));
70 |         diagnostics.extend(diagnostics_from_rcconfig(
71 |             &rope,
72 |             &indexed_lines.lines,
73 |             &self.bazel_flags,
74 |             file_path,
75 |         ));
76 |
77 |         self.document_map.insert(
78 |             params.uri.to_string(),
79 |             AnalyzedDocument {
80 |                 rope,
81 |                 semantic_tokens,
82 |                 indexed_lines,
83 |                 has_parser_errors: !errors.is_empty(),
84 |             },
85 |         );
86 |
87 |         self.client
88 |             .publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
89 |             .await;
90 |     }
91 | }
92 |
93 | #[tower_lsp::async_trait]
94 | impl LanguageServer for Backend {
95 |     async fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> {
96 |         Ok(InitializeResult {
97 |             server_info: Some(ServerInfo {
98 |                 name: "bazelrc Language Server".to_string(),
99 |                 version: Some("1".to_string()),
100 |             }),
101 |             offset_encoding: None,
102 |             capabilities: ServerCapabilities {
103 |                 text_document_sync: Some(TextDocumentSyncCapability::Kind(
104 |                     TextDocumentSyncKind::FULL,
105 |                 )),
106 |                 semantic_tokens_provider: Some(
107 |                     SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
108 |                         SemanticTokensRegistrationOptions {
109 |                             text_document_registration_options: {
110 |                                 TextDocumentRegistrationOptions {
111 |                                     document_selector: Some(vec![DocumentFilter {
112 |                                         language: Some("bazelrc".to_string()),
113 |                                         scheme: None,
114 |                                         pattern: None,
115 |                                     }]),
116 |                                 }
117 |                             },
118 |                             semantic_tokens_options: SemanticTokensOptions {
119 |                                 work_done_progress_options: WorkDoneProgressOptions::default(),
120 |                                 legend: SemanticTokensLegend {
121 |                                     token_types: LEGEND_TYPE.into(),
122 |                                     token_modifiers: vec![],
123 |                                 },
124 |                                 range: None,
125 |                                 full: Some(SemanticTokensFullOptions::Bool(true)),
126 |                             },
127 |                             static_registration_options: StaticRegistrationOptions::default(),
128 |                         },
129 |                     ),
130 |                 ),
131 |                 completion_provider: Some(CompletionOptions {
132 |                     trigger_characters: Some(vec!["-".to_string()]),
133 |                     ..Default::default()
134 |                 }),
135 |                 hover_provider: Some(HoverProviderCapability::Simple(true)),
136 |                 document_formatting_provider: Some(OneOf::Left(true)),
137 |                 document_range_formatting_provider: Some(OneOf::Left(true)),
138 |                 document_link_provider: Some(DocumentLinkOptions {
139 |                     resolve_provider: None,
140 |                     work_done_progress_options: Default::default(),
141 |                 }),
142 |                 definition_provider: Some(OneOf::Left(true)),
143 |                 ..ServerCapabilities::default()
144 |             },
145 |         })
146 |     }
147 |
148 |     async fn initialized(&self, _: InitializedParams) {
149 |         self.client
150 |             .log_message(MessageType::INFO, "server initialized!")
151 |             .await;
152 |
153 |         if let Some(warning) = &self.startup_warning {
154 |             self.client
155 |                 .show_message(MessageType::WARNING, warning)
156 |                 .await;
157 |         }
158 |     }
159 |
160 |     async fn shutdown(&self) -> Result<()> {
161 |         Ok(())
162 |     }
163 |
164 |     async fn did_change_configuration(&self, mut params: DidChangeConfigurationParams) {
165 |         let Some(bazelrc_settings) = params
166 |             .settings
167 |             .as_object_mut()
168 |             .and_then(|o| o.remove("bazelrc"))
169 |         else {
170 |             return;
171 |         };
172 |         match serde_json::from_value::<Settings>(bazelrc_settings) {
173 |             Ok(new_settings) => *self.settings.write().unwrap() = new_settings,
174 |             Err(err) => {
175 |                 self.client
176 |                     .show_message(MessageType::ERROR, format!("Invalid settings: {}", err))
177 |                     .await;
178 |                 self.client
179 |                     .log_message(MessageType::ERROR, format!("Invalid settings: {}", err))
180 |                     .await;
181 |             }
182 |         }
183 |     }
184 |
185 |     async fn did_open(&self, params: DidOpenTextDocumentParams) {
186 |         self.on_change(TextDocumentItem {
187 |             uri: params.text_document.uri,
188 |             text: params.text_document.text,
189 |             version: params.text_document.version,
190 |         })
191 |         .await
192 |     }
193 |
194 |     async fn did_change(&self, mut params: DidChangeTextDocumentParams) {
195 |         self.on_change(TextDocumentItem {
196 |             uri: params.text_document.uri,
197 |             text: std::mem::take(&mut params.content_changes[0].text),
198 |             version: params.text_document.version,
199 |         })
200 |         .await
201 |     }
202 |
203 |     async fn did_close(&self, params: DidCloseTextDocumentParams) {
204 |         self.document_map
205 |             .remove(&params.text_document.uri.to_string());
206 |     }
207 |
208 |     async fn semantic_tokens_full(
209 |         &self,
210 |         params: SemanticTokensParams,
211 |     ) -> Result<Option<SemanticTokensResult>> {
212 |         let uri = params.text_document.uri.to_string();
213 |         let doc = self
214 |             .document_map
215 |             .get(&uri)
216 |             .ok_or(Error::invalid_params("Unknown document!"))?;
217 |         let lsp_tokens = convert_to_lsp_tokens(&doc.rope, &doc.semantic_tokens);
218 |         Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {
219 |             result_id: None,
220 |             data: lsp_tokens,
221 |         })))
222 |     }
223 |
224 |     async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
225 |         let text_document_position = params.text_document_position;
226 |         let uri = text_document_position.text_document.uri.to_string();
227 |         let doc = self
228 |             .document_map
229 |             .get(&uri)
230 |             .ok_or(Error::invalid_params("Unknown document!"))?;
231 |         let pos = lsp_pos_to_offset(&doc.rope, &text_document_position.position)
232 |             .ok_or(Error::invalid_params("Position out of range"))?;
233 |
234 |         Ok(Some(CompletionResponse::Array(get_completion_items(
235 |             &self.bazel_flags,
236 |             &doc.rope,
237 |             &doc.indexed_lines,
238 |             pos,
239 |         ))))
240 |     }
241 |
242 |     async fn goto_definition(
243 |         &self,
244 |         params: GotoDefinitionParams,
245 |     ) -> Result<Option<GotoDefinitionResponse>> {
246 |         let uri = params.text_document_position_params.text_document.uri;
247 |         let file_path = uri
248 |             .to_file_path()
249 |             .ok()
250 |             .ok_or(Error::invalid_params("Unsupported URI scheme!"))?;
251 |         let doc = self
252 |             .document_map
253 |             .get(&uri.to_string())
254 |             .ok_or(Error::invalid_params("Unknown document!"))?;
255 |         let pos = lsp_pos_to_offset(&doc.rope, &params.text_document_position_params.position)
256 |             .ok_or(Error::invalid_params("Position out of range"))?;
257 |         let IndexEntry { kind, line_nr, .. } =
258 |             doc.indexed_lines.find_symbol_at_position(pos).unwrap();
259 |         let definitions = get_definitions(&file_path, kind, &doc.indexed_lines.lines[*line_nr]);
260 |         Ok(definitions)
261 |     }
262 |
263 |     async fn hover(&self, params: HoverParams) -> Result<Option<Hover>> {
264 |         // Find the right document and offset
265 |         let text_document_position = params.text_document_position_params;
266 |         let uri = text_document_position.text_document.uri.to_string();
267 |         let doc = self
268 |             .document_map
269 |             .get(&uri)
270 |             .ok_or(Error::invalid_params("Unknown document!"))?;
271 |         let pos = lsp_pos_to_offset(&doc.rope, &text_document_position.position)
272 |             .ok_or(Error::invalid_params("Position out of range"))?;
273 |
274 |         Ok(|| -> Option<Hover> {
275 |             // Find the symbol at the position and provide the hover documentation
276 |             let IndexEntry {
277 |                 span,
278 |                 line_nr,
279 |                 kind,
280 |             } = doc.indexed_lines.find_symbol_at_position(pos)?;
281 |             match kind {
282 |                 IndexEntryKind::Command => {
283 |                     let line = &doc.indexed_lines.lines[*line_nr];
284 |
285 |                     line.command
286 |                         .as_ref()
287 |                         .and_then(|cmd| COMMAND_DOCS.get(cmd.0.as_str()))
288 |                         .map(|docs| {
289 |                             let contents =
290 |                                 HoverContents::Scalar(MarkedString::String(docs.to_string()));
291 |                             Hover {
292 |                                 contents,
293 |                                 range: range_to_lsp(&doc.rope, span),
294 |                             }
295 |                         })
296 |                 }
297 |                 IndexEntryKind::Config => None,
298 |                 IndexEntryKind::FlagValue(flag_nr) | IndexEntryKind::FlagName(flag_nr) => {
299 |                     let line = &doc.indexed_lines.lines[*line_nr];
300 |                     let flag_name = &line.flags.get(*flag_nr)?.name.as_ref()?.0;
301 |                     let (_, flag_info) = self.bazel_flags.get_by_invocation(flag_name)?;
302 |                     let content = flag_info.get_documentation_markdown();
303 |                     let contents = HoverContents::Scalar(MarkedString::String(content));
304 |                     Some(Hover {
305 |                         contents,
306 |                         range: range_to_lsp(&doc.rope, span),
307 |                     })
308 |                 }
309 |             }
310 |         }())
311 |     }
312 |
313 |     async fn formatting(&self, params: DocumentFormattingParams) -> Result<Option<Vec<TextEdit>>> {
314 |         // Find the right document
315 |         let uri = params.text_document.uri.to_string();
316 |         let doc = self
317 |             .document_map
318 |             .get(&uri)
319 |             .ok_or(Error::invalid_params("Unknown document!"))?;
320 |         let rope = &doc.rope;
321 |
322 |         if doc.has_parser_errors {
323 |             return Err(Error::invalid_params(
324 |                 "Formatting can only be applied if there are no parsing errors",
325 |             ));
326 |         }
327 |
328 |         // Format all lines
329 |         let lines = &doc.indexed_lines.lines;
330 |         Ok(Some(get_text_edits_for_lines(
331 |             lines,
332 |             rope,
333 |             self.settings.read().unwrap().format_lines,
334 |         )))
335 |     }
336 |
337 |     async fn range_formatting(
338 |         &self,
339 |         params: DocumentRangeFormattingParams,
340 |     ) -> Result<Option<Vec<TextEdit>>> {
341 |         // Find the right document
342 |         let uri = params.text_document.uri.to_string();
343 |         let doc = self
344 |             .document_map
345 |             .get(&uri)
346 |             .ok_or(Error::invalid_params("Unknown document!"))?;
347 |         let rope = &doc.rope;
348 |
349 |         if doc.has_parser_errors {
350 |             return Err(Error::invalid_params(
351 |                 "Formatting can only be applied if there are no parsing errors",
352 |             ));
353 |         }
354 |
355 |         // Format the line range
356 |         let all_lines = &doc.indexed_lines.lines;
357 |         let start_offset = lsp_pos_to_offset(rope, &params.range.start)
358 |             .ok_or(Error::invalid_params("Position out of range!"))?;
359 |         let end_offset = lsp_pos_to_offset(rope, &params.range.end)
360 |             .ok_or(Error::invalid_params("Position out of range!"))?;
361 |         // XXX not correct,
yet 362 | let first_idx = all_lines.partition_point(|l: &Line| l.span.start < start_offset); 363 | let last_idx = all_lines.partition_point(|l: &Line| l.span.end < end_offset) + 1; 364 | 365 | Ok(Some(get_text_edits_for_lines( 366 | &all_lines[first_idx..last_idx], 367 | rope, 368 | self.settings.read().unwrap().format_lines, 369 | ))) 370 | } 371 | 372 | async fn document_link(&self, params: DocumentLinkParams) -> Result>> { 373 | // Find the right document 374 | let uri = params.text_document.uri.to_string(); 375 | let doc = self 376 | .document_map 377 | .get(&uri) 378 | .ok_or(Error::invalid_params("Unknown document!"))?; 379 | let rope = &doc.rope; 380 | let file_path = params 381 | .text_document 382 | .uri 383 | .to_file_path() 384 | .ok() 385 | .ok_or(Error::invalid_params("Unsupported URI scheme!"))?; 386 | 387 | // Link all `import` and `try-import` lines 388 | let links = doc 389 | .indexed_lines 390 | .lines 391 | .iter() 392 | .filter_map(|line| { 393 | let command = line.command.as_ref()?; 394 | if command.0 != "import" && command.0 != "try-import" { 395 | return None; 396 | } 397 | if line.flags.len() != 1 { 398 | return None; 399 | } 400 | let flag = &line.flags[0]; 401 | if flag.name.is_some() { 402 | return None; 403 | } 404 | let value = flag.value.as_ref()?; 405 | let path = resolve_bazelrc_path(&file_path, &value.0)?; 406 | let url = Url::from_file_path(path).ok()?; 407 | Some(DocumentLink { 408 | range: range_to_lsp(rope, &value.1)?, 409 | target: Some(url), 410 | tooltip: None, 411 | data: None, 412 | }) 413 | }) 414 | .collect::>(); 415 | Ok(Some(links)) 416 | } 417 | } 418 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod bazel_flags; 2 | pub mod bazel_flags_proto; 3 | pub mod bazel_version; 4 | pub mod completion; 5 | pub mod definition; 6 | pub mod diagnostic; 7 | pub mod file_utils; 8 | pub mod formatting; 9 | pub mod language_server; 10 | pub mod line_index; 11 | pub mod lsp_utils; 12 | pub mod parser; 13 | pub mod semantic_token; 14 | pub mod tokenizer; 15 | -------------------------------------------------------------------------------- /src/line_index.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use crate::{parser::Line, tokenizer::Span}; 4 | 5 | #[derive(Clone, Debug, PartialEq, Eq)] 6 | pub enum IndexEntryKind { 7 | Command, 8 | Config, 9 | FlagName(usize), 10 | FlagValue(usize), 11 | } 12 | 13 | #[derive(Clone, Debug, PartialEq, Eq)] 14 | pub struct IndexEntry { 15 | pub span: Span, 16 | pub line_nr: usize, 17 | pub kind: IndexEntryKind, 18 | } 19 | 20 | #[derive(Clone, Debug, PartialEq, Eq)] 21 | pub struct IndexedLines { 22 | pub lines: Vec, 23 | reverse_token_idx: BTreeMap, 24 | reverse_line_idx: BTreeMap, 25 | } 26 | 27 | impl IndexedLines { 28 | pub fn from_lines(lines: Vec) -> IndexedLines { 29 | let mut reverse_token_idx_entries = Vec::<(usize, IndexEntry)>::new(); 30 | let mut reverse_line_idx_entries = Vec::<(usize, usize)>::new(); 31 | 32 | for (line_nr, line) in lines.iter().enumerate() { 33 | reverse_line_idx_entries.push((line.span.start, line_nr)); 34 | 35 | // Helper function to add a token to the index 36 | let mut add_token_to_idx = |span: &Span, kind: IndexEntryKind| { 37 | reverse_token_idx_entries.push(( 38 | span.start, 39 | IndexEntry { 40 | span: span.clone(), 41 | line_nr, 42 | kind, 43 | }, 44 | )); 45 | }; 46 | 
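// Both reverse indices built in this loop are keyed by start offset: `reverse_line_idx`
// maps a line's start offset to its line number, and `reverse_token_idx` maps a token's
// start offset to its `IndexEntry`. The lookup helpers further down in this file
// (`find_linenr_at_position`, `find_symbol_at_position`) scan these maps to answer which
// line or token covers a given byte offset, which is what hover, completion, and
// goto-definition ultimately need.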
47 | // Index the command 48 | if let Some(cmd) = &line.command { 49 | add_token_to_idx(&cmd.1, IndexEntryKind::Command); 50 | } 51 | // Index the config 52 | if let Some(config) = &line.config { 53 | add_token_to_idx(&config.1, IndexEntryKind::Config); 54 | } 55 | // Index the flags 56 | for (flag_nr, flag) in line.flags.iter().enumerate() { 57 | if let Some(name) = &flag.name { 58 | add_token_to_idx(&name.1, IndexEntryKind::FlagName(flag_nr)); 59 | } 60 | if let Some(value) = &flag.value { 61 | add_token_to_idx(&value.1, IndexEntryKind::FlagValue(flag_nr)); 62 | } 63 | } 64 | } 65 | IndexedLines { 66 | lines, 67 | reverse_token_idx: BTreeMap::from_iter(reverse_token_idx_entries), 68 | reverse_line_idx: BTreeMap::from_iter(reverse_line_idx_entries), 69 | } 70 | } 71 | 72 | pub fn find_linenr_at_position(&self, pos: usize) -> Option { 73 | self.reverse_line_idx 74 | .values() 75 | .find(|e| self.lines[**e].span.contains(&pos)) 76 | .copied() 77 | /* TODO use 'upper_bound' 78 | self.reverse_idx 79 | .upper_bound(Bound::Included(&pos)) 80 | .value() 81 | .filter(|s| s.span.contains(&pos)) 82 | */ 83 | } 84 | 85 | pub fn find_line_at_position(&self, pos: usize) -> Option<&Line> { 86 | self.find_linenr_at_position(pos) 87 | .and_then(|i| self.lines.get(i)) 88 | } 89 | 90 | pub fn find_symbol_at_position(&self, pos: usize) -> Option<&IndexEntry> { 91 | self.reverse_token_idx 92 | .values() 93 | .find(|e| e.span.contains(&pos)) 94 | /* TODO use 'upper_bound' */ 95 | } 96 | } 97 | 98 | #[test] 99 | #[rustfmt::skip] 100 | fn test_index() { 101 | use crate::parser::parse_from_str; 102 | 103 | let index = IndexedLines::from_lines( 104 | parse_from_str( 105 | "# config 106 | common --remote_cache= --disk_cache= 107 | build:opt --upload_results=false 108 | ", 109 | ) 110 | .lines, 111 | ); 112 | 113 | // Test the line index 114 | assert_eq!(index.reverse_line_idx, BTreeMap::::from([ 115 | (0, 0), (9, 1), (46, 2), 116 | ])); 117 | 118 | assert_eq!(index.find_linenr_at_position(0), Some(0)); 119 | assert_eq!(index.find_linenr_at_position(1), Some(0)); 120 | assert_eq!(index.find_linenr_at_position(8), Some(0)); 121 | assert_eq!(index.find_linenr_at_position(9), Some(1)); 122 | assert_eq!(index.find_linenr_at_position(10), Some(1)); 123 | assert_eq!(index.find_linenr_at_position(40), Some(1)); 124 | assert_eq!(index.find_linenr_at_position(48), Some(2)); 125 | 126 | // Test the token index 127 | assert_eq!(index.reverse_token_idx, BTreeMap::::from([ 128 | (9, IndexEntry { span: 9..15, line_nr: 1, kind: IndexEntryKind::Command }), 129 | (16, IndexEntry { span: 16..30, line_nr: 1, kind: IndexEntryKind::FlagName(0) }), 130 | (30, IndexEntry { span: 30..31, line_nr: 1, kind: IndexEntryKind::FlagValue(0) }), 131 | (32, IndexEntry { span: 32..44, line_nr: 1, kind: IndexEntryKind::FlagName(1) }), 132 | (44, IndexEntry { span: 44..45, line_nr: 1, kind: IndexEntryKind::FlagValue(1) }), 133 | (46, IndexEntry { span: 46..51, line_nr: 2, kind: IndexEntryKind::Command }), 134 | (51, IndexEntry { span: 51..55, line_nr: 2, kind: IndexEntryKind::Config }), 135 | (56, IndexEntry { span: 56..72, line_nr: 2, kind: IndexEntryKind::FlagName(0) }), 136 | (72, IndexEntry { span: 72..78, line_nr: 2, kind: IndexEntryKind::FlagValue(0) }), 137 | ])); 138 | 139 | assert_eq!(index.find_symbol_at_position(20).unwrap().kind, IndexEntryKind::FlagName(0)); 140 | } 141 | -------------------------------------------------------------------------------- /src/lsp_utils.rs: 
-------------------------------------------------------------------------------- 1 | use ropey::Rope; 2 | use tower_lsp::lsp_types::{Position, Range}; 3 | 4 | use crate::tokenizer::Span; 5 | 6 | pub fn lsp_pos_to_offset(rope: &Rope, pos: &Position) -> Option { 7 | let char = rope.try_line_to_char(pos.line as usize).ok()?; 8 | Some(char + pos.character as usize) 9 | } 10 | 11 | pub fn offset_to_lsp_pos(rope: &Rope, pos: usize) -> Option { 12 | let line = rope.try_byte_to_line(pos).ok()?; 13 | let first = rope.try_line_to_char(line).ok()?; 14 | let character = rope.try_byte_to_char(pos).ok()? - first; 15 | Some(Position { 16 | line: line.try_into().ok()?, 17 | character: character.try_into().ok()?, 18 | }) 19 | } 20 | 21 | pub fn range_to_lsp(rope: &Rope, span: &Span) -> Option { 22 | Some(Range { 23 | start: offset_to_lsp_pos(rope, span.start)?, 24 | end: offset_to_lsp_pos(rope, span.end)?, 25 | }) 26 | } 27 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use std::io::Read; 2 | use std::ops::Deref; 3 | use std::path::Path; 4 | use std::{env, fs, io, process}; 5 | 6 | use bazelrc_lsp::bazel_flags::{ 7 | load_bazel_flags_from_command, load_packaged_bazel_flags, BazelFlags, 8 | }; 9 | use bazelrc_lsp::bazel_version::{ 10 | determine_bazelisk_version, find_closest_version, AVAILABLE_BAZEL_VERSIONS, 11 | }; 12 | use bazelrc_lsp::diagnostic::diagnostics_from_string; 13 | use bazelrc_lsp::formatting::{pretty_print, FormatLineFlow}; 14 | use bazelrc_lsp::language_server::{Backend, Settings}; 15 | use clap::{CommandFactory, Parser, Subcommand}; 16 | use tower_lsp::{LspService, Server}; 17 | use walkdir::WalkDir; 18 | 19 | #[derive(Parser)] 20 | #[command(version)] 21 | #[command(about = "Code Intelligence for bazelrc config files")] 22 | struct Cli { 23 | /// The Bazel version 24 | #[arg(long, value_name = "VERSION", group = "bazel-version")] 25 | bazel_version: Option, 26 | /// Path to a Bazel version 27 | #[arg(long, value_name = "PATH", group = "bazel-version")] 28 | bazel_path: Option, 29 | /// Should lines be combined / split when formatting bazelrc files? 30 | #[arg(long, default_value = "keep")] 31 | format_lines: FormatLineFlowCli, 32 | 33 | #[command(subcommand)] 34 | command: Option, 35 | } 36 | 37 | #[derive(Clone, Copy)] 38 | struct FormatLineFlowCli(FormatLineFlow); 39 | impl clap::ValueEnum for FormatLineFlowCli { 40 | fn value_variants<'a>() -> &'a [Self] { 41 | &[ 42 | FormatLineFlowCli(FormatLineFlow::Keep), 43 | FormatLineFlowCli(FormatLineFlow::LineContinuations), 44 | FormatLineFlowCli(FormatLineFlow::SeparateLines), 45 | FormatLineFlowCli(FormatLineFlow::SingleLine), 46 | ] 47 | } 48 | 49 | fn to_possible_value(&self) -> Option { 50 | match self.0 { 51 | FormatLineFlow::Keep => Some(clap::builder::PossibleValue::new("keep")), 52 | FormatLineFlow::LineContinuations => { 53 | Some(clap::builder::PossibleValue::new("line-continuations")) 54 | } 55 | FormatLineFlow::SeparateLines => { 56 | Some(clap::builder::PossibleValue::new("separate-lines")) 57 | } 58 | FormatLineFlow::SingleLine => Some(clap::builder::PossibleValue::new("single-line")), 59 | } 60 | } 61 | } 62 | 63 | #[derive(Subcommand)] 64 | enum Commands { 65 | /// Spawns the language server 66 | Lsp {}, 67 | /// Format a bazelrc file 68 | /// 69 | /// If no arguments are specified, format the bazelrc contents 70 | /// from stdin and write the result to stdout. 
71 | /// If s are given, reformat the files. If -i is specified, 72 | /// the files are edited in-place. Otherwise, the result is written to the stdout. 73 | Format(FormatArgs), 74 | /// Check your bazelrc files for mistakes 75 | Lint(LintArgs), 76 | /// List supported Bazel versions 77 | #[clap(hide = true)] 78 | BazelVersions {}, 79 | } 80 | 81 | #[tokio::main] 82 | async fn main() { 83 | let mut cli = Cli::parse(); 84 | if cli.bazel_version.is_none() && cli.bazel_path.is_none() { 85 | // The bazel path can also provided as an environment variable 86 | cli.bazel_path = env::var("BAZELRC_LSP_RUN_BAZEL_PATH").ok(); 87 | } 88 | // For backwards compatibility: If no command is specified, assume we should 89 | // launch the language server. 90 | cli.command = Some(cli.command.unwrap_or(Commands::Lsp {})); 91 | 92 | let (bazel_flags, version_message) = load_bazel_flags(&cli); 93 | 94 | match cli.command.unwrap() { 95 | Commands::Lsp {} => { 96 | let stdin = tokio::io::stdin(); 97 | let stdout = tokio::io::stdout(); 98 | 99 | let (service, socket) = LspService::new(|client| Backend { 100 | client, 101 | document_map: Default::default(), 102 | bazel_flags, 103 | settings: Settings { 104 | format_lines: cli.format_lines.0, 105 | } 106 | .into(), 107 | startup_warning: version_message, 108 | }); 109 | Server::new(stdin, stdout, socket).serve(service).await; 110 | } 111 | Commands::Format(args) => { 112 | if let Some(msg) = &version_message { 113 | eprintln!("{}", msg); 114 | } 115 | handle_format_cmd(&args, &bazel_flags, cli.format_lines.0); 116 | } 117 | Commands::Lint(args) => { 118 | handle_lint_cmd(&args, &bazel_flags); 119 | } 120 | Commands::BazelVersions {} => { 121 | println!( 122 | "{}", 123 | serde_json::to_string(AVAILABLE_BAZEL_VERSIONS.deref()).unwrap() 124 | ); 125 | } 126 | }; 127 | } 128 | 129 | fn load_bazel_flags(cli: &Cli) -> (BazelFlags, Option) { 130 | if let Some(bazel_command) = &cli.bazel_path { 131 | match load_bazel_flags_from_command(bazel_command) { 132 | Ok(flags) => (flags, None), 133 | Err(msg) => { 134 | let bazel_version = 135 | find_closest_version(AVAILABLE_BAZEL_VERSIONS.as_slice(), "latest").0; 136 | let message = 137 | format!("Using flags from Bazel {bazel_version} because running `{bazel_command}` failed:\n{}\n", msg); 138 | (load_packaged_bazel_flags(&bazel_version), Some(message)) 139 | } 140 | } 141 | } else if let Some(cli_version) = &cli.bazel_version { 142 | let (bazel_version, msg) = 143 | find_closest_version(AVAILABLE_BAZEL_VERSIONS.as_slice(), cli_version); 144 | (load_packaged_bazel_flags(&bazel_version), msg) 145 | } else if let Some(auto_detected) = determine_bazelisk_version(&env::current_dir().unwrap()) { 146 | let (bazel_version, msg) = 147 | find_closest_version(AVAILABLE_BAZEL_VERSIONS.as_slice(), &auto_detected); 148 | (load_packaged_bazel_flags(&bazel_version), msg) 149 | } else { 150 | let bazel_version = find_closest_version(AVAILABLE_BAZEL_VERSIONS.as_slice(), "latest").0; 151 | let message = format!( 152 | "Using flags from Bazel {bazel_version} because auto-detecting the Bazel version failed"); 153 | (load_packaged_bazel_flags(&bazel_version), Some(message)) 154 | } 155 | } 156 | 157 | fn for_each_input_file(files: &[String], handle_file: CB) -> bool 158 | where 159 | CB: Fn(String, Option<&Path>) -> bool, 160 | { 161 | let mut had_errors = false; 162 | 163 | if files.is_empty() { 164 | // Read complete stdin 165 | let mut input = String::new(); 166 | io::stdin() 167 | .read_to_string(&mut input) 168 | .expect("Failed to read from 
stdin"); 169 | had_errors |= handle_file(input, None); 170 | } else { 171 | for path_str in files { 172 | let path = std::path::Path::new(path_str); 173 | if path.is_dir() { 174 | let walker = WalkDir::new(path).into_iter().filter_entry(|e| { 175 | let s = e.file_name().to_string_lossy(); 176 | // We want to skip all hidden sub-directories, but still visit `.bazelrc` files 177 | // and also work if the user called `bazelrc-lsp format .` 178 | !s.starts_with('.') || s == "." || s == ".." || s == ".bazelrc" 179 | }); 180 | for entry in walker { 181 | match entry { 182 | Ok(entry) => { 183 | let subpath = entry.into_path(); 184 | let has_bazelrc_suffix = 185 | subpath.to_string_lossy().ends_with(".bazelrc"); 186 | if has_bazelrc_suffix && subpath.is_file() { 187 | let input = 188 | fs::read_to_string(&subpath).expect("Failed to read file"); 189 | had_errors |= handle_file(input, Some(subpath.as_path())); 190 | } 191 | } 192 | Err(err) => { 193 | eprintln!( 194 | "Failed to enumerate files in `{}`: {}", 195 | path_str, 196 | err.io_error().unwrap() 197 | ); 198 | had_errors = true; 199 | } 200 | } 201 | } 202 | } else { 203 | let input = fs::read_to_string(path).expect("Failed to read file"); 204 | had_errors |= handle_file(input, Some(path)); 205 | } 206 | } 207 | } 208 | 209 | had_errors 210 | } 211 | 212 | #[derive(Parser)] 213 | struct LintArgs { 214 | /// File(s) to format 215 | files: Vec, 216 | /// Suppress output and only indicate errors through the exit code 217 | #[arg(long, group = "fmt-action")] 218 | quiet: bool, 219 | } 220 | 221 | fn handle_lint_cmd(args: &LintArgs, bazel_flags: &BazelFlags) { 222 | let had_errors = for_each_input_file(&args.files, |input: String, path: Option<&Path>| { 223 | let diagnostics = diagnostics_from_string(&input, bazel_flags, path); 224 | if !args.quiet { 225 | for d in &diagnostics { 226 | // TODO: improve printing, either using ariadne or codespan-reporting 227 | println!( 228 | "{}: {}", 229 | path.and_then(Path::to_str).unwrap_or(""), 230 | d.message 231 | ); 232 | } 233 | } 234 | !diagnostics.is_empty() 235 | }); 236 | if had_errors { 237 | process::exit(1); 238 | } 239 | } 240 | 241 | #[derive(Parser)] 242 | struct FormatArgs { 243 | /// File(s) to format 244 | files: Vec, 245 | /// Inplace edit s 246 | #[arg(short = 'i', long, group = "fmt-action")] 247 | inplace: bool, 248 | /// Only check if the given file(s) are formatted correctly 249 | #[arg(long, group = "fmt-action")] 250 | check: bool, 251 | } 252 | 253 | fn handle_format_cmd(args: &FormatArgs, bazel_flags: &BazelFlags, line_flow: FormatLineFlow) { 254 | if args.inplace && args.files.is_empty() { 255 | let mut cmd = Cli::command(); 256 | cmd.error( 257 | clap::error::ErrorKind::ArgumentConflict, 258 | "If the `-i` flag is specified, input file(s) must be specified as part of the command line invocation", 259 | ).exit(); 260 | } 261 | 262 | let had_errors = for_each_input_file(&args.files, |input: String, path: Option<&Path>| { 263 | let result = pretty_print(&input, bazel_flags, line_flow); 264 | match result { 265 | Ok(formatted) => { 266 | if args.check { 267 | let input_name = path 268 | .map(|p| p.to_string_lossy().into_owned()) 269 | .unwrap_or("".to_string()); 270 | if formatted != input { 271 | println!( 272 | "{} is NOT correctly formatted and needs reformatting", 273 | input_name 274 | ); 275 | return true; 276 | } else { 277 | println!("{} is already correctly formatted", input_name); 278 | } 279 | } else if args.inplace { 280 | fs::write(path.unwrap(), 
formatted).expect("Failed to write file"); 281 | } else { 282 | if let Some(p) = path { 283 | println!("--- {} ---", p.to_string_lossy()); 284 | } 285 | print!("{}", formatted); 286 | } 287 | } 288 | Err(errors) => { 289 | for e in errors { 290 | eprintln!("{}", e); 291 | } 292 | return true; 293 | } 294 | }; 295 | false 296 | }); 297 | if had_errors { 298 | process::exit(1); 299 | } 300 | } 301 | 302 | #[test] 303 | fn verify_cli() { 304 | use clap::CommandFactory; 305 | Cli::command().debug_assert(); 306 | } 307 | -------------------------------------------------------------------------------- /src/parser.rs: -------------------------------------------------------------------------------- 1 | use chumsky::{error::Rich, Parser}; 2 | 3 | use crate::tokenizer::{tokenizer, Span, Spanned, Token}; 4 | 5 | #[derive(Clone, Debug, PartialEq, Eq, Default)] 6 | pub struct Flag { 7 | pub name: Option>, 8 | pub value: Option>, 9 | } 10 | 11 | #[derive(Clone, Debug, PartialEq, Eq, Default)] 12 | pub struct Line { 13 | pub command: Option>, 14 | pub config: Option>, 15 | pub flags: Vec, 16 | pub comment: Option>, 17 | // The span of this line (without the comment) 18 | pub span: Span, 19 | } 20 | 21 | pub struct ParserResult<'a> { 22 | pub tokens: Vec>, 23 | pub lines: Vec, 24 | pub errors: Vec>, 25 | } 26 | 27 | // Splits a token at a given separator, keeping the position tracking 28 | fn split_token( 29 | str: &str, 30 | span: &Span, 31 | orig: &str, 32 | sep: char, 33 | ) -> Option<(Spanned, Spanned)> { 34 | if let Some(split_pos) = str.find(sep) { 35 | let orig_slice = &orig[span.start..span.end]; 36 | let orig_offset = orig_slice.find(sep).unwrap(); 37 | let (p1, p2_) = str.split_at(split_pos); 38 | let (_, p2) = p2_.split_at(1); 39 | Some(( 40 | (p1.to_string(), span.start..span.start + orig_offset), 41 | (p2.to_string(), (span.start + orig_offset..span.end)), 42 | )) 43 | } else { 44 | None 45 | } 46 | } 47 | 48 | fn parse_flag(str: &str, span: &Span, orig: &str) -> Flag { 49 | if str.starts_with('-') { 50 | // This is flag. 
Try to split at `=` 51 | if let Some((name, value)) = split_token(str, span, orig, '=') { 52 | Flag { 53 | name: Some(name), 54 | value: Some(value), 55 | } 56 | } else { 57 | Flag { 58 | name: Some((str.to_string(), span.clone())), 59 | value: None, 60 | } 61 | } 62 | } else { 63 | // This is only a value 64 | Flag { 65 | name: None, 66 | value: Some((str.to_string(), span.clone())), 67 | } 68 | } 69 | } 70 | 71 | fn parse(tokens: &[(Token, Span)], orig: &str) -> Vec { 72 | let mut result_lines = Vec::::new(); 73 | 74 | let mut current_line_start = 0; 75 | let mut current_line = Option::::None; 76 | for t in tokens { 77 | match &t.0 { 78 | Token::Token(s) => { 79 | let line = current_line.get_or_insert_with(Default::default); 80 | // The first token is the command name 81 | if line.command.is_none() && line.flags.is_empty() && !s.starts_with('-') { 82 | if let Some((command, config)) = split_token(s, &t.1, orig, ':') { 83 | line.command = if command.0.is_empty() { 84 | None 85 | } else { 86 | Some(command) 87 | }; 88 | line.config = Some(config); 89 | } else { 90 | line.command = Some((s.clone(), t.1.clone())); 91 | line.config = None 92 | } 93 | } else { 94 | // All other tokens are flags 95 | line.flags.push(parse_flag(s, &t.1, orig)); 96 | } 97 | } 98 | Token::Comment(s) => { 99 | let line = current_line.get_or_insert_with(Default::default); 100 | assert!(line.comment.is_none()); 101 | line.comment = Some((s.clone(), t.1.clone())); 102 | } 103 | Token::Newline => { 104 | let mut line = current_line.take().unwrap_or_default(); 105 | line.span = current_line_start..t.1.end; 106 | result_lines.push(line); 107 | current_line_start = t.1.end; 108 | } 109 | Token::EscapedNewline => (), 110 | }; 111 | } 112 | if let Some(mut l) = current_line.take() { 113 | let implicit_final_newline = orig.len(); 114 | l.span = current_line_start..implicit_final_newline; 115 | result_lines.push(l); 116 | } 117 | 118 | result_lines 119 | } 120 | 121 | // Parser for bazelrc files. 
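// For example, `build:opt --x=y` parses into a single `Line` whose command is `build`,
// whose config is `opt`, and whose only `Flag` has the name `--x` and the value `y`;
// the tests below exercise quoted and escaped variants of exactly this shape.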
122 | pub fn parse_from_str(str: &str) -> ParserResult { 123 | // Tokenize 124 | let tokenizer_result = tokenizer().parse(str); 125 | let tokens = tokenizer_result.output().unwrap_or(&Vec::new()).clone(); 126 | let errors = tokenizer_result.into_errors(); 127 | 128 | // Parse 129 | let lines = parse(&tokens, str); 130 | 131 | ParserResult { 132 | tokens, 133 | lines, 134 | errors, 135 | } 136 | } 137 | 138 | #[test] 139 | fn test_command_specifier() { 140 | // The first token is the command name 141 | assert_eq!( 142 | parse_from_str("cmd").lines, 143 | Vec::from([Line { 144 | command: Some(("cmd".to_string(), 0..3)), 145 | span: 0..3, 146 | ..Default::default() 147 | },]) 148 | ); 149 | 150 | // The command name might be followed by `:config-name` 151 | assert_eq!( 152 | parse_from_str("cmd:my-config").lines, 153 | vec!(Line { 154 | command: Some(("cmd".to_string(), 0..3)), 155 | config: Some(("my-config".to_string(), 3..13)), 156 | span: 0..13, 157 | ..Default::default() 158 | }) 159 | ); 160 | 161 | // The config might contain arbitrarily complex escaped tokens 162 | assert_eq!( 163 | parse_from_str("cmd:my-\\ con'f ig'").lines, 164 | vec!(Line { 165 | command: Some(("cmd".to_string(), 0..3)), 166 | config: Some(("my- conf ig".to_string(), 3..18)), 167 | span: 0..18, 168 | ..Default::default() 169 | }) 170 | ); 171 | 172 | // The command combined with some actual arguments 173 | assert_eq!( 174 | parse_from_str("bu'ild\\:o'pt --x=y").lines, 175 | vec!(Line { 176 | command: Some(("build".to_string(), 0..7)), 177 | config: Some(("opt".to_string(), 7..12)), 178 | flags: vec!(Flag { 179 | name: Some(("--x".to_string(), 13..16)), 180 | value: Some(("y".to_string(), 16..18)), 181 | }), 182 | span: 0..18, 183 | ..Default::default() 184 | }) 185 | ); 186 | 187 | // In case the leading command name is missing, parse flags 188 | assert_eq!( 189 | parse_from_str("--x y").lines, 190 | vec!(Line { 191 | command: None, 192 | flags: vec!( 193 | Flag { 194 | name: Some(("--x".to_string(), 0..3)), 195 | value: None 196 | }, 197 | Flag { 198 | name: None, 199 | value: Some(("y".to_string(), 4..5)), 200 | } 201 | ), 202 | span: 0..5, 203 | ..Default::default() 204 | }) 205 | ); 206 | 207 | // Parse something useful if the leading command name is missing, but a config name is provided 208 | assert_eq!( 209 | parse_from_str(":opt --x").lines, 210 | vec!(Line { 211 | command: None, 212 | config: Some(("opt".to_string(), 0..4)), 213 | flags: vec!(Flag { 214 | name: Some(("--x".to_string(), 5..8)), 215 | value: None 216 | },), 217 | span: 0..8, 218 | ..Default::default() 219 | }) 220 | ); 221 | } 222 | 223 | #[test] 224 | fn test_flag_parsing() { 225 | // An unnamed flag with only a value 226 | assert_eq!( 227 | parse_from_str("build foo").lines, 228 | vec!(Line { 229 | command: Some(("build".to_string(), 0..5)), 230 | flags: vec!(Flag { 231 | name: None, 232 | value: Some(("foo".to_string(), 6..9)), 233 | }), 234 | span: 0..9, 235 | ..Default::default() 236 | }) 237 | ); 238 | 239 | // A long flag 240 | assert_eq!( 241 | parse_from_str("--x").lines, 242 | vec!(Line { 243 | command: None, 244 | flags: vec!(Flag { 245 | name: Some(("--x".to_string(), 0..3)), 246 | value: None 247 | }), 248 | span: 0..3, 249 | ..Default::default() 250 | }) 251 | ); 252 | 253 | // An abbreviated flag 254 | assert_eq!( 255 | parse_from_str("-x").lines, 256 | vec!(Line { 257 | command: None, 258 | flags: vec!(Flag { 259 | name: Some(("-x".to_string(), 0..2)), 260 | value: None 261 | }), 262 | span: 0..2, 263 | ..Default::default() 
264 | }) 265 | ); 266 | 267 | // An `=` flag 268 | assert_eq!( 269 | parse_from_str("--x=y").lines, 270 | vec!(Line { 271 | command: None, 272 | flags: vec!(Flag { 273 | name: Some(("--x".to_string(), 0..3)), 274 | value: Some(("y".to_string(), 3..5)), 275 | }), 276 | span: 0..5, 277 | ..Default::default() 278 | }) 279 | ); 280 | } 281 | 282 | #[test] 283 | fn test_comments() { 284 | // Comments 285 | assert_eq!( 286 | parse_from_str(" # my comment\n#2nd comment").lines, 287 | vec!( 288 | Line { 289 | comment: Some((" my comment".to_string(), 1..13)), 290 | span: 0..14, 291 | ..Default::default() 292 | }, 293 | Line { 294 | comment: Some(("2nd comment".to_string(), 14..26)), 295 | span: 14..26, 296 | ..Default::default() 297 | } 298 | ) 299 | ); 300 | // Comments can be continued across lines with `\` 301 | assert_eq!( 302 | parse_from_str(" # my\\\nco\\mment").lines, 303 | vec!(Line { 304 | comment: Some((" my\nco\\mment".to_string(), 1..15)), 305 | span: 0..15, 306 | ..Default::default() 307 | }) 308 | ); 309 | 310 | // Comments can even start in the middle of a token, without a whitespace 311 | assert_eq!( 312 | parse_from_str("cmd #comment").lines, 313 | vec!(Line { 314 | command: Some(("cmd".to_string(), 0..3)), 315 | comment: Some(("comment".to_string(), 4..12)), 316 | span: 0..12, 317 | ..Default::default() 318 | }) 319 | ); 320 | } 321 | 322 | #[test] 323 | fn test_empty_lines() { 324 | // Check that we keep also keep a representation for empty lines 325 | assert_eq!( 326 | parse_from_str("build --x=y\n\ncommon --z=w\n\n\n").lines, 327 | vec!( 328 | Line { 329 | command: Some(("build".to_string(), 0..5)), 330 | config: None, 331 | flags: vec!(Flag { 332 | name: Some(("--x".to_string(), 6..9)), 333 | value: Some(("y".to_string(), 9..11)) 334 | }), 335 | comment: None, 336 | span: 0..12 337 | }, 338 | Line { 339 | span: 12..13, 340 | ..Default::default() 341 | }, 342 | Line { 343 | command: Some(("common".to_string(), 13..19)), 344 | config: None, 345 | flags: vec!(Flag { 346 | name: Some(("--z".to_string(), 20..23)), 347 | value: Some(("w".to_string(), 23..25)) 348 | }), 349 | comment: None, 350 | span: 13..26 351 | }, 352 | Line { 353 | span: 26..27, 354 | ..Default::default() 355 | }, 356 | Line { 357 | span: 27..28, 358 | ..Default::default() 359 | }, 360 | ) 361 | ); 362 | } 363 | 364 | #[test] 365 | fn test_unicode() { 366 | // Check that we keep also keep a representation for empty lines 367 | assert_eq!( 368 | parse_from_str("build:🔥 --❄️=🔥").lines, 369 | vec!(Line { 370 | command: Some(("build".to_string(), 0..5)), 371 | config: Some(("🔥".to_string(), 5..10)), 372 | flags: vec!(Flag { 373 | name: Some(("--❄️".to_string(), 11..19)), 374 | value: Some(("🔥".to_string(), 19..24)) 375 | }), 376 | comment: None, 377 | span: 0..24 378 | }) 379 | ); 380 | } 381 | -------------------------------------------------------------------------------- /src/semantic_token.rs: -------------------------------------------------------------------------------- 1 | use ropey::Rope; 2 | use tower_lsp::lsp_types::{SemanticToken, SemanticTokenType}; 3 | 4 | use crate::{parser::Line, tokenizer::Span}; 5 | 6 | pub const LEGEND_TYPE: &[SemanticTokenType] = &[ 7 | SemanticTokenType::COMMENT, 8 | SemanticTokenType::KEYWORD, // For the `build`, `common`, `startup` commands 9 | SemanticTokenType::NAMESPACE, // For the `:opt` config name 10 | SemanticTokenType::VARIABLE, // For the flag names 11 | SemanticTokenType::STRING, // For the flag values 12 | ]; 13 | 14 | #[derive(Debug)] 15 | pub struct 
RCSemanticToken { 16 | pub start: usize, 17 | pub end: usize, 18 | pub token_type: usize, 19 | } 20 | 21 | pub fn create_semantic_token(span: &Span, ttype: &SemanticTokenType) -> RCSemanticToken { 22 | RCSemanticToken { 23 | start: span.start, 24 | end: span.end, 25 | token_type: LEGEND_TYPE.iter().position(|item| item == ttype).unwrap(), 26 | } 27 | } 28 | 29 | /// Creates semantic tokens from the lexer tokens 30 | pub fn semantic_tokens_from_lines(lines: &[Line]) -> Vec { 31 | let mut tokens = Vec::::new(); 32 | 33 | for line in lines { 34 | // Highlight commands 35 | if let Some(cmd) = &line.command { 36 | tokens.push(create_semantic_token(&cmd.1, &SemanticTokenType::KEYWORD)) 37 | } 38 | 39 | // Highlight config names 40 | if let Some(config) = &line.config { 41 | tokens.push(create_semantic_token( 42 | &config.1, 43 | &SemanticTokenType::NAMESPACE, 44 | )) 45 | } 46 | 47 | // Highlight all the flags 48 | for flag in &line.flags { 49 | if let Some(name) = &flag.name { 50 | tokens.push(create_semantic_token(&name.1, &SemanticTokenType::VARIABLE)) 51 | } 52 | if let Some(value) = &flag.value { 53 | tokens.push(create_semantic_token(&value.1, &SemanticTokenType::STRING)) 54 | } 55 | } 56 | 57 | // Highlight comments 58 | if let Some(comment) = &line.comment { 59 | tokens.push(create_semantic_token( 60 | &comment.1, 61 | &SemanticTokenType::COMMENT, 62 | )) 63 | } 64 | } 65 | 66 | tokens 67 | } 68 | 69 | // Converts our internal semantic tokens to the LSP representation of tokens 70 | pub fn convert_to_lsp_tokens(rope: &Rope, semtoks: &[RCSemanticToken]) -> Vec { 71 | let mut pre_line = 0; 72 | let mut pre_start = 0; 73 | let lsp_tokens = semtoks 74 | .iter() 75 | .filter_map(|token| { 76 | let start_line = rope.try_char_to_line(token.start).ok()?; 77 | let end_line = rope.try_char_to_line(token.end).ok()?; 78 | let tokens = (start_line..(end_line + 1)) 79 | .filter_map(|line| { 80 | // Figure out start and end offset within line 81 | let first = rope.try_line_to_char(line).ok()? 
as u32; 82 | let start: u32 = if line == start_line { 83 | token.start as u32 - first 84 | } else { 85 | 0 86 | }; 87 | let end: u32 = if line == end_line { 88 | token.end as u32 - first 89 | } else { 90 | rope.get_line(line).unwrap().len_chars() as u32 91 | }; 92 | let length = end - start; 93 | // Compute deltas to previous token 94 | assert!(line >= pre_line); 95 | let delta_line = (line - pre_line) as u32; 96 | pre_line = line; 97 | let delta_start = if delta_line == 0 { 98 | start - pre_start 99 | } else { 100 | start 101 | }; 102 | pre_start = start; 103 | // Build token 104 | Some(SemanticToken { 105 | delta_line, 106 | delta_start, 107 | length, 108 | token_type: token.token_type as u32, 109 | token_modifiers_bitset: 0, 110 | }) 111 | }) 112 | .collect::>(); 113 | Some(tokens) 114 | }) 115 | .flatten() 116 | .collect::>(); 117 | lsp_tokens 118 | } 119 | -------------------------------------------------------------------------------- /src/tokenizer.rs: -------------------------------------------------------------------------------- 1 | use core::fmt; 2 | 3 | use chumsky::prelude::*; 4 | use chumsky::Parser; 5 | 6 | pub type Span = std::ops::Range; 7 | pub type Spanned = (T, Span); 8 | 9 | #[derive(Debug, PartialEq, Clone, Eq, Hash)] 10 | pub enum Token { 11 | Token(String), 12 | Comment(String), 13 | Newline, 14 | EscapedNewline, 15 | } 16 | 17 | impl fmt::Display for Token { 18 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 19 | match self { 20 | Token::Token(_) => write!(f, "token"), 21 | Token::Comment(_) => write!(f, "comment"), 22 | Token::Newline => write!(f, "\\n"), 23 | Token::EscapedNewline => write!(f, "escaped newline"), 24 | } 25 | } 26 | } 27 | 28 | // Tokenizer for bazelrc files. 29 | // 30 | // The syntax supported by bazelrc is primarily implementation-defined 31 | // and it seems to be a bit ad-hoc. 32 | // 33 | // As such, rather exotic lines like 34 | // > b"uil"d':o'pt --"x"='y' 35 | // are valid. In this case, the line is equivalent to 36 | // > build:opt --x=y 37 | // 38 | // See rc_file.cc and util/strings.cc from the Bazel source code 39 | pub fn tokenizer<'src>( 40 | ) -> impl Parser<'src, &'src str, Vec>, extra::Err>> { 41 | // The token separators 42 | let specialchars = " \t\r\n\"\'#"; 43 | 44 | // All characters except for separators and `\` characters are part of tokens 45 | let raw_token_char = any().filter(|c| *c != '\\' && !specialchars.contains(*c)); 46 | 47 | // Characters can be escaped with a `\` (except for newlines; those are treated in escaped_newline) 48 | let escaped_char = just('\\').ignore_then(any().filter(|c| *c != '\n' && *c != '\r')); 49 | 50 | // A newline. Either a Windows or a Unix newline 51 | let newline_raw = just('\n').or(just('\r').ignore_then(just('\n'))); 52 | let newline = newline_raw.map(|_| Token::Newline); 53 | 54 | // Newlines can be escaped using a `\`, but in contrast to other escaped parameters they 55 | // don't contribute any characters to the token value. 56 | let escaped_newline_raw = just('\\').ignore_then(newline_raw); 57 | let escaped_newline = escaped_newline_raw.map(|_| Token::EscapedNewline); 58 | 59 | // A token character can be either a raw character, an escaped character 60 | // or an escaped newline. 
61 | let token_char = (raw_token_char.or(escaped_char)) 62 | .map(Option::Some) 63 | .or(escaped_newline_raw.to(Option::::None)); 64 | 65 | // A token consists of multiple token_chars 66 | let unquoted_token_raw = token_char.repeated().at_least(1).collect::>(); 67 | 68 | // Quoted tokens with `"` 69 | let dquoted_token_raw = just('"') 70 | .ignore_then( 71 | token_char 72 | .or(one_of(" \t\'#").map(Option::Some)) 73 | .repeated() 74 | .collect::>(), 75 | ) 76 | .then_ignore(just('"')); 77 | 78 | // Quoted tokens with `'` 79 | let squoted_token_raw = just('\'') 80 | .ignore_then( 81 | token_char 82 | .or(one_of(" \t\"#").map(Option::Some)) 83 | .repeated() 84 | .collect(), 85 | ) 86 | .then_ignore(just('\'')); 87 | 88 | // Quoted tokens. Either with `"` or with `'` 89 | let quoted_token_raw = dquoted_token_raw.or(squoted_token_raw); 90 | 91 | // Mixed tokens, consisting of both quoted and unquoted parts 92 | let mixed_token = unquoted_token_raw 93 | .or(quoted_token_raw) 94 | .repeated() 95 | .at_least(1) 96 | .collect::>() 97 | .map(|v| Token::Token(v.iter().flatten().filter_map(|c| *c).collect::())); 98 | 99 | // Comments go until the end of line. 100 | // However a newline might be escaped using `\` 101 | let non_newline = any().and_is(one_of("\n\r").not()); 102 | let comment = just('#') 103 | .ignore_then( 104 | escaped_newline_raw 105 | .or(non_newline) 106 | .repeated() 107 | .collect::(), 108 | ) 109 | .map(Token::Comment); 110 | 111 | // Detect `command` and `command:config` in the beginnig of a line 112 | let token = choice((comment, escaped_newline, newline, mixed_token)) 113 | .recover_with(skip_then_retry_until(any().ignored(), any().ignored())) 114 | .map_with(|tok, e| { 115 | let span: SimpleSpan = e.span(); 116 | ( 117 | tok, 118 | Span { 119 | start: span.start, 120 | end: span.end, 121 | }, 122 | ) 123 | }); 124 | 125 | token 126 | .padded_by(one_of(" \t").repeated()) 127 | .repeated() 128 | .collect::>() 129 | .then_ignore(end()) 130 | } 131 | 132 | #[test] 133 | fn test_newlines() { 134 | // Our tokenizer accepts empty strings 135 | assert_eq!(tokenizer().parse("").into_result(), Ok(Vec::from([]))); 136 | 137 | // `\n` and `\r\n``separate lines. 138 | // Lines can have leading and trailing whitespace. 139 | // We also preserve empty lines 140 | assert_eq!( 141 | tokenizer().parse("cmd\n\r\n\ncmd -x \n").into_result(), 142 | Ok(Vec::from([ 143 | (Token::Token("cmd".to_string()), 0..3), 144 | (Token::Newline, 3..4), 145 | (Token::Newline, 4..6), 146 | (Token::Newline, 6..7), 147 | (Token::Token("cmd".to_string()), 7..10), 148 | (Token::Token("-x".to_string()), 11..13), 149 | (Token::Newline, 14..15), 150 | ])) 151 | ); 152 | 153 | // Newlines can be escaped 154 | assert_eq!( 155 | tokenizer().parse("cmd \\\n -x\n").into_result(), 156 | Ok(Vec::from([ 157 | (Token::Token("cmd".to_string()), 0..3), 158 | (Token::EscapedNewline, 4..6), 159 | (Token::Token("-x".to_string()), 7..9), 160 | (Token::Newline, 9..10), 161 | ])) 162 | ); 163 | } 164 | 165 | #[test] 166 | fn test_tokens() { 167 | let flags_only = |e: &'static str| { 168 | tokenizer().parse(e).into_result().map(|v| { 169 | v.iter() 170 | // Remove positions 171 | .map(|v2| v2.0.clone()) 172 | .collect::>() 173 | }) 174 | }; 175 | let token_vec = |t: &[String]| { 176 | Ok(t.iter() 177 | .map(|s| Token::Token(s.to_string())) 178 | .collect::>()) 179 | }; 180 | 181 | macro_rules! 
assert_single_flag { 182 | ($a1:expr, $a2:expr) => { 183 | assert_eq!(flags_only($a1), token_vec(&[$a2.to_string()])); 184 | }; 185 | } 186 | 187 | // A simple token without escaped characters 188 | assert_single_flag!("abc", "abc"); 189 | // Characters inside tokens can be escaped using `\` 190 | assert_single_flag!("a\\bc\\d", "abcd"); 191 | // A `\` is escaped using another `\` 192 | assert_single_flag!("a\\\\b", "a\\b"); 193 | // A `\` can also be used to escape whitespaces or tabs 194 | assert_single_flag!("a\\ b\\\tc", "a b\tc"); 195 | 196 | // A token can contain be escaped using `"` 197 | assert_single_flag!("\"a b\tc\"", "a b\tc"); 198 | // Instead of `"`, one can also use `'` to escape 199 | assert_single_flag!("'a b\tc'", "a b\tc"); 200 | // Inside `"`, other `"` can be escaped. `'` can be included unescaped 201 | assert_single_flag!("\"a\\\"b'c\"", "a\"b'c"); 202 | // Inside `'`, other `'` can be escaped. `"` can be included unescaped 203 | assert_single_flag!("'a\"b\\'c'", "a\"b'c"); 204 | 205 | // Quoted parts can also appear in the middle of tokens 206 | assert_single_flag!("abc' cd\t e\\''fg\"h i\"j", "abc cd\t e'fgh ij"); 207 | 208 | // A whitespace separates two tokens 209 | assert_eq!( 210 | flags_only("ab c"), 211 | token_vec(&["ab".to_string(), "c".to_string()]) 212 | ); 213 | // Instead of a whitespace, one can also use a tab 214 | assert_eq!( 215 | flags_only("ab\tc"), 216 | token_vec(&["ab".to_string(), "c".to_string()]) 217 | ); 218 | // Two tokens can also be separated by multiple whitespaces 219 | assert_eq!( 220 | flags_only("ab\t \t c"), 221 | token_vec(&["ab".to_string(), "c".to_string()]) 222 | ); 223 | // Multiple quoted tokens 224 | assert_eq!( 225 | flags_only("\"t 1\" 't 2'"), 226 | token_vec(&["t 1".to_string(), "t 2".to_string()]) 227 | ); 228 | 229 | // A token can be continued on the next line using a `\` 230 | assert_single_flag!("a\\\nbc", "abc".to_string()); 231 | // A quoted token does not continue across lines 232 | assert!(tokenizer().parse("'my\ntoken'").has_errors()); 233 | // But a quoted token can contain escaped newlines 234 | assert_single_flag!("'my\\\ntoken'", "mytoken".to_string()); 235 | 236 | // `#` inside a quoted token does not start a token 237 | assert_single_flag!("'a#c'", "a#c".to_string()); 238 | // `#` can be escaped as part of a token 239 | assert_single_flag!("a\\#c", "a#c".to_string()); 240 | } 241 | 242 | #[test] 243 | fn test_comments() { 244 | // Comments 245 | assert_eq!( 246 | tokenizer() 247 | .parse(" # my comment\n#2nd comment") 248 | .into_result(), 249 | Ok(vec!( 250 | (Token::Comment(" my comment".to_string()), 1..13), 251 | (Token::Newline, 13..14), 252 | (Token::Comment("2nd comment".to_string()), 14..26) 253 | )) 254 | ); 255 | // Comments can be continued across lines with `\` 256 | assert_eq!( 257 | tokenizer().parse(" # my\\\nco\\mment").into_result(), 258 | Ok(vec!((Token::Comment(" my\nco\\mment".to_string()), 1..15))) 259 | ); 260 | 261 | // Comments can even start in the middle of a token, without a whitespace 262 | assert_eq!( 263 | tokenizer().parse("flag#comment").into_result(), 264 | Ok(vec!( 265 | (Token::Token("flag".to_string()), 0..4), 266 | (Token::Comment("comment".to_string()), 4..12) 267 | )) 268 | ); 269 | } 270 | 271 | #[test] 272 | fn test_unicode() { 273 | // Unicode characters are allowed in tokens. 
274 | // They are counted in bytes, not characters inside the spans 275 | assert_eq!( 276 | tokenizer().parse("build:🔥 --❄️=🔥").into_result(), 277 | Ok(vec!( 278 | (Token::Token("build:🔥".to_string()), 0..10), 279 | (Token::Token("--❄️=🔥".to_string()), 11..24) 280 | )) 281 | ); 282 | } 283 | -------------------------------------------------------------------------------- /vscode-extension/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "env": { 3 | "browser": true, 4 | "es2021": true 5 | }, 6 | "extends": "love", 7 | "overrides": [ 8 | { 9 | "env": { 10 | "node": true 11 | }, 12 | "files": [ 13 | ".eslintrc.{js,cjs}" 14 | ], 15 | "parserOptions": { 16 | "sourceType": "script" 17 | } 18 | } 19 | ], 20 | "parserOptions": { 21 | "ecmaVersion": "latest" 22 | }, 23 | "rules": { 24 | "semi": ["error", "always"], 25 | "@typescript-eslint/semi": ["error", "always"], 26 | "@typescript-eslint/explicit-function-return-type": "off", 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /vscode-extension/bazelrc-language-configuration.json: -------------------------------------------------------------------------------- 1 | { 2 | "comments": { 3 | "lineComment": "#", 4 | }, 5 | "surroundingPairs": [ 6 | ["\"", "\""], 7 | ["'", "'"] 8 | ], 9 | } 10 | -------------------------------------------------------------------------------- /vscode-extension/build.js: -------------------------------------------------------------------------------- 1 | const esbuild = require('esbuild'); 2 | const fs = require('fs/promises'); 3 | const util = require('util'); 4 | const execFile = util.promisify(require('child_process').execFile); 5 | 6 | const args = util.parseArgs({ 7 | options: { 8 | release: { type: "boolean" }, 9 | watch: { type: "boolean" }, 10 | }, 11 | strict: true, 12 | }); 13 | const release = args.values.release; 14 | const watch = args.values.watch; 15 | 16 | function areStringArraysEqual(arr1, arr2) { 17 | if (arr1.length !== arr2.length) return false; 18 | for (let i = 0; i < arr1.length; i++) { 19 | if (arr1[i] !== arr2[i]) return false; 20 | } 21 | return true; 22 | } 23 | 24 | async function build() { 25 | // Rust build 26 | console.log("build rust..."); 27 | const execExt = process.platform == "win32" ? ".exe" : ""; 28 | const rustOutFolder = release ? "release" : "debug"; 29 | const bazelrcExec = `../target/${rustOutFolder}/bazelrc-lsp${execExt}`; 30 | { 31 | const buildArgs = release ? ["--release"] : []; 32 | const { stdout, stderr } = await execFile("cargo", ["build"].concat(buildArgs), { cwd: ".." 
}); 33 | console.log(stdout); 34 | console.error(stderr); 35 | } 36 | 37 | // Check if `./package.json` is up-to-date 38 | const packageJson = JSON.parse(await fs.readFile("./package.json")); 39 | console.log((await execFile(bazelrcExec, ["--version"])).stdout.trim()); 40 | const bazelrclspVersion = (await execFile(bazelrcExec, ["--version"])).stdout.trim().match("bazelrc-lsp (\\d+.\\d+.\\d+)")[1]; 41 | if (bazelrclspVersion != packageJson.version) { 42 | console.error("Error: Mismatch between package.json version and bazelrc version"); 43 | console.error("package.json versions:", packageJson.version); 44 | console.error("bazelrc-lsp versions:", bazelrclspVersion); 45 | throw new Error("Version mismatch detected."); 46 | } 47 | const versions = packageJson 48 | .contributes.configuration.properties["bazelrc.bazelVersion"].enum; 49 | const rustVersionsJson = (await execFile(bazelrcExec, ["bazel-versions"])).stdout; 50 | const rustVersions = JSON.parse(rustVersionsJson); 51 | const expectedVersions = ["auto"].concat(rustVersions) 52 | if (!areStringArraysEqual(versions, expectedVersions)) { 53 | console.error("Error: Mismatch between supported Bazel version"); 54 | console.error("package.json versions:", versions); 55 | console.error("bazelrc-lsp versions:", rustVersions); 56 | throw new Error("Version mismatch detected."); 57 | } 58 | 59 | // Cleanup 60 | await fs.rm("./dist", { recursive: true, force: true }); 61 | await fs.mkdir("./dist"); 62 | // Copy static artifacts 63 | await fs.copyFile('./package.json', './dist/package.json'); 64 | await fs.copyFile('./bazelrc-language-configuration.json', './dist/bazelrc-language-configuration.json'); 65 | await fs.copyFile('../LICENSE', './dist/LICENSE'); 66 | await fs.copyFile('../README.md', './dist/README.md'); 67 | await fs.copyFile(bazelrcExec, `./dist/bazelrc-lsp${execExt}`); 68 | // Typescript build 69 | console.log("build typescript..."); 70 | const ctx = await esbuild.context({ 71 | entryPoints: ['./src/extension.ts'], 72 | outfile: './dist/extension.js', 73 | platform: "node", 74 | format: "cjs", 75 | external: ["vscode"], 76 | bundle: true, 77 | minify: release, 78 | sourcemap: release ? 
false : "linked", 79 | }); 80 | await ctx.rebuild(); 81 | // Watching 82 | if (watch) { 83 | console.log("watching typescript..."); 84 | await ctx.watch(); 85 | } else { 86 | ctx.dispose(); 87 | } 88 | } 89 | 90 | build() 91 | .catch((e) => { 92 | console.log(e); 93 | process.exit(1); 94 | }) 95 | -------------------------------------------------------------------------------- /vscode-extension/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "bazelrc-lsp", 3 | "description": "Code intelligence for bazerc files", 4 | "displayName": "bazelrc language server", 5 | "publisher": "Adrian Vogelsgesang", 6 | "license": "Apache-2.0", 7 | "version": "0.2.3", 8 | "categories": [ 9 | "Programming Languages", 10 | "Formatters", 11 | "Linters" 12 | ], 13 | "keywords": [ 14 | "language-server", 15 | "Bazel" 16 | ], 17 | "repository": { 18 | "url": "https://github.com/vogelsgesang/bazelrc-lsp" 19 | }, 20 | "engines": { 21 | "vscode": "^1.83.0" 22 | }, 23 | "enabledApiProposals": [], 24 | "activationEvents": [], 25 | "main": "./extension.js", 26 | "markdown": "github", 27 | "contributes": { 28 | "languages": [ 29 | { 30 | "id": "bazelrc", 31 | "extensions": [ 32 | ".bazelrc" 33 | ], 34 | "aliases": [ 35 | "Bazel RC" 36 | ], 37 | "configuration": "./bazelrc-language-configuration.json" 38 | } 39 | ], 40 | "configuration": { 41 | "title": "Bazelrc", 42 | "properties": { 43 | "bazelrc.bazelVersion": { 44 | "type": "string", 45 | "default": "auto", 46 | "description": "The Bazel version to use", 47 | "enum": [ 48 | "auto", 49 | "7.0.0", 50 | "7.0.1", 51 | "7.0.2", 52 | "7.1.0", 53 | "7.1.1", 54 | "7.1.2", 55 | "7.2.0", 56 | "7.2.1", 57 | "7.3.0", 58 | "7.3.1", 59 | "7.3.2", 60 | "7.4.0", 61 | "7.4.1", 62 | "7.5.0", 63 | "7.6.0", 64 | "7.6.1", 65 | "8.0.0", 66 | "8.0.1", 67 | "8.1.0", 68 | "8.1.1", 69 | "8.2.0", 70 | "8.2.1", 71 | "9.0.0-pre.20250317.2" 72 | ], 73 | "scope": "machine-overridable" 74 | }, 75 | "bazelrc.formatLines": { 76 | "type": "string", 77 | "default": "keep", 78 | "description": "Should lines be combined / split when formatting bazelrc files?", 79 | "enum": ["keep", "lineContinuations", "separateLines", "singleLine"], 80 | "enumItemLabels": [ 81 | "Do not reflow lines", 82 | "Combine subsequent commands and use `\\` line continuations", 83 | "Put each flag on a separate line", 84 | "Put all flags on a single line" 85 | ], 86 | "scope": "machine-overridable" 87 | } 88 | } 89 | } 90 | }, 91 | "scripts": { 92 | "watch": "node ./build.js --watch", 93 | "test-compile": "tsc -p ./", 94 | "lint": "eslint src --ext ts", 95 | "build": "node ./build.js", 96 | "package": "pnpm build && cd dist && vsce package --no-dependencies -o ..", 97 | "package:release": "pnpm build --release && cd dist && vsce package --no-dependencies -o ..", 98 | "publish": "pnpm build --release && cd dist && vsce publish --no-dependencies -o .." 
99 | }, 100 | "dependencies": { 101 | "vscode-languageclient": "^9.0.1" 102 | }, 103 | "devDependencies": { 104 | "@types/node": "^20.8.6", 105 | "@types/vscode": "^1.83.0", 106 | "@typescript-eslint/eslint-plugin": "^7.18.0", 107 | "@typescript-eslint/parser": "^7.18.0", 108 | "@vscode/vsce": "2.21.1", 109 | "esbuild": "^0.24.2", 110 | "eslint": "^8.57.1", 111 | "eslint-config-love": "^44.0.0", 112 | "eslint-plugin-import": "^2.31.0", 113 | "eslint-plugin-n": "^16.6.2", 114 | "eslint-plugin-promise": "^6.6.0", 115 | "typescript": "5.5.4" 116 | } 117 | } -------------------------------------------------------------------------------- /vscode-extension/src/extension.ts: -------------------------------------------------------------------------------- 1 | import { 2 | workspace, 3 | type ExtensionContext 4 | } from 'vscode'; 5 | 6 | import { 7 | type Executable, 8 | LanguageClient, 9 | type LanguageClientOptions, 10 | type ServerOptions 11 | } from 'vscode-languageclient/node'; 12 | 13 | async function startLsp (context: ExtensionContext) { 14 | const command = process.env.SERVER_PATH ?? context.asAbsolutePath('bazelrc-lsp'); 15 | 16 | const config = workspace.getConfiguration('bazelrc'); 17 | const bazelVersion = config.get('bazelVersion') ?? 'auto'; 18 | const bazelVersionArgs = 19 | bazelVersion !== 'auto' ? ['--bazel-version', bazelVersion] : []; 20 | 21 | const run: Executable = { 22 | command, 23 | args: bazelVersionArgs.concat(['lsp']), 24 | options: { 25 | env: { 26 | ...process.env, 27 | // eslint-disable-next-line @typescript-eslint/naming-convention 28 | RUST_LOG: 'debug', 29 | // eslint-disable-next-line @typescript-eslint/naming-convention 30 | RUST_BACKTRACE: '1' 31 | } 32 | } 33 | }; 34 | // If the extension is launched in debug mode then the debug server options are used 35 | // Otherwise the run options are used 36 | const serverOptions: ServerOptions = { 37 | run, 38 | debug: run 39 | }; 40 | // Options to control the language client 41 | const clientOptions: LanguageClientOptions = { 42 | // Register the server for bazelrc documents 43 | documentSelector: [{ language: 'bazelrc' }], 44 | synchronize: { 45 | configurationSection: 'bazelrc' 46 | } 47 | }; 48 | 49 | // Create the language client and start the client. 50 | const client = new LanguageClient('bazelrc-lsp', 'Bazelrc Language Server', serverOptions, clientOptions); 51 | await client.start(); 52 | return client; 53 | } 54 | 55 | let client: LanguageClient | null = null; 56 | 57 | export async function activate (context: ExtensionContext) { 58 | client = await startLsp(context); 59 | 60 | context.subscriptions.push(workspace.onDidChangeConfiguration(async (e) => { 61 | if (e.affectsConfiguration('bazelrc.bazelVersion')) { 62 | await client?.stop(); 63 | client = await startLsp(context); 64 | } 65 | })); 66 | } 67 | 68 | export function deactivate (): Thenable | undefined { 69 | if (client === null) { 70 | return undefined; 71 | } 72 | return client.stop(); 73 | } 74 | -------------------------------------------------------------------------------- /vscode-extension/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "strict": true, 4 | "module": "commonjs", 5 | "target": "es2019", 6 | "lib": ["ES2019"], 7 | "outDir": "dist", 8 | "rootDir": "src", 9 | "sourceMap": true 10 | }, 11 | "include": ["src"], 12 | "exclude": ["node_modules", ".vscode-test"] 13 | } 14 | --------------------------------------------------------------------------------
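As a quick orientation for the Rust sources above, here is a minimal sketch of how the parsing and indexing APIs compose when the crate is consumed as a library (under the name `bazelrc_lsp`, as `src/main.rs` imports it). It is an illustration rather than code from the repository: the bazelrc snippet and the byte offset are made-up example values, and the flag analysis (`combine_key_value_flags`) and diagnostics steps are left out.

use bazelrc_lsp::line_index::{IndexEntryKind, IndexedLines};
use bazelrc_lsp::parser::parse_from_str;

fn main() {
    // Parse a small bazelrc snippet; `lines` holds one parsed `Line` per source line.
    let parsed = parse_from_str("build:opt --remote_cache=grpc://cache.example.com\n");
    assert!(parsed.errors.is_empty());

    // Build the offset-keyed index that hover, completion, and goto-definition rely on.
    let index = IndexedLines::from_lines(parsed.lines);

    // Byte offset 12 falls inside `--remote_cache`, i.e. a flag name.
    match index.find_symbol_at_position(12).map(|entry| &entry.kind) {
        Some(IndexEntryKind::Command) => println!("command"),
        Some(IndexEntryKind::Config) => println!("config name"),
        Some(IndexEntryKind::FlagName(flag_nr)) => println!("flag name #{flag_nr}"),
        Some(IndexEntryKind::FlagValue(flag_nr)) => println!("flag value #{flag_nr}"),
        None => println!("no symbol at this offset"),
    }
}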