├── .cargo └── config.toml ├── .github ├── npm │ ├── .gitignore │ ├── getBinary.js │ ├── package.json │ ├── run.js │ └── scripts.js ├── release-drafter.yml └── workflows │ └── ci.yml ├── .gitignore ├── CHANGELOG.md ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── flake.lock ├── flake.nix ├── readme-images └── github-personal-access-token.png ├── resources ├── cache │ ├── dep1 │ │ └── hash1 │ │ │ └── proto │ │ │ └── example.proto │ ├── dep2 │ │ └── hash2 │ │ │ └── proto │ │ │ ├── example2.proto │ │ │ ├── example3.proto │ │ │ ├── example4.proto │ │ │ └── example5.proto │ └── dep3 │ │ └── hash3 │ │ ├── proto │ │ └── example.proto │ │ └── root │ │ └── proto │ │ └── root.proto └── proto_out │ ├── example.proto │ └── example2.proto ├── rust-toolchain.toml ├── rustfmt.toml └── src ├── api ├── builder.rs └── mod.rs ├── cache ├── git.rs └── mod.rs ├── cli ├── command_handlers.rs └── mod.rs ├── config.rs ├── fetch.rs ├── flock.rs ├── git ├── cache.rs ├── mod.rs └── repository.rs ├── lib.rs ├── main.rs ├── model ├── mod.rs ├── protodep.rs └── protofetch │ ├── lock.rs │ ├── mod.rs │ └── resolved.rs ├── proto.rs └── resolver ├── git.rs ├── lock.rs └── mod.rs /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [registries.crates-io] 2 | protocol = "sparse" 3 | -------------------------------------------------------------------------------- /.github/npm/.gitignore: -------------------------------------------------------------------------------- 1 | /package-lock.json 2 | /node_modules/ 3 | -------------------------------------------------------------------------------- /.github/npm/getBinary.js: -------------------------------------------------------------------------------- 1 | import { Binary } from 'simple-binary-install'; 2 | import * as os from 'os'; 3 | import * as fs from 'fs'; 4 | 5 | function getPlatform() { 6 | const type = os.type(); 7 | const arch = os.arch(); 8 | 9 | if (type === 'Windows_NT' && arch === 'x64') { 10 | return 'x86_64-pc-windows-msvc'; 11 | } 12 | 13 | if (type === 'Linux' && arch === 'x64') { 14 | return 'x86_64-unknown-linux-musl'; 15 | } 16 | 17 | if (type === 'Linux' && arch === 'arm64') { 18 | return 'aarch64-unknown-linux-musl'; 19 | } 20 | 21 | if (type === 'Darwin' && arch === 'x64') { 22 | return 'x86_64-apple-darwin'; 23 | } 24 | 25 | if (type === 'Darwin' && arch === 'arm64') { 26 | return 'aarch64-apple-darwin'; 27 | } 28 | 29 | throw new Error(`Unsupported platform: ${type} ${arch}. Please create an issue at https://github.com/coralogix/protofetch/issues`); 30 | } 31 | 32 | export function getBinary() { 33 | const platform = getPlatform(); 34 | const { version } = JSON.parse(fs.readFileSync('./package.json')); 35 | const url = `https://github.com/coralogix/protofetch/releases/download/v${version}/protofetch_${platform}.tar.gz`; 36 | const name = 'protofetch'; 37 | 38 | return new Binary(name, url) 39 | } 40 | -------------------------------------------------------------------------------- /.github/npm/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cx-protofetch", 3 | "version": "VERSION#TO#REPLACE", 4 | "description": "A source dependency management tool for Protobuf.", 5 | "repository": "https://github.com/coralogix/protofetch.git", 6 | "homepage": "https://github.com/coralogix/protofetch", 7 | "license": "Apache-2.0", 8 | "type": "module", 9 | "bin": { 10 | "protofetch": "run.js" 11 | }, 12 | "scripts": { 13 | "postinstall": "node scripts.js install" 14 | }, 15 | "dependencies": { 16 | "simple-binary-install": "^0.2.1" 17 | }, 18 | "keywords": [ 19 | "proto", 20 | "cli", 21 | "toml", 22 | "protobuf", 23 | "dependencies", 24 | "dependency-manager", 25 | "grpc" 26 | ], 27 | "main": "index.js" 28 | } 29 | -------------------------------------------------------------------------------- /.github/npm/run.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import { getBinary } from './getBinary.js'; 3 | 4 | getBinary().run(); 5 | -------------------------------------------------------------------------------- /.github/npm/scripts.js: -------------------------------------------------------------------------------- 1 | import { getBinary } from './getBinary.js'; 2 | 3 | if (process.argv.includes('install')) { 4 | getBinary().install(); 5 | } 6 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name-template: 'v$RESOLVED_VERSION' 2 | tag-template: 'v$RESOLVED_VERSION' 3 | template: | 4 | # What's Changed 5 | $CHANGES 6 | categories: 7 | - title: 'Breaking' 8 | label: 'type: breaking' 9 | - title: 'New' 10 | label: 'type: feature' 11 | - title: 'Bug Fixes' 12 | label: 'type: bug' 13 | - title: 'Maintenance' 14 | label: 'type: maintenance' 15 | - title: 'Documentation' 16 | label: 'type: docs' 17 | - title: 'Dependency Updates' 18 | label: 'type: dependencies' 19 | 20 | version-resolver: 21 | major: 22 | labels: 23 | - 'type: breaking' 24 | minor: 25 | labels: 26 | - 'type: feature' 27 | patch: 28 | labels: 29 | - 'type: bug' 30 | - 'type: maintenance' 31 | - 'type: docs' 32 | - 'type: dependencies' 33 | - 'type: security' 34 | 35 | exclude-labels: 36 | - 'skip-changelog' 37 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | on: 2 | pull_request: {} 3 | push: 4 | branches: 5 | - master 6 | tags: [ 'v*.*.*' ] 7 | 8 | name: CI 9 | 10 | env: 11 | CARGO_TERM_COLOR: always 12 | 13 | jobs: 14 | lint: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Checkout sources 18 | uses: actions/checkout@v3 19 | 20 | - name: Cache rust dependencies 21 | uses: Swatinem/rust-cache@v2 22 | 23 | - name: Check that Cargo.lock is up-to-date 24 | run: cargo metadata --format-version 1 --locked 25 | 26 | - name: Run cargo fmt 27 | run: cargo fmt --check 28 | 29 | - name: Run cargo clippy 30 | run: cargo clippy -- -D warnings 31 | 32 | - name: Run cargo check 33 | run: cargo check 34 | 35 | test: 36 | strategy: 37 | matrix: 38 | runner: [ ubuntu-latest, macos-latest, windows-latest ] 39 | runs-on: ${{ matrix.runner }} 40 | steps: 41 | - name: Checkout sources 42 | uses: actions/checkout@v3 43 | 44 | - name: Cache rust dependencies 45 | uses: Swatinem/rust-cache@v2 46 | 47 | - name: Run cargo test 48 | run: cargo test 49 | 50 | versions: 51 | runs-on: ubuntu-latest 52 | steps: 53 | - uses: taiki-e/install-action@v2 54 | with: 55 | tool: cargo-hack,cargo-minimal-versions 56 | 57 | - name: Checkout sources 58 | uses: actions/checkout@v3 59 | 60 | - name: Cache rust dependencies 61 | uses: Swatinem/rust-cache@v2 62 | 63 | # Check with minimal dependency versions and MSRV from Cargo.toml 64 | - name: Check minimal versions 65 | run: cargo minimal-versions check --rust-version --features vendored-openssl,vendored-libgit2 66 | 67 | semver: 68 | runs-on: ubuntu-latest 69 | steps: 70 | - name: Checkout sources 71 | uses: actions/checkout@v3 72 | 73 | - name: Check semver 74 | uses: obi1kenobi/cargo-semver-checks-action@v2 75 | 76 | flake: 77 | runs-on: ubuntu-latest 78 | steps: 79 | - name: Checkout sources 80 | uses: actions/checkout@v3 81 | 82 | - name: Install nix 83 | uses: DeterminateSystems/nix-installer-action@v12 84 | 85 | - name: Cache nix dependencies 86 | uses: DeterminateSystems/magic-nix-cache-action@v7 87 | 88 | - name: Build 89 | run: nix flake check 90 | 91 | update-release-draft: 92 | runs-on: ubuntu-latest 93 | if: github.repository == 'coralogix/protofetch' && github.ref == 'refs/heads/master' 94 | steps: 95 | - uses: release-drafter/release-drafter@v5 96 | env: 97 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 98 | 99 | package: 100 | needs: [ lint, test, versions, semver ] 101 | strategy: 102 | fail-fast: false 103 | matrix: 104 | target: 105 | - rust: aarch64-unknown-linux-musl 106 | runner: ubuntu-latest 107 | tar: tar 108 | cross: true 109 | ext: 110 | - rust: x86_64-unknown-linux-musl 111 | runner: ubuntu-latest 112 | tar: tar 113 | cross: true 114 | ext: 115 | - rust: aarch64-apple-darwin 116 | runner: macos-14 117 | # We use gtar to make sure compressed files are not detected as sparse 118 | tar: gtar 119 | cross: false 120 | ext: 121 | - rust: x86_64-apple-darwin 122 | runner: macos-13 123 | # We use gtar to make sure compressed files are not detected as sparse 124 | tar: gtar 125 | cross: false 126 | ext: 127 | - rust: x86_64-pc-windows-msvc 128 | runner: windows-latest 129 | tar: tar 130 | cross: false 131 | ext: '.exe' 132 | runs-on: ${{ matrix.target.runner }} 133 | steps: 134 | - name: Checkout 135 | uses: actions/checkout@v3 136 | 137 | - name: Cache rust dependencies 138 | uses: Swatinem/rust-cache@v2 139 | with: 140 | key: ${{ matrix.target.rust }} 141 | 142 | - name: Install cross 143 | if: ${{ matrix.target.cross }} 144 | run: cargo install --locked cross 145 | 146 | - name: Build 147 | run: ${{ matrix.target.cross && 'cross' || 'cargo' }} build --release --target ${{ matrix.target.rust }} --features vendored-openssl,vendored-libgit2 148 | 149 | - name: Package 150 | run: | 151 | mv target/${{ matrix.target.rust }}/release bin/ 152 | ${{ matrix.target.tar }} -czvf protofetch_${{ matrix.target.rust }}.tar.gz bin/protofetch${{ matrix.target.ext }} 153 | 154 | - name: Upload 155 | uses: actions/upload-artifact@v4 156 | with: 157 | name: package-${{ matrix.target.rust }} 158 | path: protofetch_${{ matrix.target.rust }}.tar.gz 159 | 160 | release: 161 | runs-on: ubuntu-latest 162 | if: github.repository == 'coralogix/protofetch' && startsWith(github.ref, 'refs/tags/') 163 | needs: [ package ] 164 | env: 165 | CRATES_IO_TOKEN: ${{ secrets.CRATES_IO_TOKEN }} 166 | NPM_TOKEN: ${{ secrets.NPM_ACCESS_TOKEN }} 167 | 168 | steps: 169 | - name: Checkout 170 | uses: actions/checkout@v3 171 | 172 | - name: Publish cargo package 173 | run: cargo publish --token ${{ env.CRATES_IO_TOKEN }} 174 | 175 | - name: Publish npm package 176 | run: | 177 | VERSION=$(sed -n -e '/version/ s/.* = *//p' "Cargo.toml" | head -1 | tr -d '"') 178 | export VERSION 179 | # Tee had issue to write to the same file which is used for read so creating a temp package.json file 180 | mv .github/npm/package.json .github/npm/package.json.temp 181 | sed "s/VERSION#TO#REPLACE/${VERSION}/g" .github/npm/package.json.temp | tee .github/npm/package.json 182 | echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" > ".npmrc" 183 | npm publish .github/npm 184 | 185 | - name: Download artifacts 186 | uses: actions/download-artifact@v4 187 | with: 188 | pattern: package-* 189 | merge-multiple: true 190 | 191 | - name: Upload release artifacts 192 | uses: softprops/action-gh-release@v1 193 | with: 194 | files: | 195 | protofetch_aarch64-unknown-linux-musl.tar.gz 196 | protofetch_x86_64-unknown-linux-musl.tar.gz 197 | protofetch_aarch64-apple-darwin.tar.gz 198 | protofetch_x86_64-apple-darwin.tar.gz 199 | protofetch_x86_64-pc-windows-msvc.tar.gz 200 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | 3 | # Created by https://www.toptal.com/developers/gitignore/api/rust 4 | # Edit at https://www.toptal.com/developers/gitignore?templates=rust 5 | 6 | ### Rust ### 7 | # Generated by Cargo 8 | # will have compiled files and executables 9 | debug/ 10 | target/ 11 | 12 | # These are backup files generated by rustfmt 13 | **/*.rs.bk 14 | 15 | # MSVC Windows builds of rustc generate these, which store debugging information 16 | *.pdb 17 | 18 | # End of https://www.toptal.com/developers/gitignore/api/rust 19 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## [Unreleased] 8 | 9 | ## [0.1.11](https://github.com/coralogix/protofetch/compare/v0.1.10...v0.1.11) - 2025-02-27 10 | 11 | ### Other 12 | 13 | - Fix cache directory lock ([#157](https://github.com/coralogix/protofetch/pull/157)) 14 | 15 | ## [0.1.10](https://github.com/coralogix/protofetch/compare/v0.1.9...v0.1.10) - 2025-02-25 16 | 17 | ### Other 18 | - Fix packages not being attached to the release ([#155](https://github.com/coralogix/protofetch/pull/155)) 19 | 20 | ## [0.1.9](https://github.com/coralogix/protofetch/compare/v0.1.8...v0.1.9) - 2025-02-24 21 | 22 | ### Other 23 | - Update dependencies ([#152](https://github.com/coralogix/protofetch/pull/152)) 24 | - Update upload/download artifact actions ([#153](https://github.com/coralogix/protofetch/pull/153)) 25 | 26 | ## [0.1.8](https://github.com/coralogix/protofetch/compare/v0.1.7...v0.1.8) - 2024-08-16 27 | 28 | ### Other 29 | - Use more robust cache locking ([#150](https://github.com/coralogix/protofetch/pull/150)) 30 | - Fix fetching when no branch is specified ([#148](https://github.com/coralogix/protofetch/pull/148)) 31 | 32 | ## [0.1.7](https://github.com/coralogix/protofetch/compare/v0.1.6...v0.1.7) - 2024-07-29 33 | 34 | ### Other 35 | - Fix nix flake build and check this on CI ([#145](https://github.com/coralogix/protofetch/pull/145)) 36 | - Update dependencies ([#144](https://github.com/coralogix/protofetch/pull/144)) 37 | 38 | ## [0.1.6](https://github.com/coralogix/protofetch/compare/v0.1.5...v0.1.6) - 2024-07-02 39 | 40 | ### Other 41 | - Fetch optimizations ([#142](https://github.com/coralogix/protofetch/pull/142)) 42 | 43 | ## [0.1.5](https://github.com/coralogix/protofetch/compare/v0.1.4...v0.1.5) - 2024-06-27 44 | 45 | ### Other 46 | - Cache lock improvements ([#140](https://github.com/coralogix/protofetch/pull/140)) 47 | 48 | ## [0.1.4](https://github.com/coralogix/protofetch/compare/v0.1.3...v0.1.4) - 2024-05-22 49 | 50 | ### Other 51 | - Prevent concurrent cache access ([#135](https://github.com/coralogix/protofetch/pull/135)) 52 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "protofetch" 3 | version = "0.1.11" 4 | edition = "2021" 5 | rust-version = "1.75" 6 | license = "Apache-2.0" 7 | description = "A source dependency management tool for Protobuf." 8 | homepage = "https://github.com/coralogix/protofetch" 9 | repository = "https://github.com/coralogix/protofetch" 10 | readme = "README.md" 11 | keywords = ["proto", "cli", "protobuf", "dependency-manager", "grpc"] 12 | categories = ["command-line-utilities"] 13 | exclude = [".github", ".gitignore"] 14 | 15 | [features] 16 | vendored-openssl = ["git2/vendored-openssl"] 17 | vendored-libgit2 = ["git2/vendored-libgit2"] 18 | 19 | [dependencies] 20 | anyhow = "1.0.98" 21 | clap = { version = "4.5.36", features = ["derive"] } 22 | config = { version = "0.15.11", default-features = false, features = ["toml"] } 23 | env_logger = { version = "0.11.8", default-features = false, features = ["auto-color"] } 24 | fs4 = "0.13.1" 25 | git2 = ">=0.18.0, <0.21.0" 26 | # Upgrading home to 0.5.11 will bring MSRV to 1.81.0 27 | home = "0.5.9" 28 | log = "0.4.27" 29 | regex-lite = "0.1.6" 30 | serde = { version = "1.0.219", features = ["derive"] } 31 | ssh-key = "0.6.7" 32 | thiserror = "2.0.12" 33 | toml = { version = "0.8.20", features = ["preserve_order"] } 34 | 35 | [dev-dependencies] 36 | pretty_assertions = "1.4.1" 37 | project-root = "0.2.2" 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Protofetch 2 | ![CI](https://github.com/coralogix/protofetch/workflows/CI/badge.svg) 3 | [![Apache 2 License License](http://img.shields.io/badge/license-APACHE2-blue.svg)](http://www.apache.org/licenses/LICENSE-2.0) 4 | [![Crates.io](https://img.shields.io/crates/v/protofetch.svg)](https://crates.io/crates/protofetch) 5 | [![npm version](https://img.shields.io/npm/v/cx-protofetch.svg??style=flat)](https://www.npmjs.com/package/cx-protofetch) 6 | ![GitHub Stars](https://img.shields.io/github/stars/coralogix/protofetch.svg) 7 | 8 | A source dependency management tool for Protobuf files. 9 | 10 | ## Motivation 11 | 12 | If you use protobuf extensively as a data format for services to communicate with or to share your APIs with the outside world, 13 | you need a way to get correct versions of protobuf files for each service and ability to depend on a specific version. 14 | This is needed on both server and client side. 15 | Without automation, it can quickly become cumbersome, error-prone and overall unmanageable. 16 | 17 | To make it bearable, usable and stable, one needs tooling that automates this work and makes it predictable. This is what Protofetch aims to do. 18 | 19 | ## Why Protofetch? 20 | 21 | Protofetch aims to tackle the complexity of handling protobuf dependencies in a declarative fashion. 22 | It makes it trivial to declare dependencies and to manage them. 23 | 24 | It gives you the ability to have: 25 | * dependency on specific version/hash; 26 | * predictable builds/test/CI that depend on protobufs; 27 | * easy to read declarative specification of protobuf dependencies; 28 | * automate fetching of the dependencies themselves with their transitive dependencies. 29 | * caching of dependencies so that they can be shared across multiple projects. 30 | 31 | ## Roadmap 32 | 33 | This project is still under development and is subject to change in the future. 34 | We aim to achieve at least the following goals before releasing the first stable version. 35 | 36 | - [x] Fetch dependencies based on git tag or branch 37 | - [x] Cache dependencies locally by revision 38 | - [x] Fetch transitive dependencies 39 | - [x] Declarative rules per dependency 40 | - [x] Allow policies 41 | - [x] Deny policies 42 | - [x] Dependency pruning (remove `proto` files that are not needed) 43 | - [ ] Prevent circular dependencies 44 | 45 | ## Getting Started 46 | 47 | You can download pre-built binaries from the [GitHub Releases](https://github.com/coralogix/protofetch/releases/latest) page. 48 | 49 | Protofetch is also released to [crates.io](https://crates.io/crates/protofetch), so if you have a Rust toolchain installed, you can build Protofetch from source with `cargo install protofetch`. 50 | 51 | ### Usage 52 | 53 | ```sh 54 | # Fetch proto sources, updating the lock file if needed. 55 | protofetch fetch 56 | 57 | # Verify the lock file, and fetch proto sources. Useful for CI. 58 | protofetch fetch --locked 59 | ``` 60 | 61 | ## Protofetch module 62 | 63 | Each service using protofetch will require a module descriptor which uses `toml` format. 64 | This descriptor is by default called `protofetch.toml` and is located in the root of the service's repository. 65 | This can be changed, but it is heavily discouraged. 66 | 67 | | Field | Type | Required | Description | 68 | |--------------|:-------------|:----------|:-----------------------------| 69 | | name | String | Mandatory | A name of the defined module | 70 | | description | String | Optional | A description of the module | 71 | | dependencies | [Dependency] | Optional | Dependencies to fetch | 72 | 73 | ### Dependency format 74 | 75 | | Field | Type | Required | Description | Example | 76 | |----------------|:---------|:----------|:------------------------------------------------------------------------------|:--------------------------------------------------| 77 | | url | String | Mandatory | An address of the repository to checkout protobuf files from | "github.com/coralogix/cx-api-users/" | 78 | | revision | String | Optional | A revision to checkout, this can either be a tagged version or a commit hash | v0.2 | 79 | | branch | Boolean | Optional | A branch to checkout, fetches last commit | feature/v2 | 80 | | protocol | String | Optional | A protocol to use: [ssh, https] | ssh | 81 | | allow_policies | [String] | Optional | Allow policy rules | "/prefix/*", "*/subpath/*", "/path/to/file.proto" | 82 | | deny_policies | [String] | Optional | Deny policy rules | "/prefix/*", "*/subpath/*", "/path/to/file.proto" | 83 | | prune | bool | Optional | Whether to prune unneeded transitive proto files | true /false | 84 | | transitive | bool | Optional | Flags this dependency as transitive | true /false | 85 | | content_roots | [String] | Optional | Which subdirectories to import from | ["/myservice", "/com/org/client"] | 86 | 87 | ### Protofetch dependency toml example 88 | 89 | ```toml 90 | name = "repository name" 91 | description = "this is a repository" 92 | 93 | [dep1] 94 | url = "github.com/org/dep1" 95 | protocol = "https" 96 | revision = "1.3.0" 97 | prune = true 98 | allow_policies = ["/prefix/*", "*/subpath/*", "/path/to/file.proto"] 99 | 100 | [dep2] 101 | url = "github.com/org/dep2" 102 | branch = "feature/v2" 103 | 104 | [another-name] 105 | url = "github.com/org/dep3" 106 | revision = "a16f097eab6e64f2b711fd4b977e610791376223" 107 | transitive = true 108 | 109 | [scoped-down-dep4] 110 | url = "github.com/org/dep4" 111 | revision = "v1.1" 112 | content_roots = ["/scope/path"] 113 | allow_policies = ["prefix/subpath/scoped_path/*"] 114 | ``` 115 | 116 | ## Git protocol 117 | 118 | Protofetch supports accessing Git repositories using `ssh` or `https`. By default, Protofetch uses `ssh`. You can configure the default Git protocol with the `PROTOFETCH_GIT_PROTOCOL` environment variable. 119 | 120 | It is also possible to set protocol in the `protofetch.toml`, but this should be only necessary if the Git server does not support both protocols. Otherwise, it is better to leave this field unset, to let users choose whichever protocol they prefer. 121 | 122 | ### SSH support 123 | 124 | You need to have an SSH agent running, with your SSH key loaded: 125 | ```sh 126 | ssh-add ~/.ssh/your-private-key 127 | ``` 128 | 129 | ### HTTPS support 130 | 131 | If you want to use https you need to configure git to use a [credentials helper](https://git-scm.com/docs/gitcredentials). 132 | 133 | To support https when `2FA` is enabled you must generate a personal access token and set it as the password. 134 | The following permissions are sufficient when creating the token. 135 | 136 | ![GitHub personal access token](readme-images/github-personal-access-token.png) 137 | 138 | ## Scope down multi API repo 139 | 140 | In the case of a repo that supports multiple APIs, but only a specific directory is needed, a combination of `content_roots` and `allow_policies` can be used. 141 | 142 | For example: the `dep4` repo contains the following: 143 | ```sh 144 | dep4 145 | ├── scope 146 | │ ├── path1 147 | │ └── path2 148 | └── scope2 149 | └── unrelated 150 | ``` 151 | We only need protobuf files from `dep4/scope/path1`, where `path1` is the package name. 152 | 153 | ```toml 154 | [scoped-down-dep4] 155 | url = "github.com/org/dep4" 156 | revision = "v1.1" 157 | content_roots = ["/scope"] 158 | allow_policies = ["path1/*"] 159 | ``` 160 | 161 | 162 | ## Transitive dependency support and pruning 163 | 164 | Protofetch supports pulling transitive dependencies for your convenience. 165 | However, there is some manual work involved if the dependencies do not define their own protofetch module. 166 | 167 | In a situation where A depends on B, you should flag that dependency as transitive. 168 | 169 | This is helpful especially when you take advantage of the pruning feature which allows you to only recursively fetch 170 | the proto files you actually need. With pruning enabled, protofetch will recursively find what protofiles your root 171 | protos depend on and fetch them for as long as they are imported (flag as transitive dependency or fetched from other modules). 172 | 173 | Moreover, you can also use the allow_policies to scope down the root proto files you want from a dependency. 174 | As an example, the following module depends only on A's file `/proto/path/example.proto` but since pruning is enabled and 175 | B is flagged as transitive, if the allowed file has any file dependencies it will pull them and its dependencies, recursively. 176 | 177 | IMPORTANT: If you are using the `prune` feature, you must also use the `transitive` feature. However, do not use transitive 178 | unless you strictly want to pull the transitive dependencies. This is a workaround for dependencies that do not define 179 | their protofetch file on their repo. 180 | 181 | ```toml 182 | name = "repository name" 183 | description = "this is a repository" 184 | proto_out_dir = "proto/src/dir/output" 185 | 186 | [A] 187 | protocol = "https" 188 | url = "github.com/org/A" 189 | revision = "1.3.0" 190 | allow_policies = ["/proto/path/example.proto"] 191 | prune = true 192 | 193 | [B] 194 | protocol = "ssh" 195 | url = "github.com/org/B" 196 | revision = "5.2.0" 197 | transitive = true 198 | ``` 199 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "crane": { 4 | "locked": { 5 | "lastModified": 1739936662, 6 | "narHash": "sha256-x4syUjNUuRblR07nDPeLDP7DpphaBVbUaSoeZkFbGSk=", 7 | "owner": "ipetkov", 8 | "repo": "crane", 9 | "rev": "19de14aaeb869287647d9461cbd389187d8ecdb7", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "ipetkov", 14 | "repo": "crane", 15 | "type": "github" 16 | } 17 | }, 18 | "flake-utils": { 19 | "inputs": { 20 | "systems": "systems" 21 | }, 22 | "locked": { 23 | "lastModified": 1731533236, 24 | "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", 25 | "owner": "numtide", 26 | "repo": "flake-utils", 27 | "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", 28 | "type": "github" 29 | }, 30 | "original": { 31 | "owner": "numtide", 32 | "repo": "flake-utils", 33 | "type": "github" 34 | } 35 | }, 36 | "nixpkgs": { 37 | "locked": { 38 | "lastModified": 1740303746, 39 | "narHash": "sha256-XcdiWLEhjJkMxDLKQJ0CCivmYYCvA5MDxu9pMybM5kM=", 40 | "owner": "NixOS", 41 | "repo": "nixpkgs", 42 | "rev": "2d068ae5c6516b2d04562de50a58c682540de9bf", 43 | "type": "github" 44 | }, 45 | "original": { 46 | "owner": "NixOS", 47 | "ref": "nixpkgs-unstable", 48 | "repo": "nixpkgs", 49 | "type": "github" 50 | } 51 | }, 52 | "root": { 53 | "inputs": { 54 | "crane": "crane", 55 | "flake-utils": "flake-utils", 56 | "nixpkgs": "nixpkgs" 57 | } 58 | }, 59 | "systems": { 60 | "locked": { 61 | "lastModified": 1681028828, 62 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 63 | "owner": "nix-systems", 64 | "repo": "default", 65 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 66 | "type": "github" 67 | }, 68 | "original": { 69 | "owner": "nix-systems", 70 | "repo": "default", 71 | "type": "github" 72 | } 73 | } 74 | }, 75 | "root": "root", 76 | "version": 7 77 | } 78 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Protofetch - A source dependency management tool for Protobuf files"; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; 6 | flake-utils.url = "github:numtide/flake-utils"; 7 | crane.url = "github:ipetkov/crane"; 8 | }; 9 | 10 | outputs = 11 | { 12 | self, 13 | nixpkgs, 14 | flake-utils, 15 | crane, 16 | }: 17 | flake-utils.lib.eachDefaultSystem ( 18 | system: 19 | let 20 | pkgs = nixpkgs.legacyPackages.${system}; 21 | inherit (pkgs) lib; 22 | craneLib = crane.mkLib pkgs; 23 | 24 | protofetch = craneLib.buildPackage { 25 | pname = "protofetch"; 26 | src = lib.cleanSourceWith { 27 | src = ./.; # The original, unfiltered source 28 | filter = path: type: (lib.hasSuffix "\.proto" path) || (craneLib.filterCargoSources path type); 29 | }; 30 | buildInputs = [ 31 | pkgs.openssl 32 | pkgs.libgit2 33 | pkgs.pkg-config 34 | ] ++ lib.optionals pkgs.stdenv.isDarwin [ pkgs.darwin.apple_sdk.frameworks.Security ]; 35 | preBuild = '' 36 | export HOME=$(mktemp -d) 37 | ''; 38 | }; 39 | in 40 | { 41 | packages = rec { 42 | inherit protofetch; 43 | default = protofetch; 44 | }; 45 | checks = { 46 | # Build the crate as part of `nix flake check` 47 | inherit protofetch; 48 | }; 49 | } 50 | ); 51 | } 52 | -------------------------------------------------------------------------------- /readme-images/github-personal-access-token.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coralogix/protofetch/9787dc1fe632b91700f26584723de095293ea558/readme-images/github-personal-access-token.png -------------------------------------------------------------------------------- /resources/cache/dep1/hash1/proto/example.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | import "proto/example2.proto"; 6 | import "google/protobuf/descriptor.proto"; 7 | 8 | option (scalapb.options) = { 9 | scope: PACKAGE 10 | flat_package: true 11 | }; 12 | -------------------------------------------------------------------------------- /resources/cache/dep2/hash2/proto/example2.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | import "scalapb/scalapb.proto"; 6 | import "proto/example3.proto"; 7 | import "google/protobuf/descriptor.proto"; 8 | import "google/protobuf/struct.proto"; 9 | 10 | option (scalapb.options) = { 11 | scope: PACKAGE 12 | flat_package: true 13 | }; 14 | 15 | 16 | -------------------------------------------------------------------------------- /resources/cache/dep2/hash2/proto/example3.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | import "google/protobuf/struct.proto"; 6 | import "proto/example5.proto"; 7 | 8 | option (scalapb.options) = { 9 | scope: PACKAGE 10 | flat_package: true 11 | }; 12 | 13 | -------------------------------------------------------------------------------- /resources/cache/dep2/hash2/proto/example4.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | import "google/protobuf/struct.proto"; 6 | 7 | option (scalapb.options) = { 8 | scope: PACKAGE 9 | flat_package: true 10 | }; 11 | -------------------------------------------------------------------------------- /resources/cache/dep2/hash2/proto/example5.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | import "google/protobuf/struct.proto"; 6 | 7 | option (scalapb.options) = { 8 | scope: PACKAGE 9 | flat_package: true 10 | }; 11 | -------------------------------------------------------------------------------- /resources/cache/dep3/hash3/proto/example.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | import "proto/example2.proto"; 6 | import "google/protobuf/descriptor.proto"; 7 | 8 | option (scalapb.options) = { 9 | scope: PACKAGE 10 | flat_package: true 11 | }; 12 | -------------------------------------------------------------------------------- /resources/cache/dep3/hash3/root/proto/root.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | import "proto/example2.proto"; 6 | import "google/protobuf/descriptor.proto"; 7 | 8 | option (scalapb.options) = { 9 | scope: PACKAGE 10 | flat_package: true 11 | }; 12 | -------------------------------------------------------------------------------- /resources/proto_out/example.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | import "proto_out/example.proto"; 6 | 7 | option (scalapb.options) = { 8 | scope: PACKAGE 9 | flat_package: true 10 | }; 11 | -------------------------------------------------------------------------------- /resources/proto_out/example2.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package proto; 4 | 5 | import "scalapb/scalapb.proto"; 6 | import "google/protobuf/descriptor.proto"; 7 | import "google/protobuf/struct.proto"; 8 | 9 | option (scalapb.options) = { 10 | scope: PACKAGE 11 | flat_package: true 12 | }; 13 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | # The default profile includes rustc, rust-std, cargo, rust-docs, rustfmt and clippy. 3 | # https://rust-lang.github.io/rustup/concepts/profiles.html 4 | profile = "default" 5 | channel = "1.86.0" 6 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | newline_style = "Unix" 2 | max_width = 100 3 | -------------------------------------------------------------------------------- /src/api/builder.rs: -------------------------------------------------------------------------------- 1 | use std::{env, error::Error, path::PathBuf}; 2 | 3 | use crate::{config::ProtofetchConfig, git::cache::ProtofetchGitCache, Protofetch}; 4 | 5 | #[derive(Default)] 6 | pub struct ProtofetchBuilder { 7 | // All other paths are relative to `root` 8 | root: Option, 9 | module_file_name: Option, 10 | lock_file_name: Option, 11 | cache_directory_path: Option, 12 | output_directory_name: Option, 13 | } 14 | 15 | impl ProtofetchBuilder { 16 | /// Project root directory. 17 | /// 18 | /// Defaults to the current directory. 19 | pub fn root(mut self, path: impl Into) -> Self { 20 | self.root = Some(path.into()); 21 | self 22 | } 23 | 24 | /// Name of the protofetch configuration toml file. 25 | /// 26 | /// Defaults to `protofetch.toml`. 27 | pub fn module_file_name(mut self, path: impl Into) -> Self { 28 | self.module_file_name = Some(path.into()); 29 | self 30 | } 31 | 32 | /// Name of the protofetch lock file. 33 | /// 34 | /// Defaults to `protofetch.lock`. 35 | pub fn lock_file_name(mut self, path: impl Into) -> Self { 36 | self.lock_file_name = Some(path.into()); 37 | self 38 | } 39 | 40 | /// Name of the default output directory for proto source files. 41 | /// It will override the `proto_out_dir` set in the module toml config. 42 | pub fn output_directory_name(mut self, path: impl Into) -> Self { 43 | self.output_directory_name = Some(path.into()); 44 | self 45 | } 46 | 47 | /// Location of the protofetch cache directory. 48 | /// 49 | /// Defaults to `$HOME/.protofetch/cache`. 50 | pub fn cache_directory(mut self, path: impl Into) -> Self { 51 | self.cache_directory_path = Some(path.into()); 52 | self 53 | } 54 | 55 | pub fn try_build(self) -> Result> { 56 | let config = ProtofetchConfig::load()?; 57 | 58 | let Self { 59 | root, 60 | module_file_name, 61 | lock_file_name, 62 | output_directory_name, 63 | cache_directory_path, 64 | } = self; 65 | let root = match root { 66 | Some(root) => root, 67 | None => env::current_dir()?, 68 | }; 69 | 70 | let module_file_name = module_file_name.unwrap_or_else(|| PathBuf::from("protofetch.toml")); 71 | 72 | let lock_file_name = lock_file_name.unwrap_or_else(|| PathBuf::from("protofetch.lock")); 73 | 74 | let cache_directory = root.join(cache_directory_path.unwrap_or(config.cache_dir)); 75 | 76 | let git_config = git2::Config::open_default()?; 77 | 78 | let cache = ProtofetchGitCache::new(cache_directory, git_config, config.default_protocol)?; 79 | 80 | Ok(Protofetch { 81 | cache, 82 | root, 83 | module_file_name, 84 | lock_file_name, 85 | output_directory_name, 86 | }) 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /src/api/mod.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | error::Error, 3 | path::{Path, PathBuf}, 4 | }; 5 | 6 | use crate::{ 7 | cli::command_handlers::{do_clean, do_fetch, do_init, do_lock, do_migrate}, 8 | git::cache::ProtofetchGitCache, 9 | }; 10 | 11 | mod builder; 12 | 13 | pub use builder::ProtofetchBuilder; 14 | 15 | pub struct Protofetch { 16 | cache: ProtofetchGitCache, 17 | root: PathBuf, 18 | module_file_name: PathBuf, 19 | lock_file_name: PathBuf, 20 | output_directory_name: Option, 21 | } 22 | 23 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] 24 | pub enum LockMode { 25 | /// Verify that the lock file is up to date. This mode should be normally used on CI. 26 | Locked, 27 | /// Update the lock file if necessary. 28 | Update, 29 | /// Recreate the lock file from scratch. 30 | Recreate, 31 | } 32 | 33 | impl Protofetch { 34 | pub fn builder() -> ProtofetchBuilder { 35 | ProtofetchBuilder::default() 36 | } 37 | 38 | /// Creates an initial protofetch setup 39 | pub fn init(&self, name: Option) -> Result<(), Box> { 40 | do_init(&self.root, name, &self.module_file_name) 41 | } 42 | 43 | /// Fetches dependencies defined in the toml configuration file 44 | pub fn fetch(&self, lock_mode: LockMode) -> Result<(), Box> { 45 | do_fetch( 46 | lock_mode, 47 | &self.cache, 48 | &self.root, 49 | &self.module_file_name, 50 | &self.lock_file_name, 51 | self.output_directory_name.as_deref(), 52 | ) 53 | } 54 | 55 | /// Creates, updates or verifies a lock file based on the toml configuration file 56 | pub fn lock(&self, lock_mode: LockMode) -> Result<(), Box> { 57 | do_lock( 58 | lock_mode, 59 | &self.cache, 60 | &self.root, 61 | &self.module_file_name, 62 | &self.lock_file_name, 63 | )?; 64 | Ok(()) 65 | } 66 | 67 | /// Migrates a protodep.toml file to the protofetch format 68 | pub fn migrate( 69 | &self, 70 | name: Option, 71 | source_directory_path: impl AsRef, 72 | ) -> Result<(), Box> { 73 | do_migrate( 74 | &self.root, 75 | name, 76 | &self.module_file_name, 77 | source_directory_path.as_ref(), 78 | ) 79 | } 80 | 81 | /// Delete generated proto sources and the lock file 82 | pub fn clean(&self) -> Result<(), Box> { 83 | do_clean( 84 | &self.root, 85 | &self.module_file_name, 86 | &self.lock_file_name, 87 | self.output_directory_name.as_deref(), 88 | ) 89 | } 90 | 91 | pub fn clear_cache(&self) -> Result<(), Box> { 92 | self.cache.clear()?; 93 | Ok(()) 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /src/cache/git.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use crate::{ 4 | git::cache::ProtofetchGitCache, 5 | model::protofetch::{Coordinate, ModuleName, RevisionSpecification}, 6 | }; 7 | 8 | use super::RepositoryCache; 9 | 10 | impl RepositoryCache for ProtofetchGitCache { 11 | fn fetch( 12 | &self, 13 | coordinate: &Coordinate, 14 | specification: &RevisionSpecification, 15 | commit_hash: &str, 16 | ) -> anyhow::Result<()> { 17 | let repository = self.repository(coordinate)?; 18 | repository.fetch_commit(specification, commit_hash)?; 19 | Ok(()) 20 | } 21 | 22 | fn create_worktree( 23 | &self, 24 | coordinate: &Coordinate, 25 | commit_hash: &str, 26 | name: &ModuleName, 27 | ) -> anyhow::Result { 28 | let path = self 29 | .repository(coordinate)? 30 | .create_worktree(name, commit_hash)?; 31 | Ok(path) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/cache/mod.rs: -------------------------------------------------------------------------------- 1 | mod git; 2 | 3 | use std::path::PathBuf; 4 | 5 | use crate::model::protofetch::{Coordinate, ModuleName, RevisionSpecification}; 6 | 7 | pub trait RepositoryCache { 8 | fn fetch( 9 | &self, 10 | coordinate: &Coordinate, 11 | specification: &RevisionSpecification, 12 | commit_hash: &str, 13 | ) -> anyhow::Result<()>; 14 | 15 | fn create_worktree( 16 | &self, 17 | coordinate: &Coordinate, 18 | commit_hash: &str, 19 | name: &ModuleName, 20 | ) -> anyhow::Result; 21 | } 22 | -------------------------------------------------------------------------------- /src/cli/command_handlers.rs: -------------------------------------------------------------------------------- 1 | use log::{debug, info}; 2 | 3 | use crate::{ 4 | api::LockMode, 5 | fetch, 6 | git::cache::ProtofetchGitCache, 7 | model::{ 8 | protodep::ProtodepDescriptor, 9 | protofetch::{lock::LockFile, resolved::ResolvedModule, Descriptor, ModuleName}, 10 | }, 11 | proto, 12 | resolver::LockFileModuleResolver, 13 | }; 14 | use std::{ 15 | error::Error, 16 | path::{Path, PathBuf}, 17 | }; 18 | 19 | const DEFAULT_OUTPUT_DIRECTORY_NAME: &str = "proto_src"; 20 | 21 | /// Handler to fetch command 22 | pub fn do_fetch( 23 | lock_mode: LockMode, 24 | cache: &ProtofetchGitCache, 25 | root: &Path, 26 | module_file_name: &Path, 27 | lock_file_name: &Path, 28 | output_directory_name: Option<&Path>, 29 | ) -> Result<(), Box> { 30 | let module_descriptor = load_module_descriptor(root, module_file_name)?; 31 | 32 | let resolved = do_lock(lock_mode, cache, root, module_file_name, lock_file_name)?; 33 | 34 | let output_directory_name = output_directory_name 35 | .or_else(|| module_descriptor.proto_out_dir.as_ref().map(Path::new)) 36 | .unwrap_or(Path::new(DEFAULT_OUTPUT_DIRECTORY_NAME)); 37 | fetch::fetch_sources(cache, &resolved.dependencies)?; 38 | 39 | //Copy proto_out files to actual target 40 | proto::copy_proto_files(cache, &resolved, &root.join(output_directory_name))?; 41 | 42 | Ok(()) 43 | } 44 | 45 | /// Handler to lock command 46 | /// Loads dependency descriptor from protofetch toml or protodep toml 47 | /// Generates a lock file based on the protofetch.toml 48 | pub fn do_lock( 49 | lock_mode: LockMode, 50 | cache: &ProtofetchGitCache, 51 | root: &Path, 52 | module_file_name: &Path, 53 | lock_file_name: &Path, 54 | ) -> Result> { 55 | let module_descriptor = load_module_descriptor(root, module_file_name)?; 56 | 57 | let lock_file_path = root.join(lock_file_name); 58 | 59 | let (old_lock, (resolved, lockfile)) = match (lock_mode, lock_file_path.exists()) { 60 | (LockMode::Locked, false) => return Err("Lock file does not exist".into()), 61 | 62 | (LockMode::Locked, true) => { 63 | let old_lock = LockFile::from_file(&lock_file_path)?; 64 | let resolver = LockFileModuleResolver::new(cache, &old_lock, true); 65 | debug!("Verifying lockfile..."); 66 | let resolved = fetch::resolve(&module_descriptor, &resolver)?; 67 | (Some(old_lock), resolved) 68 | } 69 | 70 | (LockMode::Update, false) => { 71 | debug!("Generating lockfile..."); 72 | (None, fetch::resolve(&module_descriptor, &cache)?) 73 | } 74 | 75 | (LockMode::Update, true) => { 76 | let old_lock = LockFile::from_file(&lock_file_path)?; 77 | let resolver = LockFileModuleResolver::new(cache, &old_lock, false); 78 | debug!("Updating lockfile..."); 79 | let resolved = fetch::resolve(&module_descriptor, &resolver)?; 80 | (Some(old_lock), resolved) 81 | } 82 | 83 | (LockMode::Recreate, _) => { 84 | debug!("Generating lockfile..."); 85 | (None, fetch::resolve(&module_descriptor, &cache)?) 86 | } 87 | }; 88 | 89 | debug!("Generated lockfile: {:?}", lockfile); 90 | 91 | if old_lock.is_some_and(|old_lock| old_lock == lockfile) { 92 | debug!("Lockfile is up to date"); 93 | } else { 94 | let lock_file_path = root.join(lock_file_name); 95 | std::fs::write(&lock_file_path, lockfile.to_string()?)?; 96 | info!("Wrote lockfile to {}", lock_file_path.display()); 97 | } 98 | 99 | Ok(resolved) 100 | } 101 | 102 | /// Handler to init command 103 | pub fn do_init( 104 | root: &Path, 105 | name: Option, 106 | module_file_name: &Path, 107 | ) -> Result<(), Box> { 108 | let name = build_module_name(name, root)?; 109 | let descriptor = { 110 | Descriptor { 111 | name, 112 | description: None, 113 | proto_out_dir: None, 114 | dependencies: vec![], 115 | } 116 | }; 117 | let module_file_path = root.join(module_file_name); 118 | create_module_dir(descriptor, &module_file_path, false) 119 | } 120 | 121 | ///Migrate from protodep to protofetch 122 | /// 1 - Reads protodep.toml 123 | /// 2 - Translates descriptor 124 | /// 3 - Writes protofetch.toml 125 | /// 4 - Deletes protodep.toml 126 | pub fn do_migrate( 127 | root: &Path, 128 | name: Option, 129 | module_file_name: &Path, 130 | source_directory_path: &Path, 131 | ) -> Result<(), Box> { 132 | let descriptor = ProtodepDescriptor::from_file(&source_directory_path.join("protodep.toml")) 133 | .and_then(|d| d.into_proto_fetch())?; 134 | 135 | let name = build_module_name(name, root)?; 136 | let descriptor_with_name = Descriptor { name, ..descriptor }; 137 | create_module_dir(descriptor_with_name, &root.join(module_file_name), false)?; 138 | 139 | std::fs::remove_file(source_directory_path.join("protodep.toml"))?; 140 | std::fs::remove_file(source_directory_path.join("protodep.lock"))?; 141 | 142 | Ok(()) 143 | } 144 | 145 | pub fn do_clean( 146 | root: &Path, 147 | module_file_name: &Path, 148 | lock_file_name: &Path, 149 | output_directory_name: Option<&Path>, 150 | ) -> Result<(), Box> { 151 | let module_descriptor = load_module_descriptor(root, module_file_name)?; 152 | 153 | let lock_file_path = root.join(lock_file_name); 154 | 155 | let output_directory_name = output_directory_name 156 | .or_else(|| module_descriptor.proto_out_dir.as_ref().map(Path::new)) 157 | .unwrap_or(Path::new(DEFAULT_OUTPUT_DIRECTORY_NAME)); 158 | let output_directory_path = root.join(output_directory_name); 159 | 160 | info!( 161 | "Cleaning protofetch proto_out source files folder {}.", 162 | output_directory_path.display() 163 | ); 164 | let output1 = std::fs::remove_dir_all(&output_directory_path); 165 | let output2 = std::fs::remove_file(&lock_file_path); 166 | 167 | for (output, path) in [(output1, output_directory_path), (output2, lock_file_path)] { 168 | match output { 169 | Err(err) if err.kind() == std::io::ErrorKind::NotFound => { 170 | info!("{} is already removed, nothing to do", path.display()); 171 | Ok(()) 172 | } 173 | otherwise => otherwise, 174 | }?; 175 | } 176 | 177 | Ok(()) 178 | } 179 | 180 | fn load_module_descriptor( 181 | root: &Path, 182 | module_file_name: &Path, 183 | ) -> Result> { 184 | let module_descriptor = Descriptor::from_file(&root.join(module_file_name)).or_else(|_| { 185 | ProtodepDescriptor::from_file(&root.join("protodep.toml")) 186 | .and_then(|d| d.into_proto_fetch()) 187 | })?; 188 | 189 | Ok(module_descriptor) 190 | } 191 | 192 | /// Name if present otherwise attempt to extract from directory 193 | fn build_module_name(name: Option, path: &Path) -> Result> { 194 | match name { 195 | Some(name) => Ok(ModuleName::from(name)), 196 | None => match path.canonicalize()?.file_name() { 197 | Some(dir) => Ok(ModuleName::from(dir.to_string_lossy().to_string())), 198 | None => { 199 | Err("Module name not given and could not convert location to directory name".into()) 200 | } 201 | }, 202 | } 203 | } 204 | 205 | fn create_module_dir( 206 | descriptor: Descriptor, 207 | module_filename_path: &PathBuf, 208 | ow: bool, 209 | ) -> Result<(), Box> { 210 | if !module_filename_path.exists() { 211 | std::fs::write( 212 | module_filename_path, 213 | toml::to_string_pretty(&descriptor.into_toml())?, 214 | )?; 215 | Ok(()) 216 | } else if ow { 217 | std::fs::remove_file(module_filename_path)?; 218 | std::fs::write( 219 | module_filename_path, 220 | toml::to_string_pretty(&descriptor.into_toml())?, 221 | )?; 222 | Ok(()) 223 | } else { 224 | Err(format!("File already exists: {}", module_filename_path.display()).into()) 225 | } 226 | } 227 | -------------------------------------------------------------------------------- /src/cli/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod command_handlers; 2 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, path::PathBuf}; 2 | 3 | use anyhow::bail; 4 | use config::{Config, ConfigError, Environment, File, FileFormat}; 5 | use log::{debug, trace}; 6 | use serde::Deserialize; 7 | 8 | use crate::model::protofetch::Protocol; 9 | 10 | #[derive(Debug)] 11 | pub struct ProtofetchConfig { 12 | pub cache_dir: PathBuf, 13 | pub default_protocol: Protocol, 14 | } 15 | 16 | impl ProtofetchConfig { 17 | pub fn load() -> anyhow::Result { 18 | let config_dir = config_dir(); 19 | let raw_config = RawConfig::load(config_dir, None, None)?; 20 | 21 | let config = Self { 22 | cache_dir: match raw_config.cache.dir { 23 | Some(cache_dir) => cache_dir, 24 | None => default_cache_dir()?, 25 | }, 26 | default_protocol: raw_config.git.protocol.unwrap_or(Protocol::Ssh), 27 | }; 28 | trace!("Loaded configuration: {:?}", config); 29 | 30 | Ok(config) 31 | } 32 | } 33 | 34 | #[derive(Default, Debug, Deserialize, PartialEq, Eq)] 35 | struct RawConfig { 36 | #[serde(default)] 37 | cache: CacheConfig, 38 | #[serde(default)] 39 | git: GitConfig, 40 | } 41 | 42 | #[derive(Default, Debug, Deserialize, PartialEq, Eq)] 43 | struct CacheConfig { 44 | dir: Option, 45 | } 46 | 47 | #[derive(Default, Debug, Deserialize, PartialEq, Eq)] 48 | struct GitConfig { 49 | protocol: Option, 50 | } 51 | 52 | impl RawConfig { 53 | fn load( 54 | config_dir: Option, 55 | config_override: Option, 56 | env_override: Option>, 57 | ) -> Result { 58 | let mut builder = Config::builder(); 59 | 60 | if let Some(mut path) = config_dir { 61 | path.push("config.toml"); 62 | debug!("Loading configuration from {}", path.display()); 63 | builder = builder.add_source(File::from(path).required(false)); 64 | } 65 | 66 | if let Some(config_override) = config_override { 67 | builder = builder.add_source(File::from_str( 68 | &config_override.to_string(), 69 | FileFormat::Toml, 70 | )); 71 | } 72 | 73 | builder 74 | .add_source( 75 | Environment::with_prefix("PROTOFETCH") 76 | .separator("_") 77 | .source(env_override), 78 | ) 79 | .build()? 80 | .try_deserialize() 81 | } 82 | } 83 | 84 | fn config_dir() -> Option { 85 | if let Ok(path) = std::env::var("PROTOFETCH_CONFIG_DIR") { 86 | return Some(PathBuf::from(path)); 87 | } 88 | if let Ok(path) = std::env::var("XDG_CONFIG_HOME") { 89 | let mut path = PathBuf::from(path); 90 | path.push("protofetch"); 91 | return Some(path); 92 | } 93 | if let Some(mut path) = home::home_dir() { 94 | path.push(".config"); 95 | path.push("protofetch"); 96 | return Some(path); 97 | } 98 | None 99 | } 100 | 101 | fn default_cache_dir() -> anyhow::Result { 102 | if let Ok(path) = std::env::var("XDG_CACHE_HOME") { 103 | let mut path = PathBuf::from(path); 104 | path.push("protofetch"); 105 | return Ok(path); 106 | } 107 | if let Some(mut path) = home::home_dir() { 108 | path.push(".cache"); 109 | path.push("protofetch"); 110 | return Ok(path); 111 | } 112 | bail!("Could not find home dir. Please define $HOME env variable.") 113 | } 114 | 115 | #[cfg(test)] 116 | mod tests { 117 | use toml::toml; 118 | 119 | use super::*; 120 | 121 | use pretty_assertions::assert_eq; 122 | 123 | #[test] 124 | fn load_empty() { 125 | let env = HashMap::new(); 126 | let config = RawConfig::load(None, Some(Default::default()), Some(env)).unwrap(); 127 | assert_eq!( 128 | config, 129 | RawConfig { 130 | cache: CacheConfig { dir: None }, 131 | git: GitConfig { protocol: None } 132 | } 133 | ) 134 | } 135 | 136 | #[test] 137 | fn load_environment() { 138 | let env = HashMap::from([ 139 | ("PROTOFETCH_CACHE_DIR".to_owned(), "/cache".to_owned()), 140 | ("PROTOFETCH_GIT_PROTOCOL".to_owned(), "ssh".to_owned()), 141 | ]); 142 | let config = RawConfig::load(None, Some(Default::default()), Some(env)).unwrap(); 143 | assert_eq!( 144 | config, 145 | RawConfig { 146 | cache: CacheConfig { 147 | dir: Some("/cache".into()) 148 | }, 149 | git: GitConfig { 150 | protocol: Some(Protocol::Ssh) 151 | } 152 | } 153 | ) 154 | } 155 | 156 | #[test] 157 | fn load_config_file() { 158 | let env = HashMap::new(); 159 | let config = RawConfig::load( 160 | None, 161 | Some(toml! { 162 | [cache] 163 | dir = "/cache" 164 | 165 | [git] 166 | protocol = "ssh" 167 | }), 168 | Some(env), 169 | ) 170 | .unwrap(); 171 | assert_eq!( 172 | config, 173 | RawConfig { 174 | cache: CacheConfig { 175 | dir: Some("/cache".into()) 176 | }, 177 | git: GitConfig { 178 | protocol: Some(Protocol::Ssh) 179 | } 180 | } 181 | ) 182 | } 183 | } 184 | -------------------------------------------------------------------------------- /src/fetch.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::BTreeMap, str::Utf8Error}; 2 | 3 | use crate::{ 4 | cache::RepositoryCache, 5 | model::protofetch::{ 6 | lock::{LockFile, LockedCoordinate, LockedDependency}, 7 | resolved::{ResolvedDependency, ResolvedModule}, 8 | Dependency, Descriptor, ModuleName, 9 | }, 10 | resolver::{CommitAndDescriptor, ModuleResolver}, 11 | }; 12 | use log::{error, info}; 13 | use thiserror::Error; 14 | 15 | #[derive(Error, Debug)] 16 | pub enum FetchError { 17 | #[error("Error while fetching repo from cache: {0}")] 18 | Cache(anyhow::Error), 19 | #[error("Git error: {0}")] 20 | GitError(#[from] git2::Error), 21 | #[error("Error while decoding utf8 bytes from blob: {0}")] 22 | BlobRead(#[from] Utf8Error), 23 | #[error("Error while parsing descriptor")] 24 | Parsing(#[from] crate::model::ParseError), 25 | #[error("Error while processing protobuf repository: {0}")] 26 | ProtoRepoError(#[from] crate::git::repository::ProtoRepoError), 27 | #[error("IO error: {0}")] 28 | IO(#[from] std::io::Error), 29 | #[error(transparent)] 30 | Resolver(anyhow::Error), 31 | } 32 | 33 | pub fn resolve( 34 | descriptor: &Descriptor, 35 | resolver: &impl ModuleResolver, 36 | ) -> Result<(ResolvedModule, LockFile), FetchError> { 37 | fn go( 38 | resolver: &impl ModuleResolver, 39 | results: &mut BTreeMap, 40 | dependencies: &[Dependency], 41 | ) -> Result<(), FetchError> { 42 | let mut children = Vec::new(); 43 | for dependency in dependencies { 44 | let locked_coordinate = LockedCoordinate::from(&dependency.coordinate); 45 | match results.get(&dependency.name) { 46 | None => { 47 | log::info!("Resolving {}", dependency.coordinate); 48 | let CommitAndDescriptor { 49 | commit_hash, 50 | mut descriptor, 51 | } = resolver 52 | .resolve( 53 | &dependency.coordinate, 54 | &dependency.specification, 55 | None, 56 | &dependency.name, 57 | ) 58 | .map_err(FetchError::Resolver)?; 59 | 60 | let locked = LockedDependency { 61 | name: dependency.name.clone(), 62 | commit_hash: commit_hash.clone(), 63 | coordinate: locked_coordinate, 64 | specification: dependency.specification.clone(), 65 | }; 66 | 67 | let resolved = ResolvedDependency { 68 | name: dependency.name.clone(), 69 | commit_hash, 70 | coordinate: dependency.coordinate.clone(), 71 | specification: dependency.specification.clone(), 72 | rules: dependency.rules.clone(), 73 | dependencies: descriptor 74 | .dependencies 75 | .iter() 76 | .map(|d| d.name.clone()) 77 | .collect(), 78 | }; 79 | 80 | results.insert(dependency.name.clone(), (locked, resolved)); 81 | children.append(&mut descriptor.dependencies); 82 | } 83 | Some((already_locked, _)) => { 84 | if already_locked.coordinate != locked_coordinate { 85 | log::warn!( 86 | "discarded {} in favor of {} for {}", 87 | dependency.coordinate, 88 | already_locked.coordinate, 89 | &dependency.name 90 | ); 91 | } else if already_locked.specification != dependency.specification { 92 | log::warn!( 93 | "discarded {} in favor of {} for {}", 94 | dependency.specification, 95 | already_locked.specification, 96 | &dependency.name 97 | ); 98 | } 99 | } 100 | } 101 | } 102 | 103 | if !children.is_empty() { 104 | go(resolver, results, &children)?; 105 | } 106 | 107 | Ok(()) 108 | } 109 | 110 | let mut results = BTreeMap::new(); 111 | 112 | go(resolver, &mut results, &descriptor.dependencies)?; 113 | 114 | let (locked, resolved) = results.into_values().unzip(); 115 | 116 | let resolved = ResolvedModule { 117 | module_name: descriptor.name.clone(), 118 | dependencies: resolved, 119 | }; 120 | 121 | let lockfile = LockFile { 122 | dependencies: locked, 123 | }; 124 | 125 | Ok((resolved, lockfile)) 126 | } 127 | 128 | pub fn fetch_sources( 129 | cache: &impl RepositoryCache, 130 | dependencies: &[ResolvedDependency], 131 | ) -> Result<(), FetchError> { 132 | info!("Fetching dependencies source files..."); 133 | for dependency in dependencies { 134 | cache 135 | .fetch( 136 | &dependency.coordinate, 137 | &dependency.specification, 138 | &dependency.commit_hash, 139 | ) 140 | .map_err(FetchError::Cache)?; 141 | } 142 | 143 | Ok(()) 144 | } 145 | 146 | #[cfg(test)] 147 | mod tests { 148 | use anyhow::anyhow; 149 | 150 | use crate::{ 151 | model::protofetch::{Coordinate, Revision, RevisionSpecification, Rules}, 152 | resolver::CommitAndDescriptor, 153 | }; 154 | 155 | use super::*; 156 | 157 | use pretty_assertions::assert_eq; 158 | 159 | #[derive(Default)] 160 | struct FakeModuleResolver { 161 | entries: BTreeMap>, 162 | } 163 | 164 | impl FakeModuleResolver { 165 | fn push(&mut self, name: &str, revision: &str, commit_hash: &str, descriptor: Descriptor) { 166 | self.entries.entry(coordinate(name)).or_default().insert( 167 | RevisionSpecification { 168 | revision: Revision::pinned(revision), 169 | branch: None, 170 | }, 171 | CommitAndDescriptor { 172 | commit_hash: commit_hash.to_string(), 173 | descriptor, 174 | }, 175 | ); 176 | } 177 | } 178 | 179 | impl ModuleResolver for FakeModuleResolver { 180 | fn resolve( 181 | &self, 182 | coordinate: &Coordinate, 183 | specification: &RevisionSpecification, 184 | _: Option<&str>, 185 | _: &ModuleName, 186 | ) -> anyhow::Result { 187 | Ok(self 188 | .entries 189 | .get(coordinate) 190 | .ok_or_else(|| anyhow!("Coordinate not found: {}", coordinate))? 191 | .get(specification) 192 | .ok_or_else(|| anyhow!("Specification not found: {}", specification))? 193 | .clone()) 194 | } 195 | } 196 | 197 | fn coordinate(name: &str) -> Coordinate { 198 | Coordinate::from_url(&format!("example.com/org/{}", name)).unwrap() 199 | } 200 | 201 | fn dependency(name: &str, revision: &str) -> Dependency { 202 | Dependency { 203 | name: ModuleName::from(name), 204 | coordinate: coordinate(name), 205 | specification: RevisionSpecification { 206 | revision: Revision::pinned(revision), 207 | branch: None, 208 | }, 209 | rules: Rules::default(), 210 | } 211 | } 212 | 213 | fn locked_dependency(name: &str, revision: &str, commit_hash: &str) -> LockedDependency { 214 | LockedDependency { 215 | name: ModuleName::from(name), 216 | coordinate: LockedCoordinate { 217 | url: format!("example.com/org/{}", name), 218 | protocol: None, 219 | }, 220 | specification: RevisionSpecification { 221 | revision: Revision::pinned(revision), 222 | branch: None, 223 | }, 224 | commit_hash: commit_hash.to_owned(), 225 | } 226 | } 227 | 228 | #[test] 229 | fn resolve_transitive() { 230 | let mut resolver = FakeModuleResolver::default(); 231 | resolver.push( 232 | "foo", 233 | "1.0.0", 234 | "c1", 235 | Descriptor { 236 | name: ModuleName::from("foo"), 237 | description: None, 238 | proto_out_dir: None, 239 | dependencies: vec![dependency("bar", "2.0.0")], 240 | }, 241 | ); 242 | 243 | resolver.push( 244 | "bar", 245 | "2.0.0", 246 | "c2", 247 | Descriptor { 248 | name: ModuleName::from("bar"), 249 | description: None, 250 | proto_out_dir: None, 251 | dependencies: Vec::new(), 252 | }, 253 | ); 254 | 255 | let (_, lockfile) = resolve( 256 | &Descriptor { 257 | name: ModuleName::from("root"), 258 | description: None, 259 | proto_out_dir: None, 260 | dependencies: vec![dependency("foo", "1.0.0")], 261 | }, 262 | &resolver, 263 | ) 264 | .unwrap(); 265 | 266 | assert_eq!( 267 | lockfile, 268 | LockFile { 269 | dependencies: vec![ 270 | locked_dependency("bar", "2.0.0", "c2"), 271 | locked_dependency("foo", "1.0.0", "c1") 272 | ] 273 | } 274 | ) 275 | } 276 | 277 | #[test] 278 | fn resolve_transitive_root_priority() { 279 | let mut resolver = FakeModuleResolver::default(); 280 | resolver.push( 281 | "foo", 282 | "1.0.0", 283 | "c1", 284 | Descriptor { 285 | name: ModuleName::from("foo"), 286 | description: None, 287 | proto_out_dir: None, 288 | dependencies: vec![dependency("bar", "2.0.0")], 289 | }, 290 | ); 291 | 292 | resolver.push( 293 | "bar", 294 | "1.0.0", 295 | "c3", 296 | Descriptor { 297 | name: ModuleName::from("bar"), 298 | description: None, 299 | proto_out_dir: None, 300 | dependencies: Vec::new(), 301 | }, 302 | ); 303 | resolver.push( 304 | "bar", 305 | "2.0.0", 306 | "c2", 307 | Descriptor { 308 | name: ModuleName::from("bar"), 309 | description: None, 310 | proto_out_dir: None, 311 | dependencies: Vec::new(), 312 | }, 313 | ); 314 | 315 | let (_, lockfile) = resolve( 316 | &Descriptor { 317 | name: ModuleName::from("root"), 318 | description: None, 319 | proto_out_dir: None, 320 | dependencies: vec![dependency("foo", "1.0.0"), dependency("bar", "1.0.0")], 321 | }, 322 | &resolver, 323 | ) 324 | .unwrap(); 325 | 326 | assert_eq!( 327 | lockfile, 328 | LockFile { 329 | dependencies: vec![ 330 | locked_dependency("bar", "1.0.0", "c3"), 331 | locked_dependency("foo", "1.0.0", "c1"), 332 | ] 333 | } 334 | ) 335 | } 336 | } 337 | -------------------------------------------------------------------------------- /src/flock.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | fs::File, 3 | path::Path, 4 | time::{Duration, Instant}, 5 | }; 6 | 7 | use fs4::fs_std::FileExt; 8 | use log::debug; 9 | use thiserror::Error; 10 | 11 | pub struct FileLock { 12 | _file: File, 13 | } 14 | 15 | #[derive(Error, Debug)] 16 | #[error(transparent)] 17 | pub struct Error(#[from] std::io::Error); 18 | 19 | impl FileLock { 20 | pub fn new(path: &Path) -> Result { 21 | let file = File::create(path)?; 22 | let start = Instant::now(); 23 | loop { 24 | match file.try_lock_exclusive().or_else(|error| { 25 | if error.raw_os_error() == fs4::lock_contended_error().raw_os_error() { 26 | Ok(false) 27 | } else { 28 | Err(error) 29 | } 30 | }) { 31 | Ok(true) => { 32 | return Ok(Self { _file: file }); 33 | } 34 | Ok(false) if start.elapsed().as_secs() < 300 => { 35 | debug!("Failed to acquire a lock on {}, retrying", path.display()); 36 | std::thread::sleep(Duration::from_secs(1)); 37 | } 38 | Ok(false) => { 39 | return Err(Error(std::io::Error::other(format!( 40 | "Failed to acquire a lock on {}", 41 | path.display() 42 | )))) 43 | } 44 | Err(error) => return Err(error.into()), 45 | } 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/git/cache.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | 3 | use git2::{ 4 | cert::Cert, AutotagOption, CertificateCheckStatus, Config, Cred, CredentialType, FetchOptions, 5 | RemoteCallbacks, Repository, 6 | }; 7 | use log::{debug, info, trace}; 8 | use ssh_key::{known_hosts::HostPatterns, KnownHosts}; 9 | use thiserror::Error; 10 | 11 | use crate::{ 12 | flock::FileLock, 13 | git::repository::ProtoGitRepository, 14 | model::protofetch::{Coordinate, Protocol}, 15 | }; 16 | 17 | const WORKTREES_DIR: &str = "dependencies"; 18 | const GLOBAL_KNOWN_HOSTS: &str = "/etc/ssh/ssh_known_hosts"; 19 | 20 | pub struct ProtofetchGitCache { 21 | location: PathBuf, 22 | worktrees: PathBuf, 23 | git_config: Config, 24 | default_protocol: Protocol, 25 | _lock: FileLock, 26 | } 27 | 28 | #[derive(Error, Debug)] 29 | pub enum CacheError { 30 | #[error("Git error: {0}")] 31 | Git(#[from] git2::Error), 32 | #[error("Cache location {location} does not exist")] 33 | BadLocation { location: String }, 34 | #[error("Cache lock cannot be acquired")] 35 | Lock(#[from] crate::flock::Error), 36 | #[error("IO error: {0}")] 37 | IO(#[from] std::io::Error), 38 | } 39 | 40 | impl ProtofetchGitCache { 41 | pub fn new( 42 | location: PathBuf, 43 | git_config: Config, 44 | default_protocol: Protocol, 45 | ) -> Result { 46 | if location.exists() { 47 | if !location.is_dir() { 48 | return Err(CacheError::BadLocation { 49 | location: location.to_str().unwrap_or("").to_string(), 50 | }); 51 | } 52 | } else { 53 | std::fs::create_dir_all(&location)?; 54 | } 55 | 56 | let lock = Self::acquire_lock(&location)?; 57 | 58 | let worktrees = location.join(WORKTREES_DIR); 59 | Ok(ProtofetchGitCache { 60 | location, 61 | worktrees, 62 | git_config, 63 | default_protocol, 64 | _lock: lock, 65 | }) 66 | } 67 | 68 | pub fn clear(&self) -> anyhow::Result<()> { 69 | if self.location.exists() { 70 | info!( 71 | "Clearing protofetch repository cache {}.", 72 | &self.location.display() 73 | ); 74 | std::fs::remove_dir_all(&self.location)?; 75 | } 76 | Ok(()) 77 | } 78 | 79 | pub fn repository(&self, entry: &Coordinate) -> Result { 80 | let mut path = self.location.clone(); 81 | path.push(entry.to_path()); 82 | 83 | let url = entry.to_git_url(self.default_protocol); 84 | 85 | let repo = if path.exists() { 86 | self.open_entry(&path, &url)? 87 | } else { 88 | self.create_repo(&path, &url)? 89 | }; 90 | 91 | Ok(ProtoGitRepository::new(self, repo, url)) 92 | } 93 | 94 | pub fn worktrees_path(&self) -> &Path { 95 | &self.worktrees 96 | } 97 | 98 | fn acquire_lock(location: &Path) -> Result { 99 | let location = location.join(".lock"); 100 | debug!( 101 | "Acquiring a lock on the cache location: {}", 102 | location.display() 103 | ); 104 | let lock = FileLock::new(&location)?; 105 | info!("Acquired a lock on the cache location"); 106 | Ok(lock) 107 | } 108 | 109 | fn open_entry(&self, path: &Path, url: &str) -> Result { 110 | trace!("Opening existing repository at {}", path.display()); 111 | 112 | let repo = Repository::open(path)?; 113 | 114 | { 115 | let remote = repo.find_remote("origin")?; 116 | if remote.url() != Some(url) { 117 | // If true then the protocol was updated before updating the cache. 118 | trace!( 119 | "Updating remote existing url {:?} to new url {}", 120 | remote.url(), 121 | url 122 | ); 123 | repo.remote_set_url("origin", url)?; 124 | } 125 | } 126 | 127 | Ok(repo) 128 | } 129 | 130 | fn create_repo(&self, path: &Path, url: &str) -> Result { 131 | trace!("Creating a new repository at {}", path.display()); 132 | 133 | let repo = Repository::init_bare(path)?; 134 | repo.remote_with_fetch("origin", url, "")?; 135 | 136 | Ok(repo) 137 | } 138 | 139 | pub(super) fn fetch_options(&self) -> Result, CacheError> { 140 | let mut callbacks = RemoteCallbacks::new(); 141 | // Consider using https://crates.io/crates/git2_credentials that supports 142 | // more authentication options 143 | callbacks.credentials(move |url, username, allowed_types| { 144 | trace!( 145 | "Requested credentials for {}, username {:?}, allowed types {:?}", 146 | url, 147 | username, 148 | allowed_types 149 | ); 150 | // Asking for ssh username 151 | if allowed_types.contains(CredentialType::USERNAME) { 152 | return Cred::username("git"); 153 | } 154 | // SSH auth 155 | if allowed_types.contains(CredentialType::SSH_KEY) { 156 | return Cred::ssh_key_from_agent(username.unwrap_or("git")); 157 | } 158 | // HTTP auth 159 | if allowed_types.contains(CredentialType::USER_PASS_PLAINTEXT) { 160 | return Cred::credential_helper(&self.git_config, url, username); 161 | } 162 | Err(git2::Error::from_str("no valid authentication available")) 163 | }); 164 | 165 | callbacks.certificate_check(|certificate, host| self.check_certificate(certificate, host)); 166 | 167 | let mut fetch_options = FetchOptions::new(); 168 | fetch_options 169 | .remote_callbacks(callbacks) 170 | .download_tags(AutotagOption::None); 171 | 172 | Ok(fetch_options) 173 | } 174 | 175 | fn check_certificate( 176 | &self, 177 | certificate: &Cert<'_>, 178 | host: &str, 179 | ) -> Result { 180 | if let Some(hostkey) = certificate.as_hostkey().and_then(|h| h.hostkey()) { 181 | trace!("Loading {}", GLOBAL_KNOWN_HOSTS); 182 | match KnownHosts::read_file(GLOBAL_KNOWN_HOSTS) { 183 | Ok(entries) => { 184 | for entry in entries { 185 | if host_matches_patterns(host, entry.host_patterns()) { 186 | trace!( 187 | "Found known host entry for {} ({})", 188 | host, 189 | entry.public_key().algorithm() 190 | ); 191 | if entry.public_key().to_bytes().as_deref() == Ok(hostkey) { 192 | trace!("Known host entry matches the host key"); 193 | return Ok(CertificateCheckStatus::CertificateOk); 194 | } 195 | } 196 | } 197 | trace!("No know host entry matched the host key"); 198 | } 199 | Err(error) => trace!("Could not load {}: {}", GLOBAL_KNOWN_HOSTS, error), 200 | } 201 | } 202 | Ok(CertificateCheckStatus::CertificatePassthrough) 203 | } 204 | } 205 | 206 | fn host_matches_patterns(host: &str, patterns: &HostPatterns) -> bool { 207 | match patterns { 208 | HostPatterns::Patterns(patterns) => { 209 | let mut match_found = false; 210 | for pattern in patterns { 211 | let pattern = pattern.to_lowercase(); 212 | // * and ? wildcards are not yet supported 213 | if let Some(pattern) = pattern.strip_prefix('!') { 214 | if pattern == host { 215 | return false; 216 | } 217 | } else { 218 | match_found |= pattern == host; 219 | } 220 | } 221 | match_found 222 | } 223 | // Not yet supported 224 | HostPatterns::HashedName { .. } => false, 225 | } 226 | } 227 | -------------------------------------------------------------------------------- /src/git/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod cache; 2 | pub mod repository; 3 | -------------------------------------------------------------------------------- /src/git/repository.rs: -------------------------------------------------------------------------------- 1 | use std::{path::PathBuf, str::Utf8Error}; 2 | 3 | use crate::model::protofetch::{Descriptor, ModuleName, Revision, RevisionSpecification}; 4 | use git2::{Oid, Repository, ResetType, WorktreeAddOptions}; 5 | use log::{debug, warn}; 6 | use thiserror::Error; 7 | 8 | use super::cache::ProtofetchGitCache; 9 | 10 | #[derive(Error, Debug)] 11 | pub enum ProtoRepoError { 12 | #[error("Error while performing revparse in dep {0} for commit {1}: {2}")] 13 | Revparse(ModuleName, String, git2::Error), 14 | #[error("Git error: {0}")] 15 | GitError(#[from] git2::Error), 16 | #[error("Error while decoding utf8 bytes from blob")] 17 | BlobRead(#[from] Utf8Error), 18 | #[error("Error while parsing descriptor")] 19 | Parsing(#[from] crate::model::ParseError), 20 | #[error("Bad git object kind {kind} found for {commit_hash} (expected blob)")] 21 | BadObjectKind { kind: String, commit_hash: String }, 22 | #[error("Missing protofetch.toml for {commit_hash}")] 23 | MissingDescriptor { commit_hash: String }, 24 | #[error("Branch {branch} was not found.")] 25 | BranchNotFound { branch: String }, 26 | #[error("Revision {revision} does not belong to the branch {branch}.")] 27 | RevisionNotOnBranch { revision: String, branch: String }, 28 | #[error("Worktree with name {name} already exists at {existing_path} but we need it at {wanted_path}")] 29 | WorktreeExists { 30 | name: String, 31 | existing_path: String, 32 | wanted_path: String, 33 | }, 34 | #[error("Error while canonicalizing path {path}: {error}")] 35 | Canonicalization { path: String, error: std::io::Error }, 36 | #[error("IO error: {0}")] 37 | IO(#[from] std::io::Error), 38 | } 39 | 40 | pub struct ProtoGitRepository<'a> { 41 | cache: &'a ProtofetchGitCache, 42 | git_repo: Repository, 43 | origin: String, 44 | } 45 | 46 | impl ProtoGitRepository<'_> { 47 | pub fn new( 48 | cache: &ProtofetchGitCache, 49 | git_repo: Repository, 50 | origin: String, 51 | ) -> ProtoGitRepository { 52 | ProtoGitRepository { 53 | cache, 54 | git_repo, 55 | origin, 56 | } 57 | } 58 | 59 | pub fn fetch(&self, specification: &RevisionSpecification) -> anyhow::Result<()> { 60 | let mut remote = self.git_repo.find_remote("origin")?; 61 | let mut refspecs = Vec::with_capacity(3); 62 | if let Revision::Pinned { revision } = &specification.revision { 63 | refspecs.push(format!("+refs/tags/{}:refs/tags/{}", revision, revision)); 64 | // Some protofetch.toml files specify branch in the revision field, 65 | // or do not specify the branch at all, so we need to fetch all branches. 66 | refspecs.push("+refs/heads/*:refs/remotes/origin/*".to_owned()); 67 | } 68 | if let Some(branch) = &specification.branch { 69 | refspecs.push(format!( 70 | "+refs/heads/{}:refs/remotes/origin/{}", 71 | branch, branch 72 | )); 73 | } 74 | 75 | debug!("Fetching {:?} from {}", refspecs, self.origin); 76 | remote.fetch(&refspecs, Some(&mut self.cache.fetch_options()?), None)?; 77 | Ok(()) 78 | } 79 | 80 | pub fn fetch_commit( 81 | &self, 82 | specification: &RevisionSpecification, 83 | commit_hash: &str, 84 | ) -> anyhow::Result<()> { 85 | let oid = Oid::from_str(commit_hash)?; 86 | if self.git_repo.find_commit(oid).is_ok() { 87 | return Ok(()); 88 | } 89 | let mut remote = self.git_repo.find_remote("origin")?; 90 | 91 | debug!("Fetching {} from {}", commit_hash, self.origin); 92 | if let Err(error) = 93 | remote.fetch(&[commit_hash], Some(&mut self.cache.fetch_options()?), None) 94 | { 95 | warn!( 96 | "Failed to fetch a single commit {}, falling back to a full fetch: {}", 97 | commit_hash, error 98 | ); 99 | self.fetch(specification)?; 100 | } 101 | 102 | Ok(()) 103 | } 104 | 105 | pub fn extract_descriptor( 106 | &self, 107 | dep_name: &ModuleName, 108 | commit_hash: &str, 109 | ) -> Result { 110 | let result = self 111 | .git_repo 112 | .revparse_single(&format!("{commit_hash}:protofetch.toml")); 113 | 114 | match result { 115 | Err(e) if e.code() == git2::ErrorCode::NotFound => { 116 | log::debug!("Couldn't find protofetch.toml, assuming module has no dependencies"); 117 | Ok(Descriptor { 118 | name: dep_name.clone(), 119 | description: None, 120 | proto_out_dir: None, 121 | dependencies: Vec::new(), 122 | }) 123 | } 124 | Err(e) => Err(ProtoRepoError::Revparse( 125 | dep_name.to_owned(), 126 | commit_hash.to_owned(), 127 | e, 128 | )), 129 | Ok(obj) => match obj.kind() { 130 | Some(git2::ObjectType::Blob) => { 131 | let blob = obj.peel_to_blob()?; 132 | let content = std::str::from_utf8(blob.content())?; 133 | let descriptor = Descriptor::from_toml_str(content)?; 134 | 135 | Ok(descriptor) 136 | } 137 | Some(kind) => Err(ProtoRepoError::BadObjectKind { 138 | kind: kind.to_string(), 139 | commit_hash: commit_hash.to_owned(), 140 | }), 141 | None => Err(ProtoRepoError::MissingDescriptor { 142 | commit_hash: commit_hash.to_owned(), 143 | }), 144 | }, 145 | } 146 | } 147 | 148 | pub fn resolve_commit_hash( 149 | &self, 150 | specification: &RevisionSpecification, 151 | ) -> Result { 152 | let RevisionSpecification { branch, revision } = specification; 153 | let oid = match (branch, revision) { 154 | (None, Revision::Arbitrary) => self.commit_hash_for_obj_str("HEAD")?, 155 | (None, Revision::Pinned { revision }) => self.commit_hash_for_obj_str(revision)?, 156 | (Some(branch), Revision::Arbitrary) => self 157 | .commit_hash_for_obj_str(&format!("origin/{branch}")) 158 | .map_err(|_| ProtoRepoError::BranchNotFound { 159 | branch: branch.to_owned(), 160 | })?, 161 | (Some(branch), Revision::Pinned { revision }) => { 162 | let branch_commit = self 163 | .commit_hash_for_obj_str(&format!("origin/{branch}")) 164 | .map_err(|_| ProtoRepoError::BranchNotFound { 165 | branch: branch.to_owned(), 166 | })?; 167 | let revision_commit = self.commit_hash_for_obj_str(revision)?; 168 | if self.is_ancestor(revision_commit, branch_commit)? { 169 | revision_commit 170 | } else { 171 | return Err(ProtoRepoError::RevisionNotOnBranch { 172 | revision: revision.to_owned(), 173 | branch: branch.to_owned(), 174 | }); 175 | } 176 | } 177 | }; 178 | Ok(oid.to_string()) 179 | } 180 | 181 | pub fn create_worktree( 182 | &self, 183 | name: &ModuleName, 184 | commit_hash: &str, 185 | ) -> Result { 186 | let base_path = self.cache.worktrees_path().join(name.as_str()); 187 | 188 | if !base_path.exists() { 189 | std::fs::create_dir_all(&base_path)?; 190 | } 191 | 192 | let worktree_path = base_path.join(PathBuf::from(commit_hash)); 193 | let worktree_name = commit_hash; 194 | 195 | debug!("Finding worktree {} for {}.", worktree_name, name); 196 | 197 | match self.git_repo.find_worktree(worktree_name) { 198 | Ok(worktree) => { 199 | let canonical_existing_path = worktree.path().canonicalize().map_err(|e| { 200 | ProtoRepoError::Canonicalization { 201 | path: worktree.path().to_string_lossy().to_string(), 202 | error: e, 203 | } 204 | })?; 205 | 206 | let canonical_wanted_path = 207 | worktree_path 208 | .canonicalize() 209 | .map_err(|e| ProtoRepoError::Canonicalization { 210 | path: worktree_path.to_string_lossy().to_string(), 211 | error: e, 212 | })?; 213 | 214 | if canonical_existing_path != canonical_wanted_path { 215 | return Err(ProtoRepoError::WorktreeExists { 216 | name: worktree_name.to_string(), 217 | existing_path: worktree.path().to_str().unwrap_or("").to_string(), 218 | wanted_path: worktree_path.to_str().unwrap_or("").to_string(), 219 | }); 220 | } else { 221 | log::info!( 222 | "Found existing worktree for {} at {}.", 223 | name, 224 | canonical_wanted_path.to_string_lossy() 225 | ); 226 | } 227 | } 228 | Err(_) => { 229 | log::info!( 230 | "Creating new worktree for {} at {}.", 231 | name, 232 | worktree_path.to_string_lossy() 233 | ); 234 | 235 | // We need to create a branch-like reference to be able to create a worktree 236 | let reference = self.git_repo.reference( 237 | &format!("refs/heads/{}", commit_hash), 238 | self.git_repo.revparse_single(commit_hash)?.id(), 239 | true, 240 | "", 241 | )?; 242 | 243 | let mut options = WorktreeAddOptions::new(); 244 | options.reference(Some(&reference)); 245 | self.git_repo 246 | .worktree(worktree_name, &worktree_path, Some(&options))?; 247 | } 248 | }; 249 | 250 | let worktree_repo = Repository::open(&worktree_path)?; 251 | let worktree_head_object = worktree_repo.revparse_single(commit_hash)?; 252 | 253 | worktree_repo.reset(&worktree_head_object, ResetType::Hard, None)?; 254 | 255 | Ok(worktree_path) 256 | } 257 | 258 | fn commit_hash_for_obj_str(&self, str: &str) -> Result { 259 | Ok(self.git_repo.revparse_single(str)?.peel_to_commit()?.id()) 260 | } 261 | 262 | // Check if `a` is an ancestor of `b` 263 | fn is_ancestor(&self, a: Oid, b: Oid) -> Result { 264 | Ok(self.git_repo.merge_base(a, b)? == a) 265 | } 266 | } 267 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | mod api; 2 | mod cache; 3 | mod cli; 4 | mod config; 5 | mod fetch; 6 | mod flock; 7 | mod git; 8 | mod model; 9 | mod proto; 10 | mod resolver; 11 | 12 | pub use api::{LockMode, Protofetch, ProtofetchBuilder}; 13 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | 3 | use clap::Parser; 4 | use env_logger::Target; 5 | 6 | use log::warn; 7 | use protofetch::{LockMode, Protofetch}; 8 | 9 | /// Dependency management tool for Protocol Buffers files. 10 | #[derive(Debug, Parser)] 11 | #[clap(version)] 12 | pub struct CliArgs { 13 | #[clap(subcommand)] 14 | pub cmd: Command, 15 | #[clap(short, long, default_value = "protofetch.toml")] 16 | /// Name of the protofetch configuration toml file 17 | pub module_location: String, 18 | #[clap(short, long, default_value = "protofetch.lock")] 19 | /// Name of the protofetch lock file 20 | pub lockfile_location: String, 21 | #[clap(short, long)] 22 | /// Location of the protofetch cache directory [default: platform-specific] 23 | pub cache_directory: Option, 24 | /// Name of the output directory for proto source files, 25 | /// this will override proto_out_dir from the module toml config 26 | #[clap(short, long)] 27 | pub output_proto_directory: Option, 28 | } 29 | 30 | #[derive(Debug, Parser)] 31 | pub enum Command { 32 | /// Fetches protodep dependencies defined in the toml configuration file 33 | Fetch { 34 | /// reqiure dependencies to match the lock file 35 | #[clap(long)] 36 | locked: bool, 37 | /// forces re-creation of lock file 38 | #[clap(short, long, hide(true))] 39 | force_lock: bool, 40 | }, 41 | /// Creates a lock file based on toml configuration file 42 | Lock, 43 | /// Updates the lock file 44 | Update, 45 | /// Creates an init protofetch setup in provided directory and name 46 | Init { 47 | #[clap(default_value = ".")] 48 | directory: String, 49 | #[clap(short, long)] 50 | name: Option, 51 | }, 52 | /// Migrates a protodep toml file to a protofetch format 53 | Migrate { 54 | #[clap(default_value = ".")] 55 | directory: String, 56 | #[clap(short, long)] 57 | name: Option, 58 | }, 59 | /// Cleans generated proto sources and lock file 60 | Clean, 61 | /// Clears cached dependencies. 62 | /// This will remove all cached dependencies and metadata hence making the next fetch operation slower. 63 | ClearCache, 64 | } 65 | 66 | fn main() { 67 | env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")) 68 | .target(Target::Stdout) 69 | .format(move |buf, record| { 70 | use std::io::Write; 71 | 72 | let at_least_debug_log = log::log_enabled!(log::Level::Debug); 73 | let level = record.level(); 74 | let style = buf.default_level_style(level); 75 | 76 | write!(buf, "{style}{}{style:#}", level)?; 77 | 78 | if at_least_debug_log { 79 | write!( 80 | buf, 81 | " [{}:{}]", 82 | record.file().unwrap_or("unknown"), 83 | record.line().unwrap_or(0), 84 | )?; 85 | } 86 | writeln!(buf, " {}", record.args()) 87 | }) 88 | .init(); 89 | 90 | if let Err(e) = run() { 91 | log::error!("{}", e); 92 | std::process::exit(1); 93 | } 94 | } 95 | 96 | fn run() -> Result<(), Box> { 97 | let cli_args: CliArgs = CliArgs::parse(); 98 | 99 | let mut protofetch = Protofetch::builder() 100 | .module_file_name(&cli_args.module_location) 101 | .lock_file_name(&cli_args.lockfile_location); 102 | 103 | if let Some(output_directory_name) = &cli_args.output_proto_directory { 104 | protofetch = protofetch.output_directory_name(output_directory_name) 105 | } 106 | if let Some(cache_directory) = &cli_args.cache_directory { 107 | protofetch = protofetch.cache_directory(cache_directory); 108 | } 109 | 110 | match cli_args.cmd { 111 | Command::Fetch { locked, force_lock } => { 112 | let lock_mode = if force_lock { 113 | warn!("Specifying --force-lock is deprecated, please use \"protofetch update\" instead."); 114 | LockMode::Recreate 115 | } else if locked { 116 | LockMode::Locked 117 | } else { 118 | LockMode::Update 119 | }; 120 | 121 | protofetch.try_build()?.fetch(lock_mode) 122 | } 123 | Command::Lock => protofetch.try_build()?.lock(LockMode::Update), 124 | Command::Update => protofetch.try_build()?.lock(LockMode::Recreate), 125 | Command::Init { directory, name } => protofetch.root(directory).try_build()?.init(name), 126 | Command::Migrate { directory, name } => protofetch 127 | .root(&directory) 128 | .try_build()? 129 | .migrate(name, directory), 130 | Command::Clean => protofetch.try_build()?.clean(), 131 | Command::ClearCache => protofetch.try_build()?.clear_cache(), 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /src/model/mod.rs: -------------------------------------------------------------------------------- 1 | use std::num::ParseIntError; 2 | use thiserror::Error; 3 | 4 | pub mod protodep; 5 | pub mod protofetch; 6 | 7 | #[derive(Error, Debug)] 8 | pub enum ParseError { 9 | #[error("IO error reading configuration toml: {0}")] 10 | IO(#[from] std::io::Error), 11 | #[error("TOML parsing error: {0}")] 12 | Toml(#[from] toml::de::Error), 13 | #[error("Parse error")] 14 | Parse(#[from] ParseIntError), 15 | #[error("Invalid protocol: {0}")] 16 | InvalidProtocol(String), 17 | #[error("Missing TOML key `{0}` while parsing")] 18 | MissingKey(String), 19 | #[error("AllowList rule is invalid: `{0}`")] 20 | ParsePolicyRuleError(String), 21 | #[error("Missing url component `{0}` in string `{1}`")] 22 | MissingUrlComponent(String, String), 23 | #[error("Unsupported lock file version {0}")] 24 | UnsupportedLockFileVersion(toml::Value), 25 | #[error("Old lock file version {0}, consider running \"protofetch update\"")] 26 | OldLockFileVersion(i64), 27 | } 28 | -------------------------------------------------------------------------------- /src/model/protodep.rs: -------------------------------------------------------------------------------- 1 | use crate::model::{ 2 | protofetch::{ 3 | Coordinate, Dependency as ProtofetchDependency, Descriptor, ModuleName, Protocol, Revision, 4 | RevisionSpecification, Rules, 5 | }, 6 | ParseError, 7 | }; 8 | use log::{debug, error}; 9 | use serde::{Deserialize, Serialize}; 10 | use std::{collections::HashMap, path::Path, str::FromStr}; 11 | use toml::Value; 12 | 13 | #[derive(PartialEq, Eq, Hash, Debug, Clone, Serialize, Deserialize, Default)] 14 | #[serde(default)] 15 | pub struct Dependency { 16 | pub target: String, 17 | pub protocol: Option, 18 | pub revision: String, 19 | pub subgroup: Option, 20 | pub branch: Option, 21 | pub path: Option, 22 | pub ignores: Vec, 23 | pub includes: Vec, 24 | } 25 | 26 | #[derive(PartialEq, Eq, Hash, Debug, Clone, Serialize, Deserialize)] 27 | pub struct ProtodepDescriptor { 28 | #[serde(rename = "proto_outdir")] 29 | pub proto_out_dir: String, 30 | pub dependencies: Vec, 31 | } 32 | 33 | impl ProtodepDescriptor { 34 | pub fn from_file(path: &Path) -> Result { 35 | debug!( 36 | "Attempting to read descriptor from protodep file {}", 37 | path.display() 38 | ); 39 | let contents = std::fs::read_to_string(path)?; 40 | 41 | let descriptor = ProtodepDescriptor::from_toml_str(&contents); 42 | if let Err(err) = &descriptor { 43 | error!("Could not build a valid descriptor from a protodep toml file due to err {err}") 44 | } 45 | descriptor 46 | } 47 | 48 | pub fn from_toml_str(data: &str) -> Result { 49 | let mut toml_value = toml::from_str::>(data)?; 50 | 51 | let proto_out_dir = toml_value 52 | .remove("proto_outdir") 53 | .ok_or_else(|| ParseError::MissingKey("proto_outdir".to_string())) 54 | .and_then(|v| v.try_into::().map_err(|e| e.into()))?; 55 | 56 | let dependencies = toml_value 57 | .get("dependencies") 58 | .and_then(|x| x.as_array()) 59 | .get_or_insert(&vec![]) 60 | .iter() 61 | .cloned() 62 | .map(|v| v.try_into::()) 63 | .collect::, _>>()?; 64 | 65 | Ok(ProtodepDescriptor { 66 | proto_out_dir, 67 | dependencies, 68 | }) 69 | } 70 | 71 | pub fn into_proto_fetch(self) -> Result { 72 | fn convert_dependency(d: Dependency) -> Result { 73 | let protocol = match &d.protocol { 74 | None => None, 75 | Some(protocol) => Some(Protocol::from_str(protocol)?), 76 | }; 77 | let coordinate = Coordinate::from_url_protocol(d.target.as_str(), protocol)?; 78 | let specification = RevisionSpecification { 79 | revision: Revision::pinned(d.revision), 80 | branch: d.branch, 81 | }; 82 | let name = ModuleName::new(coordinate.repository.clone()); 83 | Ok(ProtofetchDependency { 84 | name, 85 | coordinate, 86 | specification, 87 | rules: Rules::default(), 88 | }) 89 | } 90 | 91 | let dependencies = self 92 | .dependencies 93 | .into_iter() 94 | .map(convert_dependency) 95 | .collect::, _>>()?; 96 | 97 | Ok(Descriptor { 98 | name: ModuleName::from("generated"), 99 | description: Some("Generated from protodep file".to_string()), 100 | proto_out_dir: self.proto_out_dir.into(), 101 | dependencies, 102 | }) 103 | } 104 | } 105 | 106 | #[cfg(test)] 107 | mod tests { 108 | use super::*; 109 | 110 | use pretty_assertions::assert_eq; 111 | 112 | #[test] 113 | fn load_valid_file_one_dep() { 114 | let str = r#" 115 | proto_outdir = "./proto_out" 116 | 117 | [[dependencies]] 118 | target = "github.com/opensaasstudio/plasma/protobuf" 119 | branch = "master" 120 | protocol = "ssh" 121 | revision = "1.0.0" 122 | "#; 123 | 124 | let expected = ProtodepDescriptor { 125 | proto_out_dir: "./proto_out".to_string(), 126 | dependencies: vec![Dependency { 127 | target: "github.com/opensaasstudio/plasma/protobuf".to_string(), 128 | subgroup: None, 129 | branch: Some("master".to_string()), 130 | revision: "1.0.0".to_string(), 131 | path: None, 132 | ignores: vec![], 133 | includes: vec![], 134 | protocol: Some("ssh".to_string()), 135 | }], 136 | }; 137 | 138 | assert_eq!(ProtodepDescriptor::from_toml_str(str).unwrap(), expected); 139 | } 140 | 141 | #[test] 142 | fn load_valid_file_multiple_dep() { 143 | let str = r#" 144 | proto_outdir = "./proto_out" 145 | 146 | [[dependencies]] 147 | target = "github.com/opensaasstudio/plasma/protobuf" 148 | branch = "master" 149 | protocol = "ssh" 150 | revision = "1.0.0" 151 | 152 | [[dependencies]] 153 | target = "github.com/opensaasstudio/plasma1/protobuf" 154 | branch = "master" 155 | protocol = "https" 156 | revision = "2.0.0" 157 | 158 | [[dependencies]] 159 | target = "github.com/opensaasstudio/plasma2/protobuf" 160 | revision = "3.0.0" 161 | "#; 162 | 163 | let expected = ProtodepDescriptor { 164 | proto_out_dir: "./proto_out".to_string(), 165 | dependencies: vec![ 166 | Dependency { 167 | target: "github.com/opensaasstudio/plasma/protobuf".to_string(), 168 | subgroup: None, 169 | branch: Some("master".to_string()), 170 | revision: "1.0.0".to_string(), 171 | path: None, 172 | ignores: vec![], 173 | includes: vec![], 174 | protocol: Some("ssh".to_string()), 175 | }, 176 | Dependency { 177 | target: "github.com/opensaasstudio/plasma1/protobuf".to_string(), 178 | subgroup: None, 179 | branch: Some("master".to_string()), 180 | revision: "2.0.0".to_string(), 181 | path: None, 182 | ignores: vec![], 183 | includes: vec![], 184 | protocol: Some("https".to_string()), 185 | }, 186 | Dependency { 187 | target: "github.com/opensaasstudio/plasma2/protobuf".to_string(), 188 | subgroup: None, 189 | branch: None, 190 | revision: "3.0.0".to_string(), 191 | path: None, 192 | ignores: vec![], 193 | includes: vec![], 194 | protocol: None, 195 | }, 196 | ], 197 | }; 198 | 199 | assert_eq!(ProtodepDescriptor::from_toml_str(str).unwrap(), expected); 200 | } 201 | 202 | #[test] 203 | fn load_valid_file_no_dep() { 204 | let str = r#"proto_outdir = "./proto_out""#; 205 | let expected = ProtodepDescriptor { 206 | proto_out_dir: "./proto_out".to_string(), 207 | dependencies: vec![], 208 | }; 209 | 210 | assert_eq!(ProtodepDescriptor::from_toml_str(str).unwrap(), expected); 211 | } 212 | 213 | #[test] 214 | fn migrate_protodep_to_protofetch_file() { 215 | let protodep_toml = r#" 216 | proto_outdir = "./proto_out" 217 | 218 | [[dependencies]] 219 | target = "github.com/opensaasstudio/plasma" 220 | branch = "master" 221 | protocol = "ssh" 222 | revision = "1.5.0" 223 | "#; 224 | 225 | let protofetch_toml = r#" 226 | name = "generated" 227 | description = "Generated from protodep file" 228 | proto_out_dir = "./proto_out" 229 | [plasma] 230 | url="github.com/opensaasstudio/plasma" 231 | protocol = "ssh" 232 | branch = "master" 233 | revision = "1.5.0" 234 | "#; 235 | let descriptor = ProtodepDescriptor::from_toml_str(protodep_toml) 236 | .unwrap() 237 | .into_proto_fetch() 238 | .unwrap(); 239 | let toml = toml::to_string(&descriptor.into_toml()).unwrap(); 240 | 241 | let expected = Descriptor::from_toml_str(protofetch_toml).unwrap(); 242 | let result = Descriptor::from_toml_str(&toml).unwrap(); 243 | assert_eq!(result, expected); 244 | } 245 | } 246 | -------------------------------------------------------------------------------- /src/model/protofetch/lock.rs: -------------------------------------------------------------------------------- 1 | use std::{fmt::Display, path::Path}; 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | use crate::model::ParseError; 6 | 7 | use super::{Coordinate, ModuleName, Protocol, RevisionSpecification}; 8 | 9 | #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] 10 | pub struct LockFile { 11 | pub dependencies: Vec, 12 | } 13 | 14 | const VERSION: i64 = 2; 15 | 16 | #[derive(Debug, Clone, Serialize, PartialEq, Eq)] 17 | struct VersionedLockFile<'a> { 18 | pub version: i64, 19 | #[serde(flatten)] 20 | pub content: &'a LockFile, 21 | } 22 | 23 | impl LockFile { 24 | pub fn from_file(file: &Path) -> Result { 25 | LockFile::from_str(&std::fs::read_to_string(file)?) 26 | } 27 | 28 | pub fn from_str(s: &str) -> Result { 29 | let mut table = toml::from_str::(s)?; 30 | match table.remove("version") { 31 | Some(toml::Value::Integer(VERSION)) => table.try_into::().map_err(Into::into), 32 | Some(other) => Err(ParseError::UnsupportedLockFileVersion(other)), 33 | None => Err(ParseError::OldLockFileVersion(1)), 34 | } 35 | } 36 | 37 | pub fn to_string(&self) -> Result { 38 | toml::to_string_pretty(&VersionedLockFile { 39 | version: VERSION, 40 | content: self, 41 | }) 42 | } 43 | } 44 | 45 | #[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)] 46 | pub struct LockedDependency { 47 | pub name: ModuleName, 48 | #[serde(flatten)] 49 | pub coordinate: LockedCoordinate, 50 | #[serde(flatten)] 51 | pub specification: RevisionSpecification, 52 | pub commit_hash: String, 53 | } 54 | 55 | #[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)] 56 | pub struct LockedCoordinate { 57 | pub url: String, 58 | pub protocol: Option, 59 | } 60 | 61 | impl Display for LockedCoordinate { 62 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 63 | write!(f, "{}", &self.url)?; 64 | if let Some(protocol) = &self.protocol { 65 | write!(f, " ({})", protocol)?; 66 | } 67 | Ok(()) 68 | } 69 | } 70 | 71 | impl From<&Coordinate> for LockedCoordinate { 72 | fn from(value: &Coordinate) -> Self { 73 | LockedCoordinate { 74 | url: format!( 75 | "{}/{}/{}", 76 | value.forge, value.organization, value.repository 77 | ), 78 | protocol: value.protocol, 79 | } 80 | } 81 | } 82 | 83 | #[cfg(test)] 84 | mod tests { 85 | use toml::toml; 86 | 87 | use crate::model::protofetch::{Protocol, Revision}; 88 | 89 | use super::*; 90 | 91 | use pretty_assertions::assert_eq; 92 | 93 | #[test] 94 | fn load_save_lock_file() { 95 | let text = toml::to_string_pretty(&toml! { 96 | version = 2 97 | 98 | [[dependencies]] 99 | name = "dep1" 100 | url = "example.com/org/dep1" 101 | protocol = "https" 102 | revision = "1.0.0" 103 | branch = "main" 104 | commit_hash = "hash1" 105 | 106 | [[dependencies]] 107 | name = "dep2" 108 | url = "example.com/org/dep2" 109 | commit_hash = "hash2" 110 | }) 111 | .unwrap(); 112 | let data = LockFile { 113 | dependencies: vec![ 114 | LockedDependency { 115 | name: ModuleName::new("dep1".to_string()), 116 | commit_hash: "hash1".to_string(), 117 | coordinate: LockedCoordinate { 118 | url: "example.com/org/dep1".to_owned(), 119 | protocol: Some(Protocol::Https), 120 | }, 121 | specification: RevisionSpecification { 122 | revision: Revision::pinned("1.0.0"), 123 | branch: Some("main".to_owned()), 124 | }, 125 | }, 126 | LockedDependency { 127 | name: ModuleName::new("dep2".to_string()), 128 | commit_hash: "hash2".to_string(), 129 | coordinate: LockedCoordinate { 130 | url: "example.com/org/dep2".to_owned(), 131 | protocol: None, 132 | }, 133 | specification: RevisionSpecification::default(), 134 | }, 135 | ], 136 | }; 137 | let parsed = LockFile::from_str(&text).unwrap(); 138 | let formatted = data.to_string().unwrap(); 139 | assert_eq!(parsed, data); 140 | assert_eq!(formatted, text); 141 | } 142 | 143 | #[test] 144 | fn load_lock_file_v1() { 145 | let text = toml::to_string_pretty(&toml! { 146 | module_name = "foo" 147 | }) 148 | .unwrap(); 149 | LockFile::from_str(&text).expect_err("should not parse v1 lock file"); 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /src/model/protofetch/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod lock; 2 | pub mod resolved; 3 | 4 | use regex_lite::Regex; 5 | use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; 6 | use std::{ 7 | collections::HashMap, 8 | fmt::{Debug, Display, Write}, 9 | path::{Path, PathBuf}, 10 | str::FromStr, 11 | }; 12 | 13 | use crate::model::ParseError; 14 | use log::{debug, error}; 15 | use std::{collections::BTreeSet, hash::Hash}; 16 | use toml::{map::Map, Value}; 17 | 18 | #[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] 19 | pub struct Coordinate { 20 | pub forge: String, 21 | pub organization: String, 22 | pub repository: String, 23 | pub protocol: Option, 24 | } 25 | 26 | impl Coordinate { 27 | pub fn from_url_protocol( 28 | url: &str, 29 | protocol: Option, 30 | ) -> Result { 31 | let re: Regex = 32 | Regex::new(r"^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/?$") 33 | .unwrap(); 34 | let url_parse_results = re.captures(url); 35 | let url_parse_results = url_parse_results.as_ref(); 36 | 37 | Ok(Coordinate { 38 | forge: url_parse_results 39 | .and_then(|c| c.name("forge")) 40 | .map(|s| s.as_str().to_string()) 41 | .ok_or_else(|| { 42 | ParseError::MissingUrlComponent("forge".to_string(), url.to_string()) 43 | })?, 44 | organization: url_parse_results 45 | .and_then(|c| c.name("organization")) 46 | .map(|s| s.as_str().to_string()) 47 | .ok_or_else(|| { 48 | ParseError::MissingUrlComponent("organization".to_string(), url.to_string()) 49 | })?, 50 | repository: url_parse_results 51 | .and_then(|c| c.name("repository")) 52 | .map(|s| s.as_str().to_string()) 53 | .ok_or_else(|| { 54 | ParseError::MissingUrlComponent("repository".to_string(), url.to_string()) 55 | })?, 56 | protocol, 57 | }) 58 | } 59 | 60 | #[cfg(test)] 61 | pub fn from_url(url: &str) -> Result { 62 | Self::from_url_protocol(url, None) 63 | } 64 | 65 | pub fn to_path(&self) -> PathBuf { 66 | let mut result = PathBuf::new(); 67 | 68 | result.push(self.forge.clone()); 69 | result.push(self.organization.clone()); 70 | result.push(self.repository.clone()); 71 | 72 | result 73 | } 74 | 75 | pub fn to_git_url(&self, default_protocol: Protocol) -> String { 76 | match self.protocol.unwrap_or(default_protocol) { 77 | Protocol::Https => format!( 78 | "https://{}/{}/{}", 79 | self.forge, self.organization, self.repository 80 | ), 81 | Protocol::Ssh => format!( 82 | "ssh://git@{}/{}/{}.git", 83 | self.forge, self.organization, self.repository 84 | ), 85 | } 86 | } 87 | } 88 | 89 | impl Display for Coordinate { 90 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { 91 | write!( 92 | f, 93 | "{}/{}/{}", 94 | self.forge, self.organization, self.repository 95 | ) 96 | } 97 | } 98 | 99 | #[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, Serialize, Deserialize, Ord, PartialOrd)] 100 | pub enum Protocol { 101 | #[serde(rename = "https")] 102 | Https, 103 | #[serde(rename = "ssh")] 104 | Ssh, 105 | } 106 | 107 | impl FromStr for Protocol { 108 | type Err = ParseError; 109 | 110 | fn from_str(value: &str) -> Result { 111 | let value = value.to_ascii_lowercase(); 112 | match value.as_str() { 113 | "https" => Ok(Protocol::Https), 114 | "ssh" => Ok(Protocol::Ssh), 115 | _ => Err(ParseError::InvalidProtocol(value)), 116 | } 117 | } 118 | } 119 | 120 | impl Display for Protocol { 121 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { 122 | match self { 123 | Protocol::Https => f.write_str("https"), 124 | Protocol::Ssh => f.write_str("ssh"), 125 | } 126 | } 127 | } 128 | 129 | #[derive(Debug, Clone, Default, PartialEq, Eq, Ord, PartialOrd)] 130 | pub enum Revision { 131 | Pinned { 132 | revision: String, 133 | }, 134 | #[default] 135 | Arbitrary, 136 | } 137 | 138 | impl Revision { 139 | pub fn pinned(revision: impl Into) -> Revision { 140 | Revision::Pinned { 141 | revision: revision.into(), 142 | } 143 | } 144 | 145 | fn is_arbitrary(&self) -> bool { 146 | self == &Self::Arbitrary 147 | } 148 | } 149 | 150 | impl Display for Revision { 151 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 152 | match self { 153 | Revision::Pinned { revision } => f.write_str(revision), 154 | Revision::Arbitrary => f.write_char('*'), 155 | } 156 | } 157 | } 158 | 159 | impl Serialize for Revision { 160 | fn serialize(&self, serializer: S) -> Result 161 | where 162 | S: Serializer, 163 | { 164 | match self { 165 | Revision::Pinned { revision } => serializer.serialize_str(revision), 166 | Revision::Arbitrary => serializer.serialize_unit(), 167 | } 168 | } 169 | } 170 | 171 | impl<'de> Deserialize<'de> for Revision { 172 | fn deserialize(deserializer: D) -> Result 173 | where 174 | D: Deserializer<'de>, 175 | { 176 | struct RevisionVisitor; 177 | 178 | impl Visitor<'_> for RevisionVisitor { 179 | type Value = Revision; 180 | 181 | fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { 182 | formatter.write_str("a string") 183 | } 184 | 185 | fn visit_unit(self) -> Result 186 | where 187 | E: serde::de::Error, 188 | { 189 | Ok(Revision::Arbitrary) 190 | } 191 | 192 | fn visit_str(self, v: &str) -> Result 193 | where 194 | E: serde::de::Error, 195 | { 196 | Ok(Revision::pinned(v)) 197 | } 198 | 199 | fn visit_string(self, v: String) -> Result 200 | where 201 | E: serde::de::Error, 202 | { 203 | Ok(Revision::pinned(v)) 204 | } 205 | } 206 | 207 | deserializer.deserialize_any(RevisionVisitor) 208 | } 209 | } 210 | 211 | #[derive(Debug, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] 212 | pub struct RevisionSpecification { 213 | #[serde(skip_serializing_if = "Revision::is_arbitrary", default)] 214 | pub revision: Revision, 215 | #[serde(skip_serializing_if = "Option::is_none", default)] 216 | pub branch: Option, 217 | } 218 | 219 | impl Display for RevisionSpecification { 220 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 221 | match self { 222 | RevisionSpecification { 223 | revision, 224 | branch: None, 225 | } => write!(f, "{}", revision), 226 | RevisionSpecification { 227 | revision, 228 | branch: Some(branch), 229 | } => write!(f, "{}@{}", branch, revision), 230 | } 231 | } 232 | } 233 | 234 | #[derive(Default, Clone, Debug, Ord, PartialOrd, PartialEq, Eq, Hash)] 235 | pub struct Rules { 236 | pub prune: bool, 237 | pub transitive: bool, 238 | pub content_roots: BTreeSet, 239 | pub allow_policies: AllowPolicies, 240 | pub deny_policies: DenyPolicies, 241 | } 242 | 243 | /// A content root path for a repository. 244 | #[derive(Ord, PartialOrd, PartialEq, Eq, Hash, Debug, Clone)] 245 | pub struct ContentRoot { 246 | pub value: PathBuf, 247 | } 248 | 249 | impl ContentRoot { 250 | pub fn from_string(s: &str) -> ContentRoot { 251 | let path = PathBuf::from(s); 252 | let path = path.strip_prefix("/").unwrap_or(&path).to_path_buf(); 253 | ContentRoot { value: path } 254 | } 255 | } 256 | 257 | #[derive(Default, Ord, PartialOrd, PartialEq, Eq, Hash, Debug, Clone, Serialize, Deserialize)] 258 | pub struct AllowPolicies { 259 | policies: BTreeSet, 260 | } 261 | 262 | impl AllowPolicies { 263 | pub fn new(policies: BTreeSet) -> Self { 264 | AllowPolicies { policies } 265 | } 266 | 267 | pub fn should_allow_file(allow_policies: &Self, file: &Path) -> bool { 268 | if allow_policies.policies.is_empty() { 269 | true 270 | } else { 271 | !Self::filter(allow_policies, &vec![file.to_path_buf()]).is_empty() 272 | } 273 | } 274 | 275 | pub fn filter(allow_policies: &Self, paths: &Vec) -> Vec { 276 | FilePolicy::apply_file_policies(&allow_policies.policies, paths) 277 | } 278 | } 279 | 280 | #[derive(Ord, PartialOrd, PartialEq, Eq, Hash, Debug, Clone, Serialize, Deserialize)] 281 | pub struct DenyPolicies { 282 | policies: BTreeSet, 283 | } 284 | 285 | impl DenyPolicies { 286 | pub fn new(policies: BTreeSet) -> Self { 287 | DenyPolicies { policies } 288 | } 289 | 290 | pub fn deny_files(deny_policies: &Self, files: &Vec) -> Vec { 291 | if deny_policies.policies.is_empty() { 292 | files.clone() 293 | } else { 294 | let filtered = FilePolicy::apply_file_policies(&deny_policies.policies, files); 295 | files 296 | .iter() 297 | .filter(|f| !filtered.contains(f)) 298 | .cloned() 299 | .collect() 300 | } 301 | } 302 | 303 | pub fn should_deny_file(deny_policies: &Self, file: &Path) -> bool { 304 | if deny_policies.policies.is_empty() { 305 | false 306 | } else { 307 | Self::deny_files(deny_policies, &vec![file.to_path_buf()]).is_empty() 308 | } 309 | } 310 | } 311 | 312 | impl Default for DenyPolicies { 313 | fn default() -> Self { 314 | DenyPolicies::new(BTreeSet::new()) 315 | } 316 | } 317 | 318 | #[derive(Ord, PartialOrd, PartialEq, Eq, Hash, Debug, Clone, Serialize, Deserialize)] 319 | /// Describes a policy to filter files or directories based on a policy kind and a path. 320 | /// The field kind is necessary due to a limitation in toml serialization. 321 | pub struct FilePolicy { 322 | pub kind: PolicyKind, 323 | pub path: PathBuf, 324 | } 325 | 326 | impl FilePolicy { 327 | pub fn new(kind: PolicyKind, path: PathBuf) -> Self { 328 | Self { kind, path } 329 | } 330 | 331 | pub fn try_from_str(s: &str) -> Result { 332 | if s.starts_with("*/") && s.ends_with("/*") { 333 | Ok(FilePolicy { 334 | kind: PolicyKind::SubPath, 335 | path: PathBuf::from( 336 | s.strip_prefix('*') 337 | .unwrap() 338 | .strip_suffix("/*") 339 | .unwrap() 340 | .to_string(), 341 | ), 342 | }) 343 | } else if s.ends_with("/*") { 344 | let path = PathBuf::from(s.strip_suffix("/*").unwrap()); 345 | let path = Self::add_leading_slash(&path); 346 | Ok(FilePolicy::new(PolicyKind::Prefix, path)) 347 | } else if s.ends_with(".proto") { 348 | let path = Self::add_leading_slash(&PathBuf::from(s)); 349 | Ok(FilePolicy::new(PolicyKind::File, path)) 350 | } else { 351 | Err(ParseError::ParsePolicyRuleError(s.to_string())) 352 | } 353 | } 354 | 355 | fn add_leading_slash(p: &Path) -> PathBuf { 356 | if !p.starts_with("/") { 357 | PathBuf::from(format!("/{}", p.to_string_lossy())) 358 | } else { 359 | p.to_path_buf() 360 | } 361 | } 362 | 363 | pub fn apply_file_policies( 364 | policies: &BTreeSet, 365 | paths: &Vec, 366 | ) -> Vec { 367 | if policies.is_empty() { 368 | return paths.clone(); 369 | } 370 | let mut result = Vec::new(); 371 | for path in paths { 372 | let path = Self::add_leading_slash(path); 373 | for policy in policies { 374 | match policy.kind { 375 | PolicyKind::File => { 376 | if path == policy.path { 377 | result.push(path.clone()); 378 | } 379 | } 380 | PolicyKind::Prefix => { 381 | if path.starts_with(&policy.path) { 382 | result.push(path.clone()); 383 | } 384 | } 385 | PolicyKind::SubPath => { 386 | if path 387 | .to_string_lossy() 388 | .contains(&policy.path.to_string_lossy().to_string()) 389 | { 390 | result.push(path.clone()); 391 | } 392 | } 393 | } 394 | } 395 | } 396 | result 397 | } 398 | } 399 | 400 | #[derive(Ord, PartialOrd, PartialEq, Eq, Hash, Debug, Clone, Serialize, Deserialize)] 401 | pub enum PolicyKind { 402 | /// /path/to/file.proto 403 | File, 404 | /// /prefix/* 405 | Prefix, 406 | /// */subpath/* 407 | SubPath, 408 | } 409 | 410 | #[derive(Clone, Hash, Deserialize, Serialize, Debug, PartialEq, Eq, Ord, PartialOrd)] 411 | pub struct ModuleName(String); 412 | 413 | impl ModuleName { 414 | pub fn new(s: String) -> Self { 415 | ModuleName(s) 416 | } 417 | 418 | pub fn as_str(&self) -> &str { 419 | &self.0 420 | } 421 | } 422 | 423 | impl Display for ModuleName { 424 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 425 | f.write_str(&self.0) 426 | } 427 | } 428 | 429 | impl From for ModuleName { 430 | fn from(s: String) -> Self { 431 | ModuleName(s) 432 | } 433 | } 434 | 435 | impl From<&str> for ModuleName { 436 | fn from(s: &str) -> Self { 437 | ModuleName(s.to_string()) 438 | } 439 | } 440 | 441 | #[derive(Debug, PartialEq, PartialOrd, Ord, Eq, Clone)] 442 | pub struct Dependency { 443 | pub name: ModuleName, 444 | pub coordinate: Coordinate, 445 | pub specification: RevisionSpecification, 446 | pub rules: Rules, 447 | } 448 | 449 | #[derive(PartialEq, Debug, PartialOrd, Ord, Eq, Clone)] 450 | pub struct Descriptor { 451 | pub name: ModuleName, 452 | pub description: Option, 453 | pub proto_out_dir: Option, 454 | pub dependencies: Vec, 455 | } 456 | 457 | impl Descriptor { 458 | pub fn from_file(path: &Path) -> Result { 459 | debug!( 460 | "Attempting to read descriptor from protofetch file {}", 461 | path.display() 462 | ); 463 | let contents = std::fs::read_to_string(path)?; 464 | 465 | let descriptor = Descriptor::from_toml_str(&contents); 466 | if let Err(err) = &descriptor { 467 | error!( 468 | "Could not build a valid descriptor from a protofetch toml file due to err {err}" 469 | ) 470 | } 471 | descriptor 472 | } 473 | 474 | pub fn from_toml_str(data: &str) -> Result { 475 | let mut toml_value = toml::from_str::>(data)?; 476 | 477 | let name = toml_value 478 | .remove("name") 479 | .ok_or_else(|| ParseError::MissingKey("name".to_string())) 480 | .and_then(|v| v.try_into::().map_err(|e| e.into()))?; 481 | 482 | let description = toml_value 483 | .remove("description") 484 | .map(|v| v.try_into::()) 485 | .map_or(Ok(None), |v| v.map(Some))?; 486 | 487 | let proto_out_dir = toml_value 488 | .remove("proto_out_dir") 489 | .map(|v| v.try_into::()) 490 | .map_or(Ok(None), |v| v.map(Some))?; 491 | 492 | let dependencies = toml_value 493 | .into_iter() 494 | .map(|(k, v)| parse_dependency(k, &v)) 495 | .collect::, _>>()?; 496 | 497 | Ok(Descriptor { 498 | name, 499 | description, 500 | proto_out_dir, 501 | dependencies, 502 | }) 503 | } 504 | 505 | pub fn into_toml(self) -> Value { 506 | let mut description = Map::new(); 507 | description.insert("name".to_string(), Value::String(self.name.to_string())); 508 | if let Some(d) = self.description { 509 | description.insert("description".to_string(), Value::String(d)); 510 | } 511 | if let Some(proto_out) = self.proto_out_dir { 512 | description.insert("proto_out_dir".to_string(), Value::String(proto_out)); 513 | } 514 | 515 | for d in self.dependencies { 516 | let mut dependency = Map::new(); 517 | dependency.insert("url".to_string(), Value::String(d.coordinate.to_string())); 518 | if let Some(protocol) = d.coordinate.protocol { 519 | dependency.insert("protocol".to_string(), Value::String(protocol.to_string())); 520 | } 521 | if let Revision::Pinned { revision } = d.specification.revision { 522 | dependency.insert("revision".to_owned(), Value::String(revision)); 523 | } 524 | if let Some(branch) = d.specification.branch { 525 | dependency.insert("branch".to_owned(), Value::String(branch)); 526 | } 527 | description.insert(d.name.to_string(), Value::Table(dependency)); 528 | } 529 | Value::Table(description) 530 | } 531 | } 532 | 533 | fn parse_dependency(name: String, value: &toml::Value) -> Result { 534 | let protocol = match value.get("protocol") { 535 | None => None, 536 | Some(toml) => Some(toml.clone().try_into::()?), 537 | }; 538 | 539 | let name = ModuleName::new(name); 540 | 541 | let branch = value 542 | .get("branch") 543 | .map(|v| v.clone().try_into::()) 544 | .map_or(Ok(None), |v| v.map(Some))?; 545 | 546 | let coordinate = value 547 | .get("url") 548 | .ok_or_else(|| ParseError::MissingKey("url".to_string())) 549 | .and_then(|x| x.clone().try_into::().map_err(|e| e.into())) 550 | .and_then(|url| Coordinate::from_url_protocol(&url, protocol))?; 551 | 552 | let revision = match value.get("revision") { 553 | Some(revision) => parse_revision(revision)?, 554 | None => Revision::Arbitrary, 555 | }; 556 | 557 | let specification = RevisionSpecification { revision, branch }; 558 | 559 | let prune = value 560 | .get("prune") 561 | .map(|v| v.clone().try_into::()) 562 | .map_or(Ok(None), |v| v.map(Some))? 563 | .unwrap_or(false); 564 | 565 | let content_roots = value 566 | .get("content_roots") 567 | .map(|v| v.clone().try_into::>()) 568 | .map_or(Ok(None), |v| v.map(Some))? 569 | .unwrap_or_default() 570 | .into_iter() 571 | .map(|str| ContentRoot::from_string(&str)) 572 | .collect::>(); 573 | 574 | let transitive = value 575 | .get("transitive") 576 | .map(|v| v.clone().try_into::()) 577 | .map_or(Ok(None), |v| v.map(Some))? 578 | .unwrap_or(false); 579 | 580 | let allow_policies = AllowPolicies::new(parse_policies(value, "allow_policies")?); 581 | let deny_policies = DenyPolicies::new(parse_policies(value, "deny_policies")?); 582 | 583 | let rules = Rules { 584 | prune, 585 | transitive, 586 | content_roots, 587 | allow_policies, 588 | deny_policies, 589 | }; 590 | 591 | Ok(Dependency { 592 | name, 593 | coordinate, 594 | specification, 595 | rules, 596 | }) 597 | } 598 | 599 | fn parse_policies(toml: &Value, source: &str) -> Result, ParseError> { 600 | toml.get(source) 601 | .map(|v| v.clone().try_into::>()) 602 | .map_or(Ok(None), |v| v.map(Some))? 603 | .unwrap_or_default() 604 | .into_iter() 605 | .map(|s| FilePolicy::try_from_str(&s)) 606 | .collect::, _>>() 607 | } 608 | 609 | fn parse_revision(value: &toml::Value) -> Result { 610 | let revstring = value.clone().try_into::()?; 611 | 612 | Ok(Revision::Pinned { 613 | revision: revstring, 614 | }) 615 | } 616 | 617 | #[cfg(test)] 618 | mod tests { 619 | use std::str::FromStr; 620 | 621 | use super::*; 622 | use pretty_assertions::assert_eq; 623 | 624 | #[test] 625 | fn load_valid_file_one_dep() { 626 | let str = r#" 627 | name = "test_file" 628 | description = "this is a description" 629 | proto_out_dir= "./path/to/proto_out" 630 | [dependency1] 631 | protocol = "https" 632 | url = "github.com/org/repo" 633 | revision = "1.0.0" 634 | "#; 635 | let expected = Descriptor { 636 | name: ModuleName::from("test_file"), 637 | description: Some("this is a description".to_string()), 638 | proto_out_dir: Some("./path/to/proto_out".to_string()), 639 | dependencies: vec![Dependency { 640 | name: ModuleName::new("dependency1".to_string()), 641 | coordinate: Coordinate { 642 | forge: "github.com".to_string(), 643 | organization: "org".to_string(), 644 | repository: "repo".to_string(), 645 | protocol: Some(Protocol::Https), 646 | }, 647 | specification: RevisionSpecification { 648 | revision: Revision::pinned("1.0.0"), 649 | branch: None, 650 | }, 651 | rules: Default::default(), 652 | }], 653 | }; 654 | assert_eq!(Descriptor::from_toml_str(str).unwrap(), expected); 655 | } 656 | 657 | #[test] 658 | fn load_valid_file_no_revision() { 659 | let str = r#" 660 | name = "test_file" 661 | description = "this is a description" 662 | proto_out_dir= "./path/to/proto_out" 663 | [dependency1] 664 | protocol = "https" 665 | url = "github.com/org/repo" 666 | "#; 667 | let expected = Descriptor { 668 | name: ModuleName::from("test_file"), 669 | description: Some("this is a description".to_string()), 670 | proto_out_dir: Some("./path/to/proto_out".to_string()), 671 | dependencies: vec![Dependency { 672 | name: ModuleName::new("dependency1".to_string()), 673 | coordinate: Coordinate { 674 | forge: "github.com".to_string(), 675 | organization: "org".to_string(), 676 | repository: "repo".to_string(), 677 | protocol: Some(Protocol::Https), 678 | }, 679 | specification: RevisionSpecification { 680 | revision: Revision::Arbitrary, 681 | branch: None, 682 | }, 683 | rules: Default::default(), 684 | }], 685 | }; 686 | assert_eq!(Descriptor::from_toml_str(str).unwrap(), expected); 687 | assert_eq!(expected.into_toml(), toml::Value::from_str(str).unwrap()) 688 | } 689 | 690 | #[test] 691 | fn load_valid_file_one_dep_with_rules() { 692 | let str = r#" 693 | name = "test_file" 694 | description = "this is a description" 695 | proto_out_dir= "./path/to/proto_out" 696 | [dependency1] 697 | protocol = "https" 698 | url = "github.com/org/repo" 699 | revision = "1.0.0" 700 | prune = true 701 | content_roots = ["src"] 702 | allow_policies = ["/foo/proto/file.proto", "/foo/other/*", "*/some/path/*"] 703 | "#; 704 | let expected = Descriptor { 705 | name: ModuleName::from("test_file"), 706 | description: Some("this is a description".to_string()), 707 | proto_out_dir: Some("./path/to/proto_out".to_string()), 708 | dependencies: vec![Dependency { 709 | name: ModuleName::new("dependency1".to_string()), 710 | coordinate: Coordinate { 711 | forge: "github.com".to_string(), 712 | organization: "org".to_string(), 713 | repository: "repo".to_string(), 714 | protocol: Some(Protocol::Https), 715 | }, 716 | specification: RevisionSpecification { 717 | revision: Revision::pinned("1.0.0"), 718 | branch: None, 719 | }, 720 | rules: Rules { 721 | prune: true, 722 | content_roots: BTreeSet::from([ContentRoot::from_string("src")]), 723 | transitive: false, 724 | allow_policies: AllowPolicies::new(BTreeSet::from([ 725 | FilePolicy::new(PolicyKind::File, PathBuf::from("/foo/proto/file.proto")), 726 | FilePolicy::new(PolicyKind::Prefix, PathBuf::from("/foo/other")), 727 | FilePolicy::new(PolicyKind::SubPath, PathBuf::from("/some/path")), 728 | ])), 729 | deny_policies: DenyPolicies::default(), 730 | }, 731 | }], 732 | }; 733 | assert_eq!(Descriptor::from_toml_str(str).unwrap(), expected); 734 | } 735 | 736 | #[test] 737 | #[should_panic] 738 | fn load_invalid_file_invalid_rule() { 739 | let str = r#" 740 | name = "test_file" 741 | description = "this is a description" 742 | proto_out_dir= "./path/to/proto_out" 743 | [dependency1] 744 | protocol = "https" 745 | url = "github.com/org/repo" 746 | revision = "1.0.0" 747 | prune = true 748 | content_roots = ["src"] 749 | allow_policies = ["/foo/proto/file.java"] 750 | "#; 751 | Descriptor::from_toml_str(str).unwrap(); 752 | } 753 | 754 | #[test] 755 | fn load_valid_file_multiple_dep() { 756 | let str = r#" 757 | name = "test_file" 758 | proto_out_dir= "./path/to/proto_out" 759 | 760 | [dependency1] 761 | protocol = "https" 762 | url = "github.com/org/repo" 763 | revision = "1.0.0" 764 | [dependency2] 765 | protocol = "https" 766 | url = "github.com/org/repo" 767 | revision = "2.0.0" 768 | [dependency3] 769 | protocol = "https" 770 | url = "github.com/org/repo" 771 | revision = "3.0.0" 772 | "#; 773 | let expected = Descriptor { 774 | name: ModuleName::from("test_file"), 775 | description: None, 776 | proto_out_dir: Some("./path/to/proto_out".to_string()), 777 | dependencies: vec![ 778 | Dependency { 779 | name: ModuleName::new("dependency1".to_string()), 780 | coordinate: Coordinate { 781 | forge: "github.com".to_string(), 782 | organization: "org".to_string(), 783 | repository: "repo".to_string(), 784 | protocol: Some(Protocol::Https), 785 | }, 786 | specification: RevisionSpecification { 787 | revision: Revision::pinned("1.0.0"), 788 | branch: None, 789 | }, 790 | rules: Default::default(), 791 | }, 792 | Dependency { 793 | name: ModuleName::new("dependency2".to_string()), 794 | coordinate: Coordinate { 795 | forge: "github.com".to_string(), 796 | organization: "org".to_string(), 797 | repository: "repo".to_string(), 798 | protocol: Some(Protocol::Https), 799 | }, 800 | specification: RevisionSpecification { 801 | revision: Revision::pinned("2.0.0"), 802 | branch: None, 803 | }, 804 | rules: Default::default(), 805 | }, 806 | Dependency { 807 | name: ModuleName::new("dependency3".to_string()), 808 | coordinate: Coordinate { 809 | forge: "github.com".to_string(), 810 | organization: "org".to_string(), 811 | repository: "repo".to_string(), 812 | protocol: Some(Protocol::Https), 813 | }, 814 | specification: RevisionSpecification { 815 | revision: Revision::pinned("3.0.0"), 816 | branch: None, 817 | }, 818 | rules: Default::default(), 819 | }, 820 | ], 821 | }; 822 | 823 | let mut res = Descriptor::from_toml_str(str).unwrap().dependencies; 824 | res.sort(); 825 | 826 | let mut exp = expected.dependencies; 827 | exp.sort(); 828 | 829 | assert_eq!(res, exp); 830 | } 831 | 832 | #[test] 833 | fn load_file_no_deps() { 834 | let str = r#" 835 | name = "test_file" 836 | proto_out_dir = "./path/to/proto_out" 837 | "#; 838 | let expected = Descriptor { 839 | name: ModuleName::from("test_file"), 840 | description: None, 841 | proto_out_dir: Some("./path/to/proto_out".to_string()), 842 | dependencies: vec![], 843 | }; 844 | assert_eq!(Descriptor::from_toml_str(str).unwrap(), expected); 845 | assert_eq!(expected.into_toml(), toml::Value::from_str(str).unwrap()) 846 | } 847 | 848 | #[test] 849 | fn load_invalid_protocol() { 850 | let str = r#" 851 | name = "test_file" 852 | proto_out_dir = "./path/to/proto_out" 853 | [dependency1] 854 | protocol = "ftp" 855 | url = "github.com/org/repo" 856 | revision = "1.0.0" 857 | "#; 858 | assert!(Descriptor::from_toml_str(str).is_err()); 859 | } 860 | 861 | #[test] 862 | fn load_invalid_url() { 863 | let str = r#" 864 | name = "test_file" 865 | proto_out_dir = "./path/to/proto_out" 866 | [dependency1] 867 | protocol = "ftp" 868 | url = "github.com/org" 869 | revision = "1.0.0" 870 | "#; 871 | assert!(Descriptor::from_toml_str(str).is_err()); 872 | } 873 | 874 | #[test] 875 | fn build_coordinate() { 876 | let str = "github.com/coralogix/cx-api-users"; 877 | assert_eq!( 878 | Coordinate::from_url(str).unwrap(), 879 | Coordinate { 880 | forge: "github.com".to_owned(), 881 | organization: "coralogix".to_owned(), 882 | repository: "cx-api-users".to_owned(), 883 | protocol: None, 884 | } 885 | ); 886 | } 887 | 888 | #[test] 889 | fn build_coordinate_slash() { 890 | let str = "github.com/coralogix/cx-api-users/"; 891 | assert_eq!( 892 | Coordinate::from_url(str).unwrap(), 893 | Coordinate { 894 | forge: "github.com".to_owned(), 895 | organization: "coralogix".to_owned(), 896 | repository: "cx-api-users".to_owned(), 897 | protocol: None, 898 | } 899 | ); 900 | } 901 | 902 | #[test] 903 | fn test_allow_policies_rule_filter() { 904 | let rules = AllowPolicies::new(BTreeSet::from([ 905 | FilePolicy::try_from_str("/foo/proto/file.proto").unwrap(), 906 | FilePolicy::try_from_str("/foo/other/*").unwrap(), 907 | FilePolicy::try_from_str("*/path/*").unwrap(), 908 | ])); 909 | 910 | let path = vec![ 911 | PathBuf::from("/foo/proto/file.proto"), 912 | PathBuf::from("/foo/other/file1.proto"), 913 | PathBuf::from("/some/path/file.proto"), 914 | ]; 915 | 916 | let res = AllowPolicies::filter(&rules, &path); 917 | assert_eq!(res.len(), 3); 918 | } 919 | 920 | #[test] 921 | fn test_allow_policies_rule_filter_edge_case_slash_path() { 922 | let rules = AllowPolicies::new(BTreeSet::from([ 923 | FilePolicy::try_from_str("/foo/proto/file.proto").unwrap(), 924 | FilePolicy::try_from_str("/foo/other/*").unwrap(), 925 | FilePolicy::try_from_str("*/path/*").unwrap(), 926 | ])); 927 | 928 | let path = vec![ 929 | PathBuf::from("foo/proto/file.proto"), 930 | PathBuf::from("foo/other/file2.proto"), 931 | ]; 932 | 933 | let res = AllowPolicies::filter(&rules, &path); 934 | assert_eq!(res.len(), 2); 935 | } 936 | 937 | #[test] 938 | fn test_allow_policies_rule_filter_edge_case_slash_rule() { 939 | let allow_policies = AllowPolicies::new(BTreeSet::from([ 940 | FilePolicy::try_from_str("foo/proto/file.proto").unwrap(), 941 | FilePolicy::try_from_str("foo/other/*").unwrap(), 942 | FilePolicy::try_from_str("*/path/*").unwrap(), 943 | ])); 944 | 945 | let files = vec![ 946 | PathBuf::from("/foo/proto/file.proto"), 947 | PathBuf::from("/foo/other/file2.proto"), 948 | PathBuf::from("/path/dep/file3.proto"), 949 | ]; 950 | 951 | let res = AllowPolicies::filter(&allow_policies, &files); 952 | assert_eq!(res.len(), 3); 953 | } 954 | 955 | #[test] 956 | fn test_deny_policies_rule_filter() { 957 | let rules = DenyPolicies::new(BTreeSet::from([ 958 | FilePolicy::try_from_str("/foo/proto/file.proto").unwrap(), 959 | FilePolicy::try_from_str("/foo/other/*").unwrap(), 960 | FilePolicy::try_from_str("*/path/*").unwrap(), 961 | ])); 962 | 963 | let files = vec![ 964 | PathBuf::from("/foo/proto/file.proto"), 965 | PathBuf::from("/foo/other/file1.proto"), 966 | PathBuf::from("/some/path/file.proto"), 967 | ]; 968 | 969 | let res = DenyPolicies::deny_files(&rules, &files); 970 | assert_eq!(res.len(), 0); 971 | } 972 | 973 | #[test] 974 | fn test_deny_policies_rule_filter_file() { 975 | let rules = DenyPolicies::new(BTreeSet::from([ 976 | FilePolicy::try_from_str("/foo/proto/file.proto").unwrap(), 977 | FilePolicy::try_from_str("/foo/other/*").unwrap(), 978 | FilePolicy::try_from_str("*/path/*").unwrap(), 979 | ])); 980 | 981 | let file = PathBuf::from("/foo/proto/file.proto"); 982 | 983 | let res = DenyPolicies::should_deny_file(&rules, &file); 984 | assert!(res); 985 | } 986 | } 987 | -------------------------------------------------------------------------------- /src/model/protofetch/resolved.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeSet; 2 | 3 | use super::{Coordinate, ModuleName, RevisionSpecification, Rules}; 4 | 5 | pub struct ResolvedModule { 6 | pub module_name: ModuleName, 7 | pub dependencies: Vec, 8 | } 9 | 10 | #[derive(Debug, Clone, Eq, PartialEq)] 11 | pub struct ResolvedDependency { 12 | pub name: ModuleName, 13 | pub commit_hash: String, 14 | pub coordinate: Coordinate, 15 | pub specification: RevisionSpecification, 16 | pub rules: Rules, 17 | pub dependencies: BTreeSet, 18 | } 19 | -------------------------------------------------------------------------------- /src/proto.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::HashSet, 3 | fs::File, 4 | io::{BufRead, BufReader}, 5 | path::{Path, PathBuf}, 6 | }; 7 | 8 | use log::{debug, info, trace}; 9 | use thiserror::Error; 10 | 11 | use crate::{ 12 | cache::RepositoryCache, 13 | model::protofetch::{ 14 | resolved::{ResolvedDependency, ResolvedModule}, 15 | AllowPolicies, DenyPolicies, ModuleName, 16 | }, 17 | }; 18 | 19 | #[derive(Error, Debug)] 20 | pub enum ProtoError { 21 | #[error("Bad proto path. {0}")] 22 | BadPath(String), 23 | #[error("IO error: {0}")] 24 | IO(#[from] std::io::Error), 25 | #[error(transparent)] 26 | Cache(anyhow::Error), 27 | } 28 | 29 | /// Represents a mapping for a proto file between the source repo directory and the desired target. 30 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 31 | struct ProtoFileMapping { 32 | from: PathBuf, 33 | to: PathBuf, 34 | } 35 | 36 | /// Proto file canonical representation 37 | /// * full_path: the full path to the proto file 38 | /// * package_path: the package path of the proto file 39 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 40 | struct ProtoFileCanonicalMapping { 41 | full_path: PathBuf, 42 | package_path: PathBuf, 43 | } 44 | 45 | /// proto_dir: Base path to the directory where the proto files are to be copied to 46 | /// cache_src_dir: Base path to the directory where the dependencies sources are cached 47 | /// lockfile: The lockfile that contains the dependencies to be copied 48 | pub fn copy_proto_files( 49 | cache: &impl RepositoryCache, 50 | resolved: &ResolvedModule, 51 | proto_dir: &Path, 52 | ) -> Result<(), ProtoError> { 53 | info!( 54 | "Copying proto files from {} descriptor...", 55 | resolved.module_name 56 | ); 57 | if !proto_dir.exists() { 58 | std::fs::create_dir_all(proto_dir)?; 59 | } 60 | 61 | let deps = collect_all_root_dependencies(resolved); 62 | 63 | for dep in &deps { 64 | let dep_cache_dir = cache 65 | .create_worktree(&dep.coordinate, &dep.commit_hash, &dep.name) 66 | .map_err(ProtoError::Cache)?; 67 | let sources_to_copy: HashSet = if !dep.rules.prune { 68 | copy_all_proto_files_for_dep(&dep_cache_dir, dep)? 69 | } else { 70 | pruned_transitive_dependencies(cache, dep, resolved)? 71 | }; 72 | let without_denied_files = sources_to_copy 73 | .into_iter() 74 | .filter(|m| !DenyPolicies::should_deny_file(&dep.rules.deny_policies, &m.to)) 75 | .collect(); 76 | copy_proto_sources_for_dep(proto_dir, &dep_cache_dir, dep, &without_denied_files)?; 77 | } 78 | Ok(()) 79 | } 80 | 81 | /// Copy all proto files for a dependency to the proto_dir 82 | /// Takes into account content_roots and Allow list rules 83 | fn copy_all_proto_files_for_dep( 84 | dep_cache_dir: &Path, 85 | dep: &ResolvedDependency, 86 | ) -> Result, ProtoError> { 87 | let mut proto_mapping: Vec = Vec::new(); 88 | for file in dep_cache_dir.read_dir()? { 89 | let path = file?.path(); 90 | let proto_files = find_proto_files(path.as_path())?; 91 | for proto_file_source in proto_files { 92 | let proto_src = path_strip_prefix(&proto_file_source, dep_cache_dir)?; 93 | let proto_package_path = zoom_in_content_root(dep, &proto_src)?; 94 | if !AllowPolicies::should_allow_file(&dep.rules.allow_policies, &proto_package_path) { 95 | trace!( 96 | "Filtering out proto file {} based on allow_policies rules.", 97 | &proto_file_source.to_string_lossy() 98 | ); 99 | continue; 100 | } 101 | proto_mapping.push(ProtoFileMapping { 102 | from: proto_src, 103 | to: proto_package_path, 104 | }); 105 | } 106 | } 107 | Ok(proto_mapping.into_iter().collect()) 108 | } 109 | 110 | /// Returns an HashSet of ProtoFileMapping to the proto files that `dep` depends on. It recursively 111 | /// iterates all the dependencies of `dep` and its transitive dependencies based on imports 112 | /// until no new dependencies are found. 113 | fn pruned_transitive_dependencies( 114 | cache: &impl RepositoryCache, 115 | dep: &ResolvedDependency, 116 | lockfile: &ResolvedModule, 117 | ) -> Result, ProtoError> { 118 | fn process_mapping_file( 119 | cache: &impl RepositoryCache, 120 | mapping: ProtoFileCanonicalMapping, 121 | dep: &ResolvedDependency, 122 | lockfile: &ResolvedModule, 123 | visited: &mut HashSet, 124 | deps: &mut HashSet, 125 | ) -> Result<(), ProtoError> { 126 | visited.insert(mapping.package_path.clone()); 127 | let file_deps = extract_proto_dependencies_from_file(mapping.full_path.as_path())?; 128 | let mut dependencies = collect_transitive_dependencies(dep, lockfile); 129 | dependencies.push(dep.clone()); 130 | let mut new_mappings = canonical_mapping_for_proto_files(cache, &file_deps, &dependencies)?; 131 | trace!("Adding {:?}.", &new_mappings); 132 | new_mappings.push(mapping); 133 | deps.extend(new_mappings.clone()); 134 | Ok(()) 135 | } 136 | 137 | /// Recursively loop through all the file dependencies based on imports 138 | /// Looks in own repository and in transitive dependencies. 139 | fn inner_loop( 140 | cache: &impl RepositoryCache, 141 | dep: &ResolvedDependency, 142 | lockfile: &ResolvedModule, 143 | visited: &mut HashSet, 144 | found_proto_deps: &mut HashSet, 145 | ) -> Result<(), ProtoError> { 146 | let dep_dir = cache 147 | .create_worktree(&dep.coordinate, &dep.commit_hash, &dep.name) 148 | .map_err(ProtoError::Cache)?; 149 | for dir in dep_dir.read_dir()? { 150 | let proto_files = find_proto_files(&dir?.path())?; 151 | let filtered_mapping = filtered_proto_files(proto_files, &dep_dir, dep, false) 152 | .into_iter() 153 | .collect(); 154 | let file_dependencies: HashSet = found_proto_deps 155 | .intersection(&filtered_mapping) 156 | .cloned() 157 | .collect(); 158 | let file_dependencies_not_visited: HashSet = 159 | file_dependencies 160 | .into_iter() 161 | .filter(|p| !visited.contains(&p.package_path)) 162 | .collect(); 163 | for mapping in file_dependencies_not_visited { 164 | process_mapping_file(cache, mapping, dep, lockfile, visited, found_proto_deps)?; 165 | inner_loop(cache, dep, lockfile, visited, found_proto_deps)?; 166 | } 167 | } 168 | Ok(()) 169 | } 170 | 171 | let mut found_proto_deps: HashSet = HashSet::new(); 172 | let mut visited: HashSet = HashSet::new(); 173 | let mut visited_dep: HashSet = HashSet::new(); 174 | debug!("Extracting proto files for {}", &dep.name); 175 | 176 | let dep_dir = cache 177 | .create_worktree(&dep.coordinate, &dep.commit_hash, &dep.name) 178 | .map_err(ProtoError::Cache)?; 179 | for dir in dep_dir.read_dir()? { 180 | let proto_files = find_proto_files(&dir?.path())?; 181 | let filtered_mapping = filtered_proto_files(proto_files, &dep_dir, dep, true); 182 | trace!("Filtered size {:?}.", &filtered_mapping.len()); 183 | for mapping in filtered_mapping { 184 | process_mapping_file( 185 | cache, 186 | mapping, 187 | dep, 188 | lockfile, 189 | &mut visited, 190 | &mut found_proto_deps, 191 | )?; 192 | inner_loop(cache, dep, lockfile, &mut visited, &mut found_proto_deps)?; 193 | } 194 | } 195 | 196 | // Select proto files for the transitive dependencies of this dependency 197 | let t_deps = collect_transitive_dependencies(dep, lockfile); 198 | for t_dep in t_deps { 199 | trace!( 200 | "Extracting transitive proto dependencies from {} for dependency {} ", 201 | &t_dep.name, 202 | &dep.name 203 | ); 204 | visited_dep.insert(t_dep.name.clone()); 205 | inner_loop(cache, &t_dep, lockfile, &mut visited, &mut found_proto_deps)?; 206 | } 207 | debug!( 208 | "Found {:?} proto files for dependency {}", 209 | found_proto_deps.len(), 210 | dep.name 211 | ); 212 | Ok(found_proto_deps 213 | .into_iter() 214 | .map(|p| ProtoFileMapping { 215 | from: p.full_path, 216 | to: p.package_path, 217 | }) 218 | .collect()) 219 | } 220 | 221 | fn copy_proto_sources_for_dep( 222 | proto_dir: &Path, 223 | dep_cache_dir: &Path, 224 | dep: &ResolvedDependency, 225 | sources_to_copy: &HashSet, 226 | ) -> Result<(), ProtoError> { 227 | debug!( 228 | "Copying {:?} proto files for dependency {}", 229 | sources_to_copy.len(), 230 | dep.name 231 | ); 232 | for mapping in sources_to_copy { 233 | trace!( 234 | "Copying proto file from {} to {}", 235 | &mapping.from.to_string_lossy(), 236 | &mapping.to.to_string_lossy() 237 | ); 238 | let proto_file_source = dep_cache_dir.join(&mapping.from); 239 | let proto_file_out = proto_dir.join(&mapping.to); 240 | let prefix = proto_file_out.parent().ok_or_else(|| { 241 | ProtoError::BadPath(format!( 242 | "Bad parent dest file for {}", 243 | &proto_file_out.to_string_lossy() 244 | )) 245 | })?; 246 | std::fs::create_dir_all(prefix)?; 247 | std::fs::copy(proto_file_source, proto_file_out.as_path())?; 248 | } 249 | Ok(()) 250 | } 251 | 252 | /// Extracts the dependencies from a proto file 253 | fn extract_proto_dependencies_from_file(file: &Path) -> Result, ProtoError> { 254 | let mut dependencies = Vec::new(); 255 | let mut reader = BufReader::new(File::open(file)?); 256 | let mut line = String::new(); 257 | while reader.read_line(&mut line)? > 0 { 258 | if line.starts_with("import ") { 259 | if let Some(dependency) = line.split_whitespace().nth(1) { 260 | let dependency = dependency.to_string().replace([';', '\"'], ""); 261 | dependencies.push(PathBuf::from(dependency)); 262 | } 263 | } 264 | line.clear(); 265 | } 266 | Ok(dependencies) 267 | } 268 | 269 | /// Find proto files in a directory 270 | fn find_proto_files(dir: &Path) -> Result, ProtoError> { 271 | let mut files: Vec = Vec::new(); 272 | if dir.is_dir() { 273 | for entry in std::fs::read_dir(dir)? { 274 | let entry = entry?; 275 | let path = entry.path(); 276 | if path.is_dir() { 277 | let rec_call = find_proto_files(&path)?; 278 | files.append(&mut rec_call.clone()); 279 | } else if let Some(extension) = path.extension() { 280 | if extension == "proto" { 281 | files.push(path); 282 | } 283 | } 284 | } 285 | } 286 | Ok(files) 287 | } 288 | 289 | ///From a dep and a lockfile, returns the transitive dependencies of the dep 290 | fn collect_transitive_dependencies( 291 | dep: &ResolvedDependency, 292 | lockfile: &ResolvedModule, 293 | ) -> Vec { 294 | lockfile 295 | .dependencies 296 | .clone() 297 | .into_iter() 298 | .filter(|x| dep.dependencies.contains(&x.name) || x.rules.transitive) 299 | .collect::>() 300 | } 301 | 302 | /// Collects all root dependencies based on pruning rules and transitive dependencies 303 | /// This still has a limitation. At the moment. 304 | /// If a dependency is flagged as transitive it will only be included in transitive fetching which uses pruning. 305 | fn collect_all_root_dependencies(resolved: &ResolvedModule) -> Vec { 306 | let mut deps = Vec::new(); 307 | 308 | for dep in &resolved.dependencies { 309 | let pruned = resolved 310 | .dependencies 311 | .iter() 312 | .any(|iter_dep| iter_dep.dependencies.contains(&dep.name) && iter_dep.rules.prune); 313 | 314 | let non_pruned = resolved 315 | .dependencies 316 | .iter() 317 | .any(|iter_dep| iter_dep.dependencies.contains(&dep.name) && !iter_dep.rules.prune); 318 | 319 | if (!pruned && !dep.rules.transitive) || non_pruned { 320 | deps.push(dep.clone()); 321 | } 322 | } 323 | deps 324 | } 325 | 326 | /// This is removing the prefix which is needed to actually load file to extract protos from imports 327 | fn filtered_proto_files( 328 | proto_files: Vec, 329 | dep_dir: &Path, 330 | dep: &ResolvedDependency, 331 | should_filter: bool, 332 | ) -> Vec { 333 | proto_files 334 | .into_iter() 335 | .filter_map(|p| { 336 | let path = path_strip_prefix(&p, dep_dir).ok()?; 337 | let zoom = zoom_in_content_root(dep, &path).ok()?; 338 | if AllowPolicies::should_allow_file(&dep.rules.allow_policies, &zoom) || !should_filter 339 | { 340 | Some(ProtoFileCanonicalMapping { 341 | full_path: p, 342 | package_path: zoom, 343 | }) 344 | } else { 345 | None 346 | } 347 | }) 348 | .collect() 349 | } 350 | 351 | /// Takes a slice of proto files, cache source directory and a slice of dependencies associated with these files 352 | /// and builds the full proto file paths from the package path returning a ProtoFileCanonicalMapping. 353 | /// This is used to be able to later on copy the files from the source directory to the user defined output directory. 354 | fn canonical_mapping_for_proto_files( 355 | cache: &impl RepositoryCache, 356 | proto_files: &[PathBuf], 357 | deps: &[ResolvedDependency], 358 | ) -> Result, ProtoError> { 359 | let r: Result, ProtoError> = proto_files 360 | .iter() 361 | .map(|p| { 362 | let zoom_out = zoom_out_content_root(cache, deps, p)?; 363 | Ok(ProtoFileCanonicalMapping { 364 | full_path: zoom_out, 365 | package_path: p.to_path_buf(), 366 | }) 367 | }) 368 | .collect::, _>>(); 369 | r 370 | } 371 | 372 | /// Remove content_root part of path if found 373 | fn zoom_in_content_root( 374 | dep: &ResolvedDependency, 375 | proto_file_source: &Path, 376 | ) -> Result { 377 | let mut proto_src = proto_file_source.to_path_buf(); 378 | if !dep.rules.content_roots.is_empty() { 379 | let root = dep 380 | .rules 381 | .content_roots 382 | .iter() 383 | .find(|c_root| proto_file_source.starts_with(&c_root.value)); 384 | if let Some(c_root) = root { 385 | trace!( 386 | "[Zoom in] Found valid content root {} for {}.", 387 | c_root.value.to_string_lossy(), 388 | proto_file_source.to_string_lossy() 389 | ); 390 | proto_src = path_strip_prefix(proto_file_source, &c_root.value)?; 391 | } 392 | } 393 | Ok(proto_src) 394 | } 395 | 396 | fn zoom_out_content_root( 397 | cache: &impl RepositoryCache, 398 | deps: &[ResolvedDependency], 399 | proto_file_source: &Path, 400 | ) -> Result { 401 | let mut proto_src = proto_file_source.to_path_buf(); 402 | for dep in deps { 403 | let dep_dir = cache 404 | .create_worktree(&dep.coordinate, &dep.commit_hash, &dep.name) 405 | .map_err(ProtoError::Cache)?; 406 | for dir in dep_dir.read_dir()? { 407 | let proto_files = find_proto_files(&dir?.path())?; 408 | if let Some(path) = proto_files 409 | .into_iter() 410 | .find(|p| p.ends_with(proto_file_source)) 411 | { 412 | trace!( 413 | "[Zoom out] Found path root {} for {}.", 414 | path.to_string_lossy(), 415 | proto_file_source.to_string_lossy() 416 | ); 417 | proto_src = path; 418 | } 419 | } 420 | } 421 | Ok(proto_src) 422 | } 423 | 424 | fn path_strip_prefix(path: &Path, prefix: &Path) -> Result { 425 | path.strip_prefix(prefix) 426 | .map_err(|_err| { 427 | { 428 | ProtoError::BadPath(format!( 429 | "Could not create proto source file path in {}. Wrong base dir {}", 430 | path.to_string_lossy(), 431 | prefix.to_string_lossy() 432 | )) 433 | } 434 | }) 435 | .map(|s| s.to_path_buf()) 436 | } 437 | 438 | #[cfg(test)] 439 | mod tests { 440 | use std::{ 441 | collections::{BTreeSet, HashSet}, 442 | path::{Path, PathBuf}, 443 | }; 444 | 445 | use crate::model::protofetch::{ 446 | ContentRoot, Coordinate, FilePolicy, RevisionSpecification, Rules, 447 | }; 448 | 449 | use super::*; 450 | 451 | use pretty_assertions::assert_eq; 452 | 453 | struct FakeCache { 454 | root: PathBuf, 455 | } 456 | 457 | impl RepositoryCache for FakeCache { 458 | fn fetch(&self, _: &Coordinate, _: &RevisionSpecification, _: &str) -> anyhow::Result<()> { 459 | Ok(()) 460 | } 461 | 462 | fn create_worktree( 463 | &self, 464 | _: &Coordinate, 465 | commit_hash: &str, 466 | name: &ModuleName, 467 | ) -> anyhow::Result { 468 | Ok(self.root.join(name.as_str()).join(commit_hash)) 469 | } 470 | } 471 | 472 | #[test] 473 | fn content_root_dependencies() { 474 | let cache_dir = project_root::get_project_root() 475 | .unwrap() 476 | .join(Path::new("resources/cache/dep3/hash3")); 477 | let lock_file = ResolvedDependency { 478 | name: ModuleName::new("dep3".to_string()), 479 | commit_hash: "hash3".to_string(), 480 | coordinate: Coordinate::from_url("example.com/org/dep3").unwrap(), 481 | specification: RevisionSpecification::default(), 482 | dependencies: BTreeSet::new(), 483 | rules: Rules { 484 | content_roots: BTreeSet::from([ContentRoot::from_string("root")]), 485 | ..Default::default() 486 | }, 487 | }; 488 | let expected_dep_1: HashSet = vec![ 489 | PathBuf::from("proto/example.proto"), 490 | PathBuf::from("proto/root.proto"), 491 | ] 492 | .into_iter() 493 | .collect(); 494 | 495 | let result: HashSet = copy_all_proto_files_for_dep(&cache_dir, &lock_file) 496 | .unwrap() 497 | .into_iter() 498 | .map(|p| p.to) 499 | .collect(); 500 | 501 | assert_eq!(result, expected_dep_1); 502 | } 503 | 504 | #[test] 505 | fn pruned_dependencies() { 506 | let cache_dir = project_root::get_project_root() 507 | .unwrap() 508 | .join("resources/cache"); 509 | let lock_file = ResolvedModule { 510 | module_name: ModuleName::from("test"), 511 | dependencies: vec![ 512 | ResolvedDependency { 513 | name: ModuleName::new("dep1".to_string()), 514 | commit_hash: "hash1".to_string(), 515 | coordinate: Coordinate::from_url("example.com/org/dep1").unwrap(), 516 | specification: RevisionSpecification::default(), 517 | dependencies: BTreeSet::from([ModuleName::new("dep2".to_string())]), 518 | rules: Rules { 519 | prune: true, 520 | allow_policies: AllowPolicies::new(BTreeSet::from([ 521 | FilePolicy::try_from_str("/proto/example.proto").unwrap(), 522 | ])), 523 | ..Default::default() 524 | }, 525 | }, 526 | ResolvedDependency { 527 | name: ModuleName::new("dep2".to_string()), 528 | commit_hash: "hash2".to_string(), 529 | coordinate: Coordinate::from_url("example.com/org/dep2").unwrap(), 530 | specification: RevisionSpecification::default(), 531 | dependencies: BTreeSet::new(), 532 | rules: Rules::default(), 533 | }, 534 | ], 535 | }; 536 | let expected_dep_1: HashSet = vec![ 537 | PathBuf::from("proto/example.proto"), 538 | PathBuf::from("proto/example2.proto"), 539 | PathBuf::from("proto/example3.proto"), 540 | PathBuf::from("proto/example5.proto"), 541 | PathBuf::from("scalapb/scalapb.proto"), 542 | PathBuf::from("google/protobuf/descriptor.proto"), 543 | PathBuf::from("google/protobuf/struct.proto"), 544 | ] 545 | .into_iter() 546 | .collect(); 547 | 548 | let pruned1: HashSet = pruned_transitive_dependencies( 549 | &FakeCache { root: cache_dir }, 550 | lock_file.dependencies.first().unwrap(), 551 | &lock_file, 552 | ) 553 | .unwrap() 554 | .into_iter() 555 | .map(|p| p.to) 556 | .collect(); 557 | 558 | assert_eq!(pruned1, expected_dep_1); 559 | } 560 | 561 | #[test] 562 | fn extract_dependencies_test() { 563 | let path = project_root::get_project_root() 564 | .unwrap() 565 | .join(Path::new("resources/proto_out/example2.proto")); 566 | let dependencies = extract_proto_dependencies_from_file(&path).unwrap(); 567 | assert_eq!(dependencies.len(), 3); 568 | assert_eq!(dependencies[0].to_string_lossy(), "scalapb/scalapb.proto"); 569 | assert_eq!( 570 | dependencies[1].to_string_lossy(), 571 | "google/protobuf/descriptor.proto" 572 | ); 573 | assert_eq!( 574 | dependencies[2].to_string_lossy(), 575 | "google/protobuf/struct.proto" 576 | ); 577 | } 578 | 579 | #[test] 580 | fn collect_transitive_dependencies_test() { 581 | let lock_file = ResolvedModule { 582 | module_name: ModuleName::from("test"), 583 | dependencies: vec![ 584 | ResolvedDependency { 585 | name: ModuleName::new("dep1".to_string()), 586 | commit_hash: "hash1".to_string(), 587 | coordinate: Coordinate::from_url("example.com/org/dep1").unwrap(), 588 | specification: RevisionSpecification::default(), 589 | dependencies: BTreeSet::from([ 590 | ModuleName::new("dep2".to_string()), 591 | ModuleName::new("dep3".to_string()), 592 | ]), 593 | rules: Rules::default(), 594 | }, 595 | ResolvedDependency { 596 | name: ModuleName::new("dep2".to_string()), 597 | commit_hash: "hash2".to_string(), 598 | coordinate: Coordinate::from_url("example.com/org/dep2").unwrap(), 599 | specification: RevisionSpecification::default(), 600 | dependencies: BTreeSet::new(), 601 | rules: Rules::default(), 602 | }, 603 | ResolvedDependency { 604 | name: ModuleName::new("dep3".to_string()), 605 | commit_hash: "hash3".to_string(), 606 | coordinate: Coordinate::from_url("example.com/org/dep3").unwrap(), 607 | specification: RevisionSpecification::default(), 608 | dependencies: BTreeSet::new(), 609 | rules: Rules::default(), 610 | }, 611 | ResolvedDependency { 612 | name: ModuleName::new("dep4".to_string()), 613 | commit_hash: "hash4".to_string(), 614 | coordinate: Coordinate::from_url("example.com/org/dep4").unwrap(), 615 | specification: RevisionSpecification::default(), 616 | dependencies: BTreeSet::new(), 617 | rules: Rules { 618 | transitive: true, 619 | ..Default::default() 620 | }, 621 | }, 622 | ], 623 | }; 624 | 625 | let mut it = lock_file.dependencies.iter(); 626 | let result = collect_transitive_dependencies(it.next().unwrap(), &lock_file); 627 | assert_eq!(result.len(), 3); 628 | assert!(result.contains(it.next().unwrap())); 629 | assert!(result.contains(it.next().unwrap())); 630 | assert!(result.contains(it.next().unwrap())); 631 | } 632 | 633 | #[test] 634 | fn collect_all_root_dependencies_() { 635 | let lock_file = ResolvedModule { 636 | module_name: ModuleName::from("test"), 637 | dependencies: vec![ 638 | ResolvedDependency { 639 | name: ModuleName::new("dep1".to_string()), 640 | commit_hash: "hash1".to_string(), 641 | coordinate: Coordinate::from_url("example.com/org/dep1").unwrap(), 642 | specification: RevisionSpecification::default(), 643 | dependencies: BTreeSet::new(), 644 | rules: Rules::default(), 645 | }, 646 | ResolvedDependency { 647 | name: ModuleName::new("dep2".to_string()), 648 | commit_hash: "hash2".to_string(), 649 | coordinate: Coordinate::from_url("example.com/org/dep2").unwrap(), 650 | specification: RevisionSpecification::default(), 651 | dependencies: BTreeSet::new(), 652 | rules: Rules::default(), 653 | }, 654 | ResolvedDependency { 655 | name: ModuleName::new("dep3".to_string()), 656 | commit_hash: "hash3".to_string(), 657 | coordinate: Coordinate::from_url("example.com/org/dep3").unwrap(), 658 | specification: RevisionSpecification::default(), 659 | dependencies: BTreeSet::new(), 660 | rules: Rules::default(), 661 | }, 662 | ], 663 | }; 664 | 665 | let result = collect_all_root_dependencies(&lock_file); 666 | assert_eq!(result.len(), 3); 667 | } 668 | 669 | #[test] 670 | fn collect_all_root_dependencies_filtered() { 671 | let lock_file = ResolvedModule { 672 | module_name: ModuleName::from("test"), 673 | dependencies: vec![ 674 | ResolvedDependency { 675 | name: ModuleName::new("dep1".to_string()), 676 | commit_hash: "hash1".to_string(), 677 | coordinate: Coordinate::from_url("example.com/org/dep1").unwrap(), 678 | specification: RevisionSpecification::default(), 679 | dependencies: BTreeSet::from([ModuleName::new("dep2".to_string())]), 680 | rules: Rules::default(), 681 | }, 682 | ResolvedDependency { 683 | name: ModuleName::new("dep2".to_string()), 684 | commit_hash: "hash2".to_string(), 685 | coordinate: Coordinate::from_url("example.com/org/dep2").unwrap(), 686 | specification: RevisionSpecification::default(), 687 | dependencies: BTreeSet::new(), 688 | rules: Rules::default(), 689 | }, 690 | ResolvedDependency { 691 | name: ModuleName::new("dep3".to_string()), 692 | commit_hash: "hash3".to_string(), 693 | coordinate: Coordinate::from_url("example.com/org/dep3").unwrap(), 694 | specification: RevisionSpecification::default(), 695 | dependencies: BTreeSet::from([ 696 | ModuleName::new("dep2".to_string()), 697 | ModuleName::new("dep5".to_string()), 698 | ]), 699 | rules: Rules { 700 | prune: true, 701 | transitive: false, 702 | ..Default::default() 703 | }, 704 | }, 705 | ResolvedDependency { 706 | name: ModuleName::new("dep4".to_string()), 707 | commit_hash: "hash4".to_string(), 708 | coordinate: Coordinate::from_url("example.com/org/dep4").unwrap(), 709 | specification: RevisionSpecification::default(), 710 | dependencies: BTreeSet::new(), 711 | rules: Rules::default(), 712 | }, 713 | ResolvedDependency { 714 | name: ModuleName::new("dep5".to_string()), 715 | commit_hash: "hash5".to_string(), 716 | coordinate: Coordinate::from_url("example.com/org/dep5").unwrap(), 717 | specification: RevisionSpecification::default(), 718 | dependencies: BTreeSet::new(), 719 | rules: Rules { 720 | prune: false, 721 | transitive: true, 722 | ..Default::default() 723 | }, 724 | }, 725 | ], 726 | }; 727 | 728 | let result = collect_all_root_dependencies(&lock_file); 729 | assert_eq!(result.len(), 4); 730 | } 731 | } 732 | -------------------------------------------------------------------------------- /src/resolver/git.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | git::cache::ProtofetchGitCache, 3 | model::protofetch::{Coordinate, ModuleName, RevisionSpecification}, 4 | }; 5 | 6 | use super::{CommitAndDescriptor, ModuleResolver}; 7 | 8 | impl ModuleResolver for ProtofetchGitCache { 9 | fn resolve( 10 | &self, 11 | coordinate: &Coordinate, 12 | specification: &RevisionSpecification, 13 | commit_hash: Option<&str>, 14 | name: &ModuleName, 15 | ) -> anyhow::Result { 16 | let repository = self.repository(coordinate)?; 17 | let commit_hash = if let Some(commit_hash) = commit_hash { 18 | repository.fetch_commit(specification, commit_hash)?; 19 | commit_hash.to_owned() 20 | } else { 21 | repository.fetch(specification)?; 22 | repository.resolve_commit_hash(specification)? 23 | }; 24 | let descriptor = repository.extract_descriptor(name, &commit_hash)?; 25 | Ok(CommitAndDescriptor { 26 | commit_hash, 27 | descriptor, 28 | }) 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/resolver/lock.rs: -------------------------------------------------------------------------------- 1 | use anyhow::bail; 2 | use log::debug; 3 | 4 | use crate::model::protofetch::{ 5 | lock::{LockFile, LockedCoordinate}, 6 | Coordinate, ModuleName, RevisionSpecification, 7 | }; 8 | 9 | use super::{CommitAndDescriptor, ModuleResolver}; 10 | 11 | pub struct LockFileModuleResolver<'a, R> { 12 | inner: R, 13 | lock_file: &'a LockFile, 14 | locked: bool, 15 | } 16 | 17 | impl<'a, R> LockFileModuleResolver<'a, R> { 18 | pub fn new(inner: R, lock_file: &'a LockFile, locked: bool) -> Self { 19 | Self { 20 | inner, 21 | lock_file, 22 | locked, 23 | } 24 | } 25 | } 26 | 27 | impl ModuleResolver for LockFileModuleResolver<'_, R> 28 | where 29 | R: ModuleResolver, 30 | { 31 | fn resolve( 32 | &self, 33 | coordinate: &Coordinate, 34 | specification: &RevisionSpecification, 35 | commit_hash: Option<&str>, 36 | name: &ModuleName, 37 | ) -> anyhow::Result { 38 | let locked_coordinate = LockedCoordinate::from(coordinate); 39 | let dependency = self.lock_file.dependencies.iter().find(|dependency| { 40 | dependency.coordinate == locked_coordinate && &dependency.specification == specification 41 | }); 42 | match dependency { 43 | Some(dependency) => { 44 | debug!( 45 | "Dependency {} {} found in the lock file with commit {}", 46 | coordinate, specification, dependency.commit_hash 47 | ); 48 | let resolved = self.inner.resolve( 49 | coordinate, 50 | specification, 51 | commit_hash.or(Some(&dependency.commit_hash)), 52 | name, 53 | )?; 54 | if resolved.commit_hash != dependency.commit_hash { 55 | bail!( 56 | "Commit hash of {} {} changed: the lock file specifies {}, but the actual commit hash is {}", 57 | coordinate, 58 | specification, 59 | dependency.commit_hash, 60 | resolved.commit_hash 61 | ); 62 | } 63 | Ok(resolved) 64 | } 65 | None if self.locked => { 66 | bail!( 67 | "No entry for {} {} in the lock file", 68 | coordinate, 69 | specification 70 | ); 71 | } 72 | None => { 73 | debug!( 74 | "Dependency {} {} not found in the lock file", 75 | coordinate, specification 76 | ); 77 | self.inner 78 | .resolve(coordinate, specification, commit_hash, name) 79 | } 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /src/resolver/mod.rs: -------------------------------------------------------------------------------- 1 | mod git; 2 | mod lock; 3 | 4 | use crate::model::protofetch::{Coordinate, Descriptor, ModuleName, RevisionSpecification}; 5 | 6 | pub use lock::LockFileModuleResolver; 7 | 8 | pub trait ModuleResolver { 9 | fn resolve( 10 | &self, 11 | coordinate: &Coordinate, 12 | specification: &RevisionSpecification, 13 | commit_hash: Option<&str>, 14 | name: &ModuleName, 15 | ) -> anyhow::Result; 16 | } 17 | 18 | #[derive(Clone)] 19 | pub struct CommitAndDescriptor { 20 | pub commit_hash: String, 21 | pub descriptor: Descriptor, 22 | } 23 | 24 | impl ModuleResolver for &T 25 | where 26 | T: ModuleResolver, 27 | { 28 | fn resolve( 29 | &self, 30 | coordinate: &Coordinate, 31 | specification: &RevisionSpecification, 32 | commit_hash: Option<&str>, 33 | name: &ModuleName, 34 | ) -> anyhow::Result { 35 | T::resolve(self, coordinate, specification, commit_hash, name) 36 | } 37 | } 38 | --------------------------------------------------------------------------------