├── .github ├── CODEOWNERS ├── dependabot.yml ├── workflows.disabled │ ├── documentation.yml │ └── release.yml └── workflows │ ├── commisery.yml │ ├── dependabot.yml │ ├── lint.yml │ ├── test.yml │ └── update.yml ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── Drawbridge.toml.example ├── LICENSE ├── api └── api.yml ├── crates ├── byte │ ├── Cargo.toml │ ├── LICENSE │ └── src │ │ └── lib.rs ├── client │ ├── Cargo.toml │ ├── LICENSE │ └── src │ │ ├── entity.rs │ │ ├── lib.rs │ │ ├── repo.rs │ │ ├── tag.rs │ │ ├── tree.rs │ │ └── user.rs ├── jose │ ├── Cargo.toml │ ├── LICENSE │ └── src │ │ ├── b64.rs │ │ ├── jwk.rs │ │ ├── jws.rs │ │ └── lib.rs ├── server │ ├── Cargo.toml │ ├── LICENSE │ └── src │ │ ├── auth │ │ ├── mod.rs │ │ ├── oidc.rs │ │ └── tls.rs │ │ ├── builder.rs │ │ ├── handle.rs │ │ ├── lib.rs │ │ ├── repos │ │ ├── get.rs │ │ ├── head.rs │ │ ├── mod.rs │ │ └── put.rs │ │ ├── store │ │ ├── entity.rs │ │ ├── mod.rs │ │ ├── repo.rs │ │ ├── tag.rs │ │ ├── tree.rs │ │ └── user.rs │ │ ├── tags │ │ ├── get.rs │ │ ├── head.rs │ │ ├── mod.rs │ │ ├── put.rs │ │ └── query.rs │ │ ├── trees │ │ ├── get.rs │ │ ├── head.rs │ │ ├── mod.rs │ │ └── put.rs │ │ └── users │ │ ├── get.rs │ │ ├── head.rs │ │ ├── mod.rs │ │ └── put.rs └── type │ ├── Cargo.toml │ ├── LICENSE │ └── src │ ├── digest │ ├── algorithm.rs │ ├── algorithms.rs │ ├── digests.rs │ ├── mod.rs │ ├── reader.rs │ ├── verifier.rs │ └── writer.rs │ ├── lib.rs │ ├── meta.rs │ ├── repository │ ├── config.rs │ ├── context.rs │ ├── mod.rs │ └── name.rs │ ├── tag │ ├── context.rs │ ├── entry.rs │ ├── mod.rs │ └── name.rs │ ├── tree │ ├── context.rs │ ├── directory.rs │ ├── entry.rs │ ├── mod.rs │ ├── name.rs │ └── path.rs │ └── user │ ├── context.rs │ ├── mod.rs │ ├── name.rs │ └── record.rs ├── flake.lock ├── flake.nix ├── rust-toolchain.toml ├── src ├── lib.rs └── main.rs ├── testdata ├── ca.conf ├── ca.crt ├── ca.key ├── ca.srl ├── client.conf ├── client.crt ├── client.csr ├── client.key ├── generate.sh ├── 
server.conf ├── server.crt ├── server.csr └── server.key └── tests └── mod.rs /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @profianinc/drawbridge @bstrie 2 | *.rs @profianinc/drawbridge @bstrie 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "cargo" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | - package-ecosystem: "github-actions" 8 | directory: "/" 9 | schedule: 10 | interval: "daily" 11 | -------------------------------------------------------------------------------- /.github/workflows.disabled/documentation.yml: -------------------------------------------------------------------------------- 1 | name: Documentation 2 | 3 | on: [ push, pull_request ] 4 | 5 | concurrency: 6 | group: ${{ github.workflow }}-${{ github.ref }} 7 | cancel-in-progress: true 8 | 9 | jobs: 10 | documentation: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | - uses: cachix/install-nix-action@v31 15 | with: 16 | extra_nix_config: | 17 | access-tokens = github.com=${{ github.token }} 18 | - uses: cachix/cachix-action@v16 19 | with: 20 | name: enarx 21 | authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' 22 | 23 | - run: nix build -L --show-trace '.#doc' -o doc/index.html 24 | - run: cp --remove-destination $(readlink -f doc/index.html) doc/index.html 25 | - uses: actions/upload-artifact@v4 26 | with: 27 | name: doc 28 | path: doc/index.html 29 | - uses: JamesIves/github-pages-deploy-action@v4.7.3 30 | if: github.ref == 'refs/heads/main' && github.event_name == 'push' 31 | with: 32 | branch: gh-pages 33 | folder: doc 34 | -------------------------------------------------------------------------------- /.github/workflows.disabled/release.yml: 
-------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | workflow_dispatch: 5 | pull_request: 6 | branches: 7 | - main 8 | push: 9 | branches: 10 | - main 11 | tags: 12 | - "v*.*.*" 13 | 14 | concurrency: 15 | group: ${{ github.workflow }}-${{ github.ref }} 16 | cancel-in-progress: true 17 | 18 | jobs: 19 | build: 20 | strategy: 21 | matrix: 22 | platform: 23 | - host: ubuntu-latest 24 | target: x86_64-unknown-linux-musl 25 | test-bin: ./result/bin/drawbridge --help 26 | test-oci: | 27 | docker load < ./result 28 | docker run --rm drawbridge:$(nix eval --raw .#drawbridge-x86_64-unknown-linux-musl-oci.imageTag) drawbridge --help 29 | 30 | - host: ubuntu-latest 31 | target: aarch64-unknown-linux-musl 32 | test-bin: nix shell --inputs-from . 'nixpkgs#qemu' -c qemu-aarch64 ./result/bin/drawbridge --help 33 | test-oci: docker load < ./result 34 | # TODO: Run the aarch64 binary within OCI 35 | 36 | runs-on: ${{ matrix.platform.host }} 37 | steps: 38 | - uses: actions/checkout@v4 39 | - uses: cachix/install-nix-action@v31 40 | with: 41 | extra_nix_config: | 42 | access-tokens = github.com=${{ github.token }} 43 | - uses: cachix/cachix-action@v16 44 | with: 45 | name: enarx 46 | authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' 47 | 48 | - run: nix build -L --show-trace '.#drawbridge-${{ matrix.platform.target }}' 49 | - run: nix run --inputs-from . 'nixpkgs#coreutils' -- --coreutils-prog=ginstall -p ./result/bin/drawbridge "drawbridge-${{ matrix.platform.target }}" 50 | - uses: actions/upload-artifact@v4 51 | with: 52 | name: drawbridge-${{ matrix.platform.target }} 53 | path: drawbridge-${{ matrix.platform.target }} 54 | - run: ${{ matrix.platform.test-bin }} 55 | 56 | - run: nix build -L --show-trace '.#drawbridge-${{ matrix.platform.target }}-oci' 57 | - run: nix run --inputs-from . 
'nixpkgs#coreutils' -- --coreutils-prog=ginstall -p ./result "drawbridge-${{ matrix.platform.target }}-oci" 58 | - uses: actions/upload-artifact@v4 59 | with: 60 | name: drawbridge-${{ matrix.platform.target }}-oci 61 | path: drawbridge-${{ matrix.platform.target }}-oci 62 | - run: ${{ matrix.platform.test-oci }} 63 | 64 | push_oci: 65 | needs: build 66 | permissions: 67 | actions: read 68 | packages: write 69 | runs-on: ubuntu-latest 70 | steps: 71 | - uses: actions/download-artifact@v4 72 | with: 73 | name: drawbridge-aarch64-unknown-linux-musl-oci 74 | - uses: actions/download-artifact@v4 75 | with: 76 | name: drawbridge-x86_64-unknown-linux-musl-oci 77 | - run: skopeo copy docker-archive:./drawbridge-aarch64-unknown-linux-musl-oci containers-storage:localhost/drawbridge:aarch64 78 | - run: skopeo copy docker-archive:./drawbridge-x86_64-unknown-linux-musl-oci containers-storage:localhost/drawbridge:x86_64 79 | - run: podman image ls 80 | - run: podman manifest create drawbridge:manifest 81 | - run: podman manifest add drawbridge:manifest containers-storage:localhost/drawbridge:aarch64 --arch=arm64 82 | - run: podman manifest add drawbridge:manifest containers-storage:localhost/drawbridge:x86_64 --arch=amd64 83 | - run: podman manifest inspect drawbridge:manifest 84 | - name: metadata 85 | id: metadata 86 | uses: docker/metadata-action@v5 87 | with: 88 | images: ghcr.io/profianinc/drawbridge 89 | tags: | 90 | type=ref,event=branch 91 | type=semver,pattern={{version}} 92 | type=semver,pattern={{major}}.{{minor}} 93 | type=semver,pattern={{major}},enable=${{ !startsWith(github.ref, 'refs/tags/v0.') }} 94 | sep-tags: " " 95 | - name: add tags 96 | if: github.event_name == 'push' 97 | run: podman tag drawbridge:manifest ${{ steps.metadata.outputs.tags }} 98 | - name: push to GitHub Packages 99 | if: github.event_name == 'push' 100 | uses: redhat-actions/push-to-registry@v2 101 | with: 102 | tags: ${{ steps.metadata.outputs.tags }} 103 | username: ${{ github.actor }} 
104 | password: ${{ github.token }} 105 | 106 | release: 107 | needs: build 108 | permissions: 109 | contents: write 110 | if: startsWith(github.ref, 'refs/tags/') && github.event_name == 'push' 111 | runs-on: ubuntu-latest 112 | steps: 113 | - uses: actions/download-artifact@v4 114 | with: 115 | name: drawbridge-aarch64-unknown-linux-musl 116 | - uses: actions/download-artifact@v4 117 | with: 118 | name: drawbridge-aarch64-unknown-linux-musl-oci 119 | - uses: actions/download-artifact@v4 120 | with: 121 | name: drawbridge-x86_64-unknown-linux-musl 122 | - uses: actions/download-artifact@v4 123 | with: 124 | name: drawbridge-x86_64-unknown-linux-musl-oci 125 | 126 | - uses: softprops/action-gh-release@v2 127 | with: 128 | draft: true 129 | prerelease: true 130 | files: | 131 | drawbridge-aarch64-unknown-linux-musl 132 | drawbridge-aarch64-unknown-linux-musl-oci 133 | drawbridge-x86_64-unknown-linux-musl 134 | drawbridge-x86_64-unknown-linux-musl-oci 135 | -------------------------------------------------------------------------------- /.github/workflows/commisery.yml: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2020-2020, TomTom (http://tomtom.com). 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | name: Commisery 16 | on: 17 | pull_request: 18 | types: [edited, opened, synchronize, reopened] 19 | 20 | concurrency: 21 | group: ${{ github.workflow }}-${{ github.ref }} 22 | cancel-in-progress: true 23 | 24 | permissions: 25 | contents: read 26 | pull-requests: read 27 | 28 | jobs: 29 | commit-message: 30 | name: Conventional Commit Message Checker (Commisery) 31 | runs-on: ubuntu-latest 32 | steps: 33 | - name: Check-out the repo under $GITHUB_WORKSPACE 34 | uses: actions/checkout@v4 35 | 36 | - name: Run Commisery 37 | uses: enarx/commisery-action@master 38 | with: 39 | token: ${{ secrets.GITHUB_TOKEN }} 40 | pull_request: ${{ github.event.number }} 41 | -------------------------------------------------------------------------------- /.github/workflows/dependabot.yml: -------------------------------------------------------------------------------- 1 | name: Dependabot auto-approve 2 | on: pull_request_target 3 | permissions: 4 | pull-requests: write 5 | jobs: 6 | dependabot: 7 | runs-on: ubuntu-latest 8 | if: ${{ github.event.pull_request.user.login == 'dependabot[bot]' }} 9 | steps: 10 | - name: Dependabot metadata 11 | id: dependabot-metadata 12 | uses: dependabot/fetch-metadata@v2 13 | - uses: actions/checkout@v4 14 | - name: Approve a PR if not already approved 15 | run: | 16 | gh pr checkout "$PR_URL" # sets the upstream metadata for `gh pr status` 17 | if [ "$(gh pr status --json reviewDecision -q .currentBranch.reviewDecision)" != "APPROVED" ]; 18 | then gh pr review --approve "$PR_URL" 19 | else echo "PR already approved, skipping additional approvals to minimize emails/notification noise."; 20 | fi 21 | env: 22 | PR_URL: ${{github.event.pull_request.html_url}} 23 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} 24 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | on: [ push, pull_request ] 
3 | jobs: 4 | rust: 5 | runs-on: ubuntu-latest 6 | steps: 7 | - uses: actions/checkout@v4 8 | - name: Setup Rust toolchain 9 | run: rustup show 10 | - name: cargo fmt 11 | run: cargo fmt -- --check 12 | - name: cargo clippy 13 | run: cargo clippy --workspace --all-features --tests -- -D warnings 14 | 15 | # nix-fmt: 16 | # name: nix fmt 17 | # runs-on: ubuntu-latest 18 | # steps: 19 | # - uses: actions/checkout@v4 20 | # - uses: cachix/install-nix-action@v31 21 | # with: 22 | # extra_nix_config: | 23 | # access-tokens = github.com=${{ github.token }} 24 | # - uses: cachix/cachix-action@v16 25 | # with: 26 | # name: enarx 27 | # authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' 28 | # - run: nix fmt 29 | 30 | # nix-flake-check: 31 | # name: nix flake check 32 | # runs-on: ubuntu-latest 33 | # steps: 34 | # - uses: actions/checkout@v4 35 | # - uses: cachix/install-nix-action@v31 36 | # with: 37 | # extra_nix_config: | 38 | # access-tokens = github.com=${{ github.token }} 39 | # - uses: cachix/cachix-action@v16 40 | # with: 41 | # name: enarx 42 | # authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' 43 | # - run: nix flake check -L --show-trace --keep-going 44 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | on: [ push, pull_request ] 3 | jobs: 4 | test: 5 | runs-on: ubuntu-latest 6 | steps: 7 | - uses: actions/checkout@v4 8 | - name: Setup Rust toolchain 9 | run: rustup show 10 | - name: cargo test 11 | run: cargo test --workspace 12 | -------------------------------------------------------------------------------- /.github/workflows/update.yml: -------------------------------------------------------------------------------- 1 | name: Update Nix flake 2 | 3 | on: 4 | schedule: 5 | - cron: "0 0 * * *" 6 | workflow_dispatch: 7 | 8 | jobs: 9 | nix-update: 10 | permissions: 11 | contents: write 12 | pull-requests: write 
13 | secrets: inherit 14 | uses: enarx/.github/.github/workflows/nix-update.yml@main 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /doc 2 | /target 3 | Drawbridge.toml 4 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "drawbridge" 3 | version = "0.4.3" 4 | authors = ["Profian Inc", "The Enarx Project Developers"] 5 | edition = "2021" 6 | license = "Apache-2.0" 7 | homepage = "https://github.com/enarx/drawbridge" 8 | repository = "https://github.com/enarx/drawbridge" 9 | description = "Drawbridge library." 10 | keywords = ["drawbridge"] 11 | 12 | [workspace] 13 | members = [ 14 | "crates/byte", 15 | "crates/client", 16 | "crates/jose", 17 | "crates/server", 18 | "crates/type", 19 | ] 20 | 21 | [workspace.dependencies] 22 | # Internal dependencies 23 | drawbridge-byte = { path = "./crates/byte", version = "0.4.3" } 24 | drawbridge-client = { path = "./crates/client", version = "0.4.3" } 25 | drawbridge-jose = { path = "./crates/jose", version = "0.4.3" } 26 | drawbridge-server = { path = "./crates/server", version = "0.4.3" } 27 | drawbridge-type = { path = "./crates/type", version = "0.4.3" } 28 | 29 | # External dependencies 30 | anyhow = { version = "1.0.98", default-features = false } 31 | async-h1 = { version = "2.3.4", default-features = false } 32 | async-std = { version = "1.13.0", default-features = false } 33 | axum = { version = "0.5.17", default-features = false } 34 | base64 = { version = "0.22.1", default-features = false } 35 | camino = { version = "1.1.10", default-features = false } 36 | cap-async-std = { version = "0.26.1", default-features = true, features = ["fs_utf8"] } 37 | clap = { version = "4.5.39", default-features = false, features = ["derive", 
"error-context", "help", "std", "usage", "wrap_help"] } 38 | confargs = { version = "0.1.3", default-features = false } 39 | futures = { version = "0.3.31", default-features = false } 40 | futures-rustls = { version = "0.26.0", default-features = false } 41 | headers = { version = "0.3.9", default-features = false } 42 | http = { version = "0.2.12", default-features = false } 43 | http-types = { version = "2.12.0", default-features = false } 44 | hyper = { version = "0.14.32", default-features = false } 45 | jsonwebtoken = { version = "9.3.1", default-features = false } 46 | mediatype = { version = "0.19.20", default-features = false } 47 | mime = { version = "0.3.17", default-features = false } 48 | once_cell = { version = "1.21.3", default-features = false } 49 | openidconnect = { version = "3.5.0", default-features = false } 50 | rsa = { version = "0.9.8", default-features = false } 51 | rustls = { version = "0.23.27", default-features = false } 52 | rustls-pemfile = { version = "2.2.0", default-features = false } 53 | rustls-pki-types = { version = "1.12.0", default-features = false } 54 | semver = { version = "1.0.26", default-features = false } 55 | serde = { version = "1.0.219", default-features = false } 56 | serde_json = { version = "1.0.140", default-features = false } 57 | sha2 = { version = "0.10.9", default-features = false } 58 | tempfile = { version = "3.20.0", default-features = false } 59 | tokio-util = { version = "0.7.15", default-features = false } 60 | tower = { version = "0.4.13", default-features = false } 61 | tower-http = { version = "0.4.4", default-features = false } 62 | tracing = { version = "0.1.41", default-features = false, features = ["release_max_level_debug"] } 63 | tracing-subscriber = { version = "0.3.19", default-features = false, features = ["ansi", "env-filter", "std", "tracing-log", "json"] } 64 | ureq = { version = "2.12.1", default-features = false } 65 | url = { version = "2.5.4", default-features = false } 66 | uuid = { 
version = "1.17.0", default-features = false, features = ["v4"] } 67 | walkdir = { version = "2.5.0", default-features = false } 68 | webpki-roots = { version = "1.0.0", default-features = false } 69 | zeroize = { version = "1.8.1", default-features = false } 70 | 71 | [dependencies] 72 | # Internal dependencies 73 | drawbridge-byte = { workspace = true } 74 | drawbridge-client = { workspace = true, optional = true } 75 | drawbridge-jose = { workspace = true } 76 | drawbridge-server = { workspace = true } 77 | drawbridge-type = { workspace = true, features = ["server"] } 78 | 79 | # External dependencies 80 | anyhow = { workspace = true } 81 | async-std = { workspace = true, features = ["attributes"] } 82 | clap = { workspace = true } 83 | confargs = { workspace = true } 84 | futures = { workspace = true } 85 | tracing = { workspace = true } 86 | tracing-subscriber = { workspace = true } 87 | 88 | [dev-dependencies] 89 | # Internal dependencies 90 | drawbridge-client = { workspace = true } 91 | 92 | # External dependencies 93 | async-h1 = { workspace = true } 94 | async-std = { workspace = true, features = ["attributes", "default"] } 95 | http-types = { workspace = true } 96 | jsonwebtoken = { workspace = true } 97 | openidconnect = { workspace = true } 98 | rsa = { workspace = true } 99 | rustls = { workspace = true } 100 | rustls-pemfile = { workspace = true } 101 | rustls-pki-types = { workspace = true } 102 | serde = { workspace = true } 103 | serde_json = { workspace = true, features = ["std"] } 104 | tempfile = { workspace = true } 105 | 106 | [features] 107 | client = ["drawbridge-client"] 108 | -------------------------------------------------------------------------------- /Drawbridge.toml.example: -------------------------------------------------------------------------------- 1 | # Any TOML file can be used to provide arguments to the Drawbridge CLI as follows: 2 | # drawbridge @Drawbridge.toml 3 | # or equivalently, during development: 4 | # cargo run 
-- @Drawbridge.toml 5 | ca = "testdata/ca.crt" 6 | cert = "testdata/server.crt" 7 | key = "testdata/server.key" 8 | # Drawbridge will persist files to the following path. 9 | # If you'd prefer to use temporary files that will be cleaned up automatically, 10 | # for the sake of testing and development, then use the `mktemp` command: 11 | # cargo run -- @Drawbridge.toml --store $(mktemp -d) 12 | #store = "PATH-TO-STORE" 13 | oidc-client = "9SVWiB3sQQdzKqpZmMNvsb9rzd8Ha21F" 14 | oidc-label = "auth.profian.com" 15 | oidc-issuer = "https://auth.profian.com" 16 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright(c) 2022 The Enarx Project 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /crates/byte/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "drawbridge-byte" 3 | version = "0.4.3" 4 | authors = ["Profian Inc", "The Enarx Project Developers"] 5 | edition = "2021" 6 | license = "Apache-2.0" 7 | homepage = "https://github.com/enarx/drawbridge" 8 | repository = "https://github.com/enarx/drawbridge" 9 | description = "Utilities for working with contiguous arrays of bytes and easy conversions to and from Base64 representations in string contexts." 
10 | keywords = ["drawbridge", "base64"] 11 | categories = ["no-std"] 12 | 13 | [dependencies] 14 | base64 = { workspace = true, features = ["alloc"] } 15 | serde = { workspace = true, optional = true } 16 | 17 | [features] 18 | alloc = ["serde?/alloc"] 19 | std = ["serde?/std"] 20 | -------------------------------------------------------------------------------- /crates/byte/src/lib.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | //! This crate provides a [`Bytes`] type which wraps most types that represent 5 | //! a contiguous array of bytes. It provides implementations for easy 6 | //! conversions to and from Base64 representations in string contexts. 7 | 8 | #![no_std] 9 | #![forbid(unsafe_code, clippy::expect_used, clippy::panic)] 10 | #![deny( 11 | clippy::all, 12 | absolute_paths_not_starting_with_crate, 13 | deprecated_in_future, 14 | missing_copy_implementations, 15 | missing_debug_implementations, 16 | missing_docs, 17 | noop_method_call, 18 | rust_2018_compatibility, 19 | rust_2018_idioms, 20 | rust_2021_compatibility, 21 | single_use_lifetimes, 22 | trivial_bounds, 23 | trivial_casts, 24 | trivial_numeric_casts, 25 | unreachable_code, 26 | unreachable_patterns, 27 | unreachable_pub, 28 | unstable_features, 29 | unused, 30 | unused_crate_dependencies, 31 | unused_import_braces, 32 | unused_lifetimes, 33 | unused_qualifications, 34 | unused_results, 35 | variant_size_differences 36 | )] 37 | 38 | extern crate alloc; 39 | 40 | use alloc::vec::Vec; 41 | 42 | use base64::Engine; 43 | use core::fmt::{Debug, Display, Formatter}; 44 | use core::marker::PhantomData; 45 | use core::ops::{Deref, DerefMut}; 46 | use core::str::FromStr; 47 | 48 | use base64::engine::general_purpose::GeneralPurpose; 49 | use base64::engine::general_purpose::{STANDARD, STANDARD_NO_PAD, URL_SAFE, URL_SAFE_NO_PAD}; 50 | 51 | mod sealed { 52 | pub trait 
Config { 53 | const CONFIG: base64::engine::general_purpose::GeneralPurpose; 54 | } 55 | } 56 | 57 | use sealed::Config; 58 | 59 | /// Standard Base64 encoding with padding 60 | #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] 61 | pub struct Standard(()); 62 | 63 | impl Config for Standard { 64 | const CONFIG: GeneralPurpose = STANDARD; 65 | } 66 | 67 | /// Standard Base64 encoding without padding 68 | #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] 69 | pub struct StandardNoPad(()); 70 | 71 | impl Config for StandardNoPad { 72 | const CONFIG: GeneralPurpose = STANDARD_NO_PAD; 73 | } 74 | 75 | /// URL-safe Base64 encoding with padding 76 | #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] 77 | pub struct UrlSafe(()); 78 | 79 | impl Config for UrlSafe { 80 | const CONFIG: GeneralPurpose = URL_SAFE; 81 | } 82 | 83 | /// URL-safe Base64 encoding without padding 84 | #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] 85 | pub struct UrlSafeNoPad(()); 86 | 87 | impl Config for UrlSafeNoPad { 88 | const CONFIG: GeneralPurpose = URL_SAFE_NO_PAD; 89 | } 90 | 91 | /// A wrapper for bytes which provides base64 encoding in string contexts 92 | #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] 93 | pub struct Bytes(T, PhantomData); 94 | 95 | impl Debug for Bytes { 96 | fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { 97 | f.debug_tuple("Bytes").field(&self.0).finish() 98 | } 99 | } 100 | 101 | impl Default for Bytes { 102 | fn default() -> Self { 103 | Self(Default::default(), PhantomData) 104 | } 105 | } 106 | 107 | impl Bytes { 108 | /// Consumes the outer type, returning the inner type 109 | pub fn into_inner(self) -> T { 110 | self.0 111 | } 112 | } 113 | 114 | impl From for Bytes { 115 | fn from(value: T) -> Self { 116 | Self(value, PhantomData) 117 | } 118 | } 119 | 120 | impl, U: ?Sized, C> AsRef for Bytes { 121 | fn as_ref(&self) -> &U { 122 | self.0.as_ref() 123 | } 124 | } 125 
| 126 | impl, U: ?Sized, C> AsMut for Bytes { 127 | fn as_mut(&mut self) -> &mut U { 128 | self.0.as_mut() 129 | } 130 | } 131 | 132 | impl Deref for Bytes { 133 | type Target = T; 134 | 135 | fn deref(&self) -> &Self::Target { 136 | &self.0 137 | } 138 | } 139 | 140 | impl DerefMut for Bytes { 141 | fn deref_mut(&mut self) -> &mut Self::Target { 142 | &mut self.0 143 | } 144 | } 145 | 146 | impl, C: Config> Display for Bytes { 147 | fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { 148 | f.write_str(&C::CONFIG.encode(self.0.as_ref())) 149 | } 150 | } 151 | 152 | impl>, C: Config> FromStr for Bytes { 153 | type Err = base64::DecodeError; 154 | 155 | fn from_str(s: &str) -> Result { 156 | C::CONFIG.decode(s).map(|x| Self(x.into(), PhantomData)) 157 | } 158 | } 159 | 160 | #[cfg(feature = "serde")] 161 | impl, C: Config> serde::Serialize for Bytes { 162 | fn serialize(&self, serializer: S) -> Result { 163 | if serializer.is_human_readable() { 164 | C::CONFIG.encode(self.0.as_ref()).serialize(serializer) 165 | } else { 166 | serializer.serialize_bytes(self.0.as_ref()) 167 | } 168 | } 169 | } 170 | 171 | #[cfg(feature = "serde")] 172 | impl<'de, T: From>, C: Config> serde::Deserialize<'de> for Bytes { 173 | fn deserialize>(deserializer: D) -> Result { 174 | use serde::de::Error; 175 | 176 | if deserializer.is_human_readable() { 177 | let b64 = alloc::borrow::Cow::<'de, str>::deserialize(deserializer)?; 178 | let buf = C::CONFIG 179 | .decode(b64.as_ref()) 180 | .map_err(|_| Error::custom("invalid base64"))?; 181 | Ok(Self(buf.into(), PhantomData)) 182 | } else { 183 | Ok(Self(Vec::deserialize(deserializer)?.into(), PhantomData)) 184 | } 185 | } 186 | } 187 | -------------------------------------------------------------------------------- /crates/client/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "drawbridge-client" 3 | version = "0.4.3" 4 | authors = ["Profian Inc", "The Enarx Project 
Developers"] 5 | edition = "2021" 6 | license = "Apache-2.0" 7 | homepage = "https://github.com/enarx/drawbridge" 8 | repository = "https://github.com/enarx/drawbridge" 9 | description = "Client library for interacting with the Drawbridge." 10 | keywords = ["drawbridge"] 11 | 12 | [dependencies] 13 | # Internal dependencies 14 | drawbridge-jose = { workspace = true } 15 | drawbridge-type = { workspace = true } 16 | 17 | # External dependencies 18 | anyhow = { workspace = true, features = ["std"] } 19 | http = { workspace = true } 20 | mime = { workspace = true } 21 | rustls = { workspace = true } 22 | rustls-pki-types = { workspace = true } 23 | serde_json = { workspace = true, features = ["std"] } 24 | ureq = { workspace = true, features = ["json", "tls"] } 25 | url = { workspace = true, features = ["serde"] } 26 | webpki-roots = { workspace = true } 27 | -------------------------------------------------------------------------------- /crates/client/src/entity.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::{scope, Client, Result, Scope}; 5 | 6 | use std::io::{copy, Read, Write}; 7 | use std::marker::PhantomData; 8 | use std::str::FromStr; 9 | 10 | use drawbridge_type::digest::{Algorithms, ContentDigest}; 11 | use drawbridge_type::Meta; 12 | 13 | use anyhow::{anyhow, bail, ensure, Context}; 14 | use http::header::{CONTENT_LENGTH, CONTENT_TYPE}; 15 | use http::StatusCode; 16 | use mime::Mime; 17 | use ureq::serde::{Deserialize, Serialize}; 18 | use ureq::{Request, Response}; 19 | 20 | fn parse_header(req: &Response, name: &str) -> Result 21 | where 22 | T: FromStr, 23 | T::Err: 'static + Sync + Send + std::error::Error, 24 | { 25 | req.header(name) 26 | .ok_or_else(|| anyhow!("missing `{name}` header"))? 
27 | .parse() 28 | .context(format!("failed to parse `{name}` header")) 29 | } 30 | 31 | #[derive(Clone, Debug)] 32 | pub struct Entity<'a, C: Scope, E: Scope> { 33 | client: &'a Client, 34 | path: String, 35 | phantom: PhantomData, 36 | } 37 | 38 | fn parse_ureq_error(e: ureq::Error) -> anyhow::Error { 39 | match e { 40 | ureq::Error::Status(code, msg) => match msg.into_string() { 41 | Ok(msg) if !msg.is_empty() => { 42 | anyhow!(msg).context(format!("request failed with status code `{code}`")) 43 | } 44 | _ => anyhow!("request failed with status code `{code}`"), 45 | }, 46 | 47 | ureq::Error::Transport(e) => anyhow::Error::new(e).context("transport layer failure"), 48 | } 49 | } 50 | 51 | impl<'a, C: Scope> Entity<'a, C, C> { 52 | pub fn new(client: &'a Client) -> Self { 53 | Self { 54 | client, 55 | path: Default::default(), 56 | phantom: PhantomData, 57 | } 58 | } 59 | } 60 | 61 | impl<'a> Entity<'a, scope::Unknown, scope::Unknown> { 62 | /// Changes the scope of the entity. 63 | pub fn scope(self) -> Entity<'a, scope::Unknown, O> { 64 | Entity { 65 | client: self.client, 66 | path: self.path, 67 | phantom: PhantomData, 68 | } 69 | } 70 | } 71 | 72 | impl<'a, C: Scope, E: Scope> Entity<'a, C, E> { 73 | /// Returns a child [Entity] rooted at `path`. 
74 | pub fn child(&self, path: &str) -> Entity<'a, C, O> { 75 | Entity { 76 | client: self.client, 77 | path: format!("{}/{}", self.path, path), 78 | phantom: PhantomData, 79 | } 80 | } 81 | 82 | pub(super) fn create_request(&self, hash: &ContentDigest, mime: &Mime) -> Result { 83 | let token = self.client.token.as_ref().ok_or_else(|| { 84 | anyhow!("endpoint requires authorization, but no token was configured") 85 | })?; 86 | let url = self.client.url(&self.path)?; 87 | Ok(self 88 | .client 89 | .inner 90 | .put(url.as_str()) 91 | .set("Authorization", &format!("Bearer {token}")) 92 | .set("Content-Digest", &hash.to_string()) 93 | .set(CONTENT_TYPE.as_str(), mime.as_ref())) 94 | } 95 | 96 | pub(super) fn create_bytes(&self, mime: &Mime, data: impl AsRef<[u8]>) -> Result { 97 | let data = data.as_ref(); 98 | let (n, hash) = Algorithms::default() 99 | .read_sync(data) 100 | .context("failed to compute content digest")?; 101 | ensure!( 102 | n == data.len() as u64, 103 | "invalid amount of bytes read, expected {}, read {n}", 104 | data.len(), 105 | ); 106 | let res = self 107 | .create_request(&hash, mime)? 108 | .send_bytes(data) 109 | .map_err(parse_ureq_error)?; 110 | match StatusCode::from_u16(res.status()) { 111 | Ok(StatusCode::CREATED) => Ok(true), 112 | Ok(StatusCode::OK) => Ok(false), 113 | _ => bail!("unexpected status code: {}", res.status()), 114 | } 115 | } 116 | 117 | pub(super) fn create_json(&self, mime: &Mime, val: &impl Serialize) -> Result { 118 | let buf = serde_json::to_vec(val).context("failed to encode value to JSON")?; 119 | self.create_bytes(mime, buf) 120 | } 121 | 122 | pub(super) fn create_from( 123 | &self, 124 | Meta { hash, size, mime }: &Meta, 125 | rdr: impl Read, 126 | ) -> Result { 127 | let res = self 128 | .create_request(hash, mime)? 
129 | .set(CONTENT_LENGTH.as_str(), &size.to_string()) 130 | .send(rdr) 131 | .map_err(parse_ureq_error)?; 132 | match StatusCode::from_u16(res.status()) { 133 | Ok(StatusCode::CREATED) => Ok(true), 134 | Ok(StatusCode::OK) => Ok(false), 135 | _ => bail!("unexpected status code: {}", res.status()), 136 | } 137 | } 138 | 139 | pub fn get(&self, limit: u64) -> Result<(Meta, impl Read)> { 140 | let url = self.client.url(&self.path)?; 141 | let mut req = self.client.inner.get(url.as_str()); 142 | if let Some(ref token) = self.client.token { 143 | req = req.set("Authorization", &format!("Bearer {token}")) 144 | } 145 | let res = req 146 | .set("Accept-Encoding", "") 147 | .call() 148 | .map_err(parse_ureq_error) 149 | .context("GET request failed")?; 150 | 151 | let hash: ContentDigest = parse_header(&res, "Content-Digest")?; 152 | let mime = parse_header(&res, CONTENT_TYPE.as_str())?; 153 | let size = parse_header(&res, CONTENT_LENGTH.as_str())?; 154 | ensure!( 155 | size <= limit, 156 | "response size of `{size}` exceeds the limit of `{limit}`" 157 | ); 158 | match StatusCode::from_u16(res.status()) { 159 | Ok(StatusCode::OK) => Ok(( 160 | Meta { 161 | hash: hash.clone(), 162 | size, 163 | mime, 164 | }, 165 | hash.verifier(res.into_reader().take(size)), 166 | )), 167 | _ => bail!("unexpected status code: {}", res.status()), 168 | } 169 | } 170 | 171 | pub fn get_to(&self, limit: u64, dst: &mut impl Write) -> Result { 172 | let (meta @ Meta { size, .. 
}, mut rdr) = self.get(limit)?; 173 | let n = copy(&mut rdr, dst)?; 174 | ensure!( 175 | n == size, 176 | "invalid amount of bytes read, expected {size}, read {n}" 177 | ); 178 | Ok(meta) 179 | } 180 | 181 | #[allow(single_use_lifetimes)] 182 | pub fn get_json(&self, limit: u64) -> Result<(Meta, T)> 183 | where 184 | for<'de> T: Deserialize<'de>, 185 | { 186 | let (meta, rdr) = self.get(limit)?; 187 | let v = serde_json::from_reader(rdr).context("failed to decode JSON")?; 188 | Ok((meta, v)) 189 | } 190 | 191 | pub fn get_bytes(&self, limit: u64) -> Result<(Meta, Vec)> { 192 | let (meta @ Meta { size, .. }, rdr) = self.get(limit)?; 193 | let mut rdr = rdr.take(limit); 194 | let mut buf = 195 | Vec::with_capacity(size.try_into().context("failed to convert u64 to usize")?); 196 | let n = copy(&mut rdr, &mut buf).context("I/O failure")?; 197 | ensure!( 198 | n == size, 199 | "invalid amount of bytes read, expected {size}, read {n}" 200 | ); 201 | Ok((meta, buf)) 202 | } 203 | 204 | pub fn get_string(&self, limit: u64) -> Result<(Meta, String)> { 205 | let (meta @ Meta { size, .. }, mut rdr) = self.get(limit)?; 206 | let size = size.try_into().context("failed to convert u64 to usize")?; 207 | let mut s = String::with_capacity(size); 208 | let n = rdr.read_to_string(&mut s).context("I/O failure")?; 209 | ensure!( 210 | n == size, 211 | "invalid amount of bytes read, expected {size}, read {n}" 212 | ); 213 | Ok((meta, s)) 214 | } 215 | } 216 | -------------------------------------------------------------------------------- /crates/client/src/lib.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | #![forbid(unsafe_code)] 5 | #![deny( 6 | clippy::all, 7 | absolute_paths_not_starting_with_crate, 8 | deprecated_in_future, 9 | missing_copy_implementations, 10 | missing_debug_implementations, 11 | noop_method_call, 12 | rust_2018_compatibility, 13 | rust_2018_idioms, 14 | rust_2021_compatibility, 15 | single_use_lifetimes, 16 | trivial_bounds, 17 | trivial_casts, 18 | trivial_numeric_casts, 19 | unreachable_code, 20 | unreachable_patterns, 21 | unreachable_pub, 22 | unstable_features, 23 | unused, 24 | unused_crate_dependencies, 25 | unused_import_braces, 26 | unused_lifetimes, 27 | unused_results, 28 | variant_size_differences 29 | )] 30 | 31 | mod entity; 32 | mod repo; 33 | mod tag; 34 | mod tree; 35 | mod user; 36 | 37 | pub use entity::*; 38 | pub use repo::*; 39 | pub use tag::*; 40 | pub use tree::*; 41 | pub use user::*; 42 | 43 | pub use drawbridge_jose as jose; 44 | pub use drawbridge_type as types; 45 | 46 | pub use anyhow::{Context, Result}; 47 | pub use mime; 48 | pub use url::Url; 49 | 50 | use std::marker::PhantomData; 51 | use std::sync::Arc; 52 | 53 | use drawbridge_type::{RepositoryContext, TagContext, TreeContext, UserContext}; 54 | 55 | use rustls::RootCertStore; 56 | use rustls_pki_types::CertificateDer; 57 | use rustls_pki_types::PrivateKeyDer; 58 | 59 | /// API version used by this crate 60 | pub const API_VERSION: &str = "0.1.0"; 61 | 62 | mod private { 63 | pub trait Scope: Copy + Clone {} 64 | } 65 | 66 | pub trait Scope: private::Scope {} 67 | 68 | impl Scope for T where T: private::Scope {} 69 | 70 | pub mod scope { 71 | use super::private::Scope; 72 | 73 | #[repr(transparent)] 74 | #[derive(Debug, Clone, Copy)] 75 | pub struct Root; 76 | impl Scope for Root {} 77 | 78 | #[repr(transparent)] 79 | #[derive(Debug, Clone, Copy)] 80 | pub struct User; 81 | impl Scope for User {} 82 | 83 | #[repr(transparent)] 84 | #[derive(Debug, Clone, Copy)] 85 | pub struct Repository; 86 | impl Scope 
for Repository {} 87 | 88 | #[repr(transparent)] 89 | #[derive(Debug, Clone, Copy)] 90 | pub struct Tag; 91 | impl Scope for Tag {} 92 | 93 | #[repr(transparent)] 94 | #[derive(Debug, Clone, Copy)] 95 | pub struct Node; 96 | impl Scope for Node {} 97 | 98 | #[repr(transparent)] 99 | #[derive(Debug, Clone, Copy)] 100 | pub struct Unknown; 101 | impl Scope for Unknown {} 102 | } 103 | 104 | #[derive(Clone, Debug)] 105 | pub struct Client { 106 | inner: ureq::Agent, 107 | root: Url, 108 | token: Option, 109 | scope: PhantomData, 110 | } 111 | 112 | impl Client { 113 | pub fn builder(url: Url) -> ClientBuilder { 114 | ClientBuilder::new(url) 115 | } 116 | 117 | pub fn new_scoped(url: Url) -> Result { 118 | Self::builder(url).build_scoped() 119 | } 120 | 121 | fn url(&self, path: &str) -> Result { 122 | format!("{}{path}", self.root) 123 | .parse() 124 | .context("failed to construct URL") 125 | } 126 | } 127 | 128 | impl Client { 129 | pub fn new(url: Url) -> Result { 130 | Self::builder(url).build() 131 | } 132 | 133 | pub fn user(&self, UserContext { name }: &UserContext) -> User<'_, scope::Root> { 134 | User::new(Entity::new(self), name) 135 | } 136 | 137 | pub fn repository<'a>( 138 | &'a self, 139 | RepositoryContext { owner, name }: &'a RepositoryContext, 140 | ) -> Repository<'a, scope::Root> { 141 | self.user(owner).repository(name) 142 | } 143 | 144 | pub fn tag<'a>( 145 | &'a self, 146 | TagContext { repository, name }: &'a TagContext, 147 | ) -> Tag<'a, scope::Root> { 148 | self.repository(repository).tag(name) 149 | } 150 | 151 | pub fn tree<'a>(&'a self, TreeContext { tag, path }: &'a TreeContext) -> Node<'a, scope::Root> { 152 | self.tag(tag).path(path) 153 | } 154 | } 155 | 156 | #[derive(Debug)] 157 | pub struct ClientBuilder { 158 | url: Url, 159 | credentials: Option<(Vec>, PrivateKeyDer<'static>)>, 160 | roots: Option, 161 | token: Option, 162 | user_agent: Option, 163 | scope: PhantomData, 164 | } 165 | 166 | impl Clone for ClientBuilder { 167 | fn 
clone(&self) -> Self { 168 | let credentials = if let Some((creds, key)) = self.credentials.as_ref() { 169 | Some((creds.clone(), key.clone_key())) 170 | } else { 171 | None 172 | }; 173 | 174 | Self { 175 | url: self.url.clone(), 176 | credentials, 177 | roots: self.roots.clone(), 178 | token: self.token.clone(), 179 | user_agent: self.user_agent.clone(), 180 | scope: self.scope, 181 | } 182 | } 183 | } 184 | 185 | impl ClientBuilder { 186 | pub fn new(url: Url) -> Self { 187 | Self { 188 | url, 189 | credentials: None, 190 | roots: None, 191 | token: None, 192 | user_agent: None, 193 | scope: PhantomData, 194 | } 195 | } 196 | 197 | pub fn user_agent(self, user_agent: impl Into) -> Self { 198 | Self { 199 | user_agent: Some(user_agent.into()), 200 | ..self 201 | } 202 | } 203 | 204 | pub fn credentials( 205 | self, 206 | cert: Vec>, 207 | key: PrivateKeyDer<'static>, 208 | ) -> Self { 209 | Self { 210 | credentials: Some((cert, key)), 211 | ..self 212 | } 213 | } 214 | 215 | pub fn roots(self, roots: RootCertStore) -> Self { 216 | Self { 217 | roots: Some(roots), 218 | ..self 219 | } 220 | } 221 | 222 | pub fn token(self, token: impl Into) -> Self { 223 | Self { 224 | token: Some(token.into()), 225 | ..self 226 | } 227 | } 228 | 229 | pub fn build_scoped(self) -> Result> { 230 | let tls = rustls::ClientConfig::builder().with_root_certificates( 231 | if let Some(roots) = self.roots { 232 | roots 233 | } else { 234 | RootCertStore { 235 | roots: webpki_roots::TLS_SERVER_ROOTS.to_vec(), 236 | } 237 | }, 238 | ); 239 | let tls = if let Some((cert, key)) = self.credentials { 240 | tls.with_client_auth_cert(cert, key)? 
241 | } else { 242 | tls.with_no_client_auth() 243 | }; 244 | 245 | let user_agent = self.user_agent.unwrap_or_else(|| { 246 | format!("{}/{}", env!("CARGO_CRATE_NAME"), env!("CARGO_PKG_VERSION")) 247 | }); 248 | 249 | Ok(Client { 250 | inner: ureq::AgentBuilder::new() 251 | .tls_config(Arc::new(tls)) 252 | .user_agent(&user_agent) 253 | .build(), 254 | root: self.url, 255 | token: self.token, 256 | scope: self.scope, 257 | }) 258 | } 259 | } 260 | 261 | impl ClientBuilder { 262 | pub fn build(self) -> Result> { 263 | let url = self 264 | .url 265 | .join(&format!("api/v{API_VERSION}")) 266 | .context("failed to construct URL")?; 267 | Self { url, ..self }.build_scoped() 268 | } 269 | } 270 | -------------------------------------------------------------------------------- /crates/client/src/repo.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::{scope, Entity, Result, Scope, Tag}; 5 | 6 | use std::ops::Deref; 7 | 8 | use drawbridge_type::{RepositoryConfig, RepositoryName, TagName}; 9 | 10 | use mime::APPLICATION_JSON; 11 | 12 | #[derive(Clone, Debug)] 13 | pub struct Repository<'a, S: Scope>(Entity<'a, S, scope::Repository>); 14 | 15 | impl<'a, S: Scope> Deref for Repository<'a, S> { 16 | type Target = Entity<'a, S, scope::Repository>; 17 | 18 | fn deref(&self) -> &Self::Target { 19 | &self.0 20 | } 21 | } 22 | 23 | impl<'a, S: Scope> Repository<'a, S> { 24 | pub fn new(entity: Entity<'a, S, scope::User>, name: &RepositoryName) -> Repository<'a, S> { 25 | Repository(entity.child(name.as_ref())) 26 | } 27 | 28 | pub fn create(&self, conf: &RepositoryConfig) -> Result { 29 | self.0.create_json(&APPLICATION_JSON, conf) 30 | } 31 | 32 | pub fn get(&self) -> Result { 33 | // TODO: Use a reasonable byte limit 34 | self.0.get_json(u64::MAX).map(|(_, v)| v) 35 | } 36 | 37 | pub fn tags(&self) -> Result> { 38 | self.0 39 | 
.child::("_tag") 40 | .get_json(u64::MAX) 41 | .map(|(_, v)| v) 42 | } 43 | 44 | pub fn tag(&self, name: &TagName) -> Tag<'a, S> { 45 | Tag::new(self.child("_tag"), name) 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /crates/client/src/tag.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::{scope, Entity, Node, Result, Scope}; 5 | 6 | use std::collections::BTreeMap; 7 | use std::io::Seek; 8 | use std::ops::Deref; 9 | use std::path::Path; 10 | 11 | use drawbridge_jose::jws::Jws; 12 | use drawbridge_jose::MediaTyped; 13 | use drawbridge_type::TreeContent::{Directory, File}; 14 | use drawbridge_type::{TagEntry, TagName, Tree, TreeEntry, TreePath}; 15 | 16 | use anyhow::Context; 17 | use ureq::serde::Serialize; 18 | 19 | #[derive(Clone, Debug)] 20 | pub struct Tag<'a, S: Scope>(Entity<'a, S, scope::Tag>); 21 | 22 | impl<'a, S: Scope> Deref for Tag<'a, S> { 23 | type Target = Entity<'a, S, scope::Tag>; 24 | 25 | fn deref(&self) -> &Self::Target { 26 | &self.0 27 | } 28 | } 29 | 30 | impl<'a, S: Scope> Tag<'a, S> { 31 | pub fn new(entity: Entity<'a, S, scope::Repository>, name: &TagName) -> Self { 32 | Tag(entity.child(&name.to_string())) 33 | } 34 | 35 | pub fn create(&self, entry: &TagEntry) -> Result { 36 | let mime = match entry { 37 | TagEntry::Unsigned(..) => TreeEntry::<()>::TYPE, 38 | TagEntry::Signed(..) 
=> Jws::TYPE, 39 | } 40 | .parse() 41 | .expect("failed to parse tag entry media type"); 42 | self.0.create_json(&mime, entry) 43 | } 44 | 45 | // TODO: Support signed tags 46 | pub fn create_from_path_unsigned( 47 | &self, 48 | path: impl AsRef, 49 | ) -> Result<(bool, BTreeMap)> { 50 | let tree = Tree::from_path_sync(path)?; 51 | let tag_created = self.create(&TagEntry::Unsigned(tree.root()))?; 52 | let tree_created = tree 53 | .into_iter() 54 | .map(|(path, TreeEntry { meta, content, .. })| { 55 | let node = Node::new(self.child("tree"), &path); 56 | let created = match content { 57 | File(mut file) => { 58 | file.rewind().context("failed to rewind file")?; 59 | node.create_from(&meta, file)? 60 | } 61 | Directory(buf) => node.create_from(&meta, buf.as_slice())?, 62 | }; 63 | Ok((path, created)) 64 | }) 65 | .collect::>()?; 66 | Ok((tag_created, tree_created)) 67 | } 68 | 69 | pub fn get(&self) -> Result { 70 | // TODO: Validate MIME type 71 | // TODO: Use a reasonable byte limit 72 | self.0.get_json(u64::MAX).map(|(_, v)| v) 73 | } 74 | 75 | pub fn path(&self, path: &TreePath) -> Node<'a, S> { 76 | Node::new(self.child("tree"), path) 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /crates/client/src/tree.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::{scope, Entity, Result, Scope}; 5 | 6 | use std::io::Read; 7 | use std::ops::Deref; 8 | 9 | use drawbridge_type::{Meta, TreeDirectory, TreeEntry, TreePath}; 10 | 11 | use mime::Mime; 12 | use ureq::serde::Serialize; 13 | 14 | #[derive(Clone, Debug)] 15 | pub struct Node<'a, S: Scope>(Entity<'a, S, scope::Node>); 16 | 17 | impl<'a, S: Scope> Deref for Node<'a, S> { 18 | type Target = Entity<'a, S, scope::Node>; 19 | 20 | fn deref(&self) -> &Self::Target { 21 | &self.0 22 | } 23 | } 24 | 25 | impl<'a, S: Scope> Node<'a, S> { 26 | pub fn new(entity: Entity<'a, S, scope::Node>, path: &TreePath) -> Self { 27 | if path.is_empty() { 28 | Self(entity) 29 | } else { 30 | Self(entity.child(&path.to_string())) 31 | } 32 | } 33 | 34 | pub fn create_bytes(&self, mime: &Mime, data: impl AsRef<[u8]>) -> Result { 35 | self.0.create_bytes(mime, data) 36 | } 37 | 38 | pub fn create_json(&self, mime: &Mime, val: &impl Serialize) -> Result { 39 | self.0.create_json(mime, val) 40 | } 41 | 42 | pub fn create_from(&self, meta: &Meta, rdr: impl Read) -> Result { 43 | self.0.create_from(meta, rdr) 44 | } 45 | 46 | pub fn create_directory(&self, dir: &TreeDirectory>) -> Result { 47 | let mime = TreeDirectory::::TYPE 48 | .parse() 49 | .expect("failed to parse tree directory media type"); 50 | self.create_json(&mime, dir) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /crates/client/src/user.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::{scope, Entity, Repository, Result, Scope}; 5 | 6 | use std::ops::Deref; 7 | 8 | use drawbridge_type::{RepositoryName, UserName, UserRecord}; 9 | 10 | use mime::APPLICATION_JSON; 11 | 12 | #[derive(Clone, Debug)] 13 | #[repr(transparent)] 14 | pub struct User<'a, S: Scope>(Entity<'a, S, scope::User>); 15 | 16 | impl<'a, S: Scope> Deref for User<'a, S> { 17 | type Target = Entity<'a, S, scope::User>; 18 | 19 | fn deref(&self) -> &Self::Target { 20 | &self.0 21 | } 22 | } 23 | 24 | impl<'a, S: Scope> User<'a, S> { 25 | pub fn new(entity: Entity<'a, S, scope::Root>, name: &UserName) -> Self { 26 | User(entity.child(&name.to_string())) 27 | } 28 | 29 | pub fn create(&self, conf: &UserRecord) -> Result { 30 | self.0.create_json(&APPLICATION_JSON, conf) 31 | } 32 | 33 | pub fn get(&self) -> Result { 34 | // TODO: Use a reasonable byte limit 35 | self.0.get_json(u64::MAX).map(|(_, v)| v) 36 | } 37 | 38 | pub fn repository(&self, name: &RepositoryName) -> Repository<'a, S> { 39 | Repository::new(self.0.clone(), name) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /crates/jose/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "drawbridge-jose" 3 | version = "0.4.3" 4 | authors = ["Profian Inc", "The Enarx Project Developers"] 5 | edition = "2021" 6 | license = "Apache-2.0" 7 | homepage = "https://github.com/enarx/drawbridge" 8 | repository = "https://github.com/enarx/drawbridge" 9 | description = "JOSE implementation" 10 | keywords = ["drawbridge"] 11 | 12 | [dependencies] 13 | # Internal dependencies 14 | drawbridge-byte = { workspace = true, features = ["serde"] } 15 | 16 | # External dependencies 17 | mediatype = { workspace = true, features = ["serde"] } 18 | serde = { workspace = true, features = ["derive"] } 19 | serde_json = { workspace = true, features = ["std"] } 20 | url = { workspace 
= true, features = ["serde"] } 21 | zeroize = { workspace = true, features = ["alloc"] } 22 | -------------------------------------------------------------------------------- /crates/jose/src/b64.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use std::ops::{Deref, DerefMut}; 5 | 6 | use drawbridge_byte::UrlSafeNoPad; 7 | 8 | use serde::de::DeserializeOwned; 9 | use serde::{ser::Error as _, Deserialize, Serialize}; 10 | 11 | pub type Bytes, C = UrlSafeNoPad> = drawbridge_byte::Bytes; 12 | 13 | #[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] 14 | pub struct Json(pub T); 15 | 16 | impl From for Json { 17 | fn from(value: T) -> Self { 18 | Self(value) 19 | } 20 | } 21 | 22 | impl Deref for Json { 23 | type Target = T; 24 | 25 | fn deref(&self) -> &Self::Target { 26 | &self.0 27 | } 28 | } 29 | 30 | impl DerefMut for Json { 31 | fn deref_mut(&mut self) -> &mut Self::Target { 32 | &mut self.0 33 | } 34 | } 35 | 36 | impl Serialize for Json { 37 | fn serialize(&self, serializer: S) -> Result { 38 | let buf = serde_json::to_vec(self).map_err(|_| S::Error::custom("encoding error"))?; 39 | Bytes::<_, UrlSafeNoPad>::from(buf).serialize(serializer) 40 | } 41 | } 42 | 43 | impl<'de, T: DeserializeOwned> Deserialize<'de> for Json { 44 | fn deserialize>(deserializer: D) -> Result { 45 | let buf = Bytes::>::deserialize(deserializer)?; 46 | let val = serde_json::from_slice(&buf).unwrap(); 47 | Ok(Self(val)) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /crates/jose/src/jws.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use crate::b64::{Bytes, Json}; 5 | use crate::{MediaTyped, Thumbprint}; 6 | 7 | use mediatype::MediaTypeBuf; 8 | use serde::{de::DeserializeOwned, Deserialize, Serialize}; 9 | use url::Url; 10 | 11 | #[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] 12 | pub struct Parameters { 13 | #[serde(skip_serializing_if = "Option::is_none", default)] 14 | pub alg: Option, 15 | 16 | #[serde(skip_serializing_if = "Option::is_none", default)] 17 | pub jku: Option, 18 | 19 | #[serde(skip_serializing_if = "Option::is_none", default)] 20 | pub jwk: Option>, 21 | 22 | #[serde(skip_serializing_if = "Option::is_none", default)] 23 | pub kid: Option, 24 | 25 | #[serde(skip_serializing_if = "Option::is_none", default)] 26 | pub x5u: Option, 27 | 28 | #[serde(skip_serializing_if = "Option::is_none", default)] 29 | pub x5c: Option>>>, // base64, not base64url 30 | 31 | #[serde(flatten)] 32 | pub x5t: Thumbprint, 33 | 34 | #[serde(skip_serializing_if = "Option::is_none", default)] 35 | pub typ: Option, 36 | 37 | #[serde(skip_serializing_if = "Option::is_none", default)] 38 | pub cty: Option, 39 | 40 | #[serde(skip_serializing_if = "Option::is_none", default)] 41 | pub crit: Option>, 42 | } 43 | 44 | impl MediaTyped for Jws { 45 | const TYPE: &'static str = "application/jose+json"; 46 | } 47 | 48 | #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] 49 | #[serde(bound(deserialize = "P: DeserializeOwned, H: Deserialize<'de>"))] 50 | #[serde(untagged)] 51 | pub enum Jws

{ 52 | General(General), 53 | Flattened(Flattened), 54 | } 55 | 56 | #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] 57 | #[serde(bound(deserialize = "P: DeserializeOwned, H: Deserialize<'de>"))] 58 | pub struct General

{ 59 | pub payload: Option, 60 | pub signatures: Vec>, 61 | } 62 | 63 | #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] 64 | #[serde(bound(deserialize = "P: DeserializeOwned, H: Deserialize<'de>"))] 65 | pub struct Flattened

{ 66 | pub payload: Option, 67 | 68 | #[serde(flatten)] 69 | pub signature: Signature, 70 | } 71 | 72 | #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] 73 | #[serde(bound(deserialize = "P: DeserializeOwned, H: Deserialize<'de>"))] 74 | pub struct Signature

{ 75 | pub protected: Option>, 76 | pub header: Option, 77 | pub signature: Bytes, 78 | } 79 | 80 | #[cfg(test)] 81 | mod tests { 82 | use super::*; 83 | use serde_json::json; 84 | 85 | // Example from RFC 7515 A.6.4 86 | #[test] 87 | fn general() { 88 | let payload = "eyJpc3MiOiJqb2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGFtcGxlLmNvbS9pc19yb290Ijp0cnVlfQ"; 89 | let signature0 = "cC4hiUPoj9Eetdgtv3hF80EGrhuB__dzERat0XF9g2VtQgr9PJbu3XOiZj5RZmh7AAuHIm4Bh-0Qc_lF5YKt_O8W2Fp5jujGbds9uJdbF9CUAr7t1dnZcAcQjbKBYNX4BAynRFdiuB--f_nZLgrnbyTyWzO75vRK5h6xBArLIARNPvkSjtQBMHlb1L07Qe7K0GarZRmB_eSN9383LcOLn6_dO--xi12jzDwusC-eOkHWEsqtFZESc6BfI7noOPqvhJ1phCnvWh6IeYI2w9QOYEUipUTI8np6LbgGY9Fs98rqVt5AXLIhWkWywlVmtVrBp0igcN_IoypGlUPQGe77Rw"; 90 | let signature1 = "DtEhU3ljbEg8L38VWAfUAqOyKAM6-Xx-F4GawxaepmXFCgfTjDxw5djxLa8ISlSApmWQxfKTUJqPP3-Kg6NU1Q"; 91 | 92 | let raw = json!({ 93 | "payload": payload, 94 | "signatures": [ 95 | { 96 | "protected":"eyJhbGciOiJSUzI1NiJ9", 97 | "header": { "kid": "2010-12-29" }, 98 | "signature": signature0, 99 | }, 100 | { 101 | "protected":"eyJhbGciOiJFUzI1NiJ9", 102 | "header": { "kid":"e9bc097a-ce51-4036-9562-d2ade882db0d" }, 103 | "signature": signature1, 104 | } 105 | ] 106 | }); 107 | 108 | let sig0 = Signature { 109 | header: Some(Parameters { 110 | kid: Some("2010-12-29".to_string()), 111 | ..Default::default() 112 | }), 113 | protected: Some(Json(Parameters { 114 | alg: Some("RS256".to_string()), 115 | ..Default::default() 116 | })), 117 | signature: signature0.parse().unwrap(), 118 | }; 119 | 120 | let sig1 = Signature { 121 | header: Some(Parameters { 122 | kid: Some("e9bc097a-ce51-4036-9562-d2ade882db0d".to_string()), 123 | ..Default::default() 124 | }), 125 | protected: Some(Json(Parameters { 126 | alg: Some("ES256".to_string()), 127 | ..Default::default() 128 | })), 129 | signature: signature1.parse().unwrap(), 130 | }; 131 | 132 | let exp = Jws::General(General { 133 | payload: Some(payload.parse().unwrap()), 134 | signatures: 
vec![sig0, sig1], 135 | }); 136 | 137 | assert_eq!(exp, serde_json::from_value(raw).unwrap()); 138 | } 139 | 140 | // Example from RFC 7515 A.7 141 | #[test] 142 | fn flattened() { 143 | let payload = "eyJpc3MiOiJqb2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGFtcGxlLmNvbS9pc19yb290Ijp0cnVlfQ"; 144 | let signature = "DtEhU3ljbEg8L38VWAfUAqOyKAM6-Xx-F4GawxaepmXFCgfTjDxw5djxLa8ISlSApmWQxfKTUJqPP3-Kg6NU1Q"; 145 | 146 | let raw = json!({ 147 | "payload": payload, 148 | "protected": "eyJhbGciOiJFUzI1NiJ9", 149 | "header": { "kid": "e9bc097a-ce51-4036-9562-d2ade882db0d" }, 150 | "signature": signature, 151 | }); 152 | 153 | let exp = Jws::Flattened(Flattened { 154 | payload: Some(payload.parse().unwrap()), 155 | signature: Signature { 156 | header: Some(Parameters { 157 | kid: Some("e9bc097a-ce51-4036-9562-d2ade882db0d".to_string()), 158 | ..Default::default() 159 | }), 160 | protected: Some(Json(Parameters { 161 | alg: Some("ES256".to_string()), 162 | ..Default::default() 163 | })), 164 | signature: signature.parse().unwrap(), 165 | }, 166 | }); 167 | 168 | assert_eq!(exp, serde_json::from_value(raw).unwrap()); 169 | } 170 | 171 | // Example from RFC 7515 A.7 172 | #[test] 173 | fn detached() { 174 | let signature = "DtEhU3ljbEg8L38VWAfUAqOyKAM6-Xx-F4GawxaepmXFCgfTjDxw5djxLa8ISlSApmWQxfKTUJqPP3-Kg6NU1Q"; 175 | 176 | let raw = json!({ 177 | "protected": "eyJhbGciOiJFUzI1NiJ9", 178 | "header": { "kid": "e9bc097a-ce51-4036-9562-d2ade882db0d" }, 179 | "signature": signature, 180 | }); 181 | 182 | let exp = Jws::Flattened(Flattened { 183 | payload: None, 184 | signature: Signature { 185 | header: Some(Parameters { 186 | kid: Some("e9bc097a-ce51-4036-9562-d2ade882db0d".to_string()), 187 | ..Default::default() 188 | }), 189 | protected: Some(Json(Parameters { 190 | alg: Some("ES256".to_string()), 191 | ..Default::default() 192 | })), 193 | signature: signature.parse().unwrap(), 194 | }, 195 | }); 196 | 197 | assert_eq!(exp, serde_json::from_value(raw).unwrap()); 198 | } 199 
| } 200 | -------------------------------------------------------------------------------- /crates/jose/src/lib.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | #![forbid(unsafe_code, clippy::expect_used, clippy::panic)] 5 | #![deny( 6 | clippy::all, 7 | absolute_paths_not_starting_with_crate, 8 | deprecated_in_future, 9 | missing_copy_implementations, 10 | missing_debug_implementations, 11 | noop_method_call, 12 | rust_2018_compatibility, 13 | rust_2018_idioms, 14 | rust_2021_compatibility, 15 | single_use_lifetimes, 16 | trivial_bounds, 17 | trivial_casts, 18 | trivial_numeric_casts, 19 | unreachable_code, 20 | unreachable_patterns, 21 | unreachable_pub, 22 | unstable_features, 23 | unused, 24 | unused_crate_dependencies, 25 | unused_import_braces, 26 | unused_lifetimes, 27 | unused_qualifications, 28 | unused_results, 29 | variant_size_differences 30 | )] 31 | 32 | pub mod b64; 33 | pub mod jwk; 34 | pub mod jws; 35 | 36 | use b64::Bytes; 37 | use serde::{Deserialize, Serialize}; 38 | 39 | pub trait MediaTyped { 40 | const TYPE: &'static str; 41 | } 42 | 43 | #[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] 44 | pub struct Thumbprint { 45 | #[serde(skip_serializing_if = "Option::is_none", default, rename = "x5t")] 46 | s1: Option, 47 | 48 | #[serde(skip_serializing_if = "Option::is_none", default, rename = "x5t#S256")] 49 | s256: Option, 50 | } 51 | -------------------------------------------------------------------------------- /crates/server/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "drawbridge-server" 3 | version = "0.4.3" 4 | authors = ["Profian Inc", "The Enarx Project Developers"] 5 | edition = "2021" 6 | license = "Apache-2.0" 7 | homepage = "https://github.com/enarx/drawbridge" 8 | repository = 
"https://github.com/enarx/drawbridge" 9 | description = "Drawbridge server application." 10 | keywords = ["drawbridge"] 11 | 12 | [dependencies] 13 | # Internal dependencies 14 | drawbridge-jose = { workspace = true } 15 | drawbridge-type = { workspace = true, features = ["server"] } 16 | 17 | # External dependencies 18 | anyhow = { workspace = true, features = ["std"] } 19 | async-std = { workspace = true } 20 | axum = { workspace = true, features = ["json"] } 21 | camino = { workspace = true } 22 | cap-async-std = { workspace = true, features = ["fs_utf8"] } 23 | futures = { workspace = true, features = ["async-await"] } 24 | futures-rustls = { workspace = true } 25 | hyper = { workspace = true, features = ["http1", "server"] } 26 | jsonwebtoken = { workspace = true } 27 | mime = { workspace = true } 28 | once_cell = { workspace = true } 29 | openidconnect = { workspace = true, features = ["ureq"] } 30 | rustls = { workspace = true, features = ["ring"] } 31 | rustls-pemfile = { workspace = true, features = ["std"] } 32 | rustls-pki-types = { workspace = true } 33 | semver = { workspace = true } 34 | serde = { workspace = true } 35 | serde_json = { workspace = true, features = ["std"] } 36 | tokio-util = { workspace = true, features = ["compat"] } 37 | tower = { workspace = true } 38 | tower-http = { workspace = true, features = ["trace"] } 39 | tracing = { workspace = true } 40 | uuid = { workspace = true } 41 | -------------------------------------------------------------------------------- /crates/server/src/auth/mod.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | mod oidc; 4 | mod tls; 5 | 6 | pub use oidc::{Claims as OidcClaims, ScopeContext, ScopeLevel, Verifier as OidcVerifier}; 7 | pub use tls::{Config as TlsConfig, TrustedCertificate}; 8 | 9 | use super::{Repository, Store, User}; 10 | 11 | use drawbridge_type::RepositoryContext; 12 | 13 | use axum::body::Body; 14 | use axum::extract::RequestParts; 15 | use axum::http::Request; 16 | use axum::response::IntoResponse; 17 | 18 | pub async fn assert_repository_read<'a>( 19 | store: &'a Store, 20 | cx: &'a RepositoryContext, 21 | req: Request, 22 | ) -> Result<(Repository<'a>, Option>), impl IntoResponse> { 23 | let repo = store.repository(cx); 24 | if repo 25 | .is_public() 26 | .await 27 | .map_err(IntoResponse::into_response)? 28 | { 29 | Ok((repo, None)) 30 | } else { 31 | RequestParts::new(req) 32 | .extract::() 33 | .await? 34 | .assert_user(store, &cx.owner, ScopeContext::Repository, ScopeLevel::Read) 35 | .await 36 | .map_err(IntoResponse::into_response) 37 | .map(|user| (repo, Some(user))) 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /crates/server/src/auth/oidc.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::{GetError, OidcConfig, Store, User}; 5 | 6 | use drawbridge_type::{UserContext, UserRecord}; 7 | 8 | use std::collections::{HashMap, HashSet}; 9 | use std::sync::Arc; 10 | 11 | use anyhow::{anyhow, bail, Context}; 12 | use axum::extract::rejection::{TypedHeaderRejection, TypedHeaderRejectionReason}; 13 | use axum::extract::{Extension, FromRequest, RequestParts}; 14 | use axum::headers::authorization::Bearer; 15 | use axum::headers::Authorization; 16 | use axum::http::StatusCode; 17 | use axum::response::{IntoResponse, Response}; 18 | use axum::{async_trait, TypedHeader}; 19 | use jsonwebtoken::jwk::{AlgorithmParameters, JwkSet}; 20 | use jsonwebtoken::{decode, decode_header, Algorithm, DecodingKey, Validation}; 21 | use openidconnect::core::CoreProviderMetadata; 22 | use openidconnect::ureq::http_client; 23 | use openidconnect::IssuerUrl; 24 | use serde::{Deserialize, Deserializer}; 25 | use tracing::{error, info, trace, warn}; 26 | 27 | pub struct Verifier { 28 | keyset: HashMap, 29 | validator: Validation, 30 | } 31 | 32 | impl std::fmt::Debug for Verifier { 33 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 34 | f.debug_struct("Verifier") 35 | .field("validator", &self.validator) 36 | .finish() 37 | } 38 | } 39 | 40 | #[derive(Clone, Debug, Deserialize)] 41 | struct VerifiedInfo { 42 | #[serde(rename = "sub")] 43 | subject: String, 44 | #[serde(rename = "scope", deserialize_with = "deserialize_scopes")] 45 | scopes: HashSet, 46 | } 47 | 48 | #[allow(single_use_lifetimes)] 49 | fn deserialize_scopes<'de, D>(deserializer: D) -> Result, D::Error> 50 | where 51 | D: Deserializer<'de>, 52 | { 53 | let s: &str = Deserialize::deserialize(deserializer)?; 54 | Ok(HashSet::from_iter(s.split(' ').map(|s| s.to_owned()))) 55 | } 56 | 57 | impl Verifier { 58 | pub fn new(config: OidcConfig) -> Result { 59 | let mut validator = Validation::new(Algorithm::RS256); 60 | 
validator.set_audience(&[config.audience]); 61 | validator.set_issuer(&[config.issuer.as_str()]); 62 | validator.set_required_spec_claims(&["exp", "iat", "scope", "aud"]); 63 | validator.validate_exp = true; 64 | 65 | let oidc_md = 66 | CoreProviderMetadata::discover(&IssuerUrl::from_url(config.issuer), http_client) 67 | .context("failed to discover provider metadata")?; 68 | let jwks = oidc_md.jwks(); 69 | let jwks = serde_json::to_string(&jwks).context("failed to serialize jwks")?; 70 | let keyset: JwkSet = serde_json::from_str(&jwks).context("failed to parse jwks")?; 71 | let keyset = keyset 72 | .keys 73 | .into_iter() 74 | .map(|jwk| { 75 | let kid = jwk.common.key_id.ok_or_else(|| anyhow!("missing kid"))?; 76 | let key = match jwk.algorithm { 77 | AlgorithmParameters::RSA(ref rsa) => { 78 | DecodingKey::from_rsa_components(&rsa.n, &rsa.e) 79 | .context("Error creating DecodingKey") 80 | } 81 | _ => bail!("Unsupported algorithm encountered: {:?}", jwk.algorithm), 82 | }?; 83 | Ok((kid, key)) 84 | }) 85 | .collect::, anyhow::Error>>() 86 | .context("failed to parse jwks")?; 87 | 88 | Ok(Self { keyset, validator }) 89 | } 90 | 91 | fn verify_token(&self, token: &str) -> Result { 92 | let header = decode_header(token).context("Error decoding header")?; 93 | let kid = match header.kid { 94 | Some(k) => k, 95 | None => bail!("Token doesn't have a `kid` header field"), 96 | }; 97 | let key = self 98 | .keyset 99 | .get(&kid) 100 | .ok_or_else(|| anyhow!("No key found for kid: {}", kid))?; 101 | let decoded_token = 102 | decode::(token, key, &self.validator).context("Error decoding token")?; 103 | Ok(decoded_token.claims) 104 | } 105 | } 106 | 107 | #[derive(Debug, Clone, Copy)] 108 | pub enum ScopeContext { 109 | User, 110 | Repository, 111 | Tag, 112 | } 113 | 114 | impl std::fmt::Display for ScopeContext { 115 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 116 | match self { 117 | ScopeContext::User => write!(f, "drawbridge_users"), 118 | 
ScopeContext::Repository => write!(f, "drawbridge_repositories"), 119 | ScopeContext::Tag => write!(f, "drawbridge_tags"), 120 | } 121 | } 122 | } 123 | 124 | #[derive(Debug, Clone, Copy)] 125 | pub enum ScopeLevel { 126 | Read, 127 | Write, 128 | } 129 | 130 | impl std::fmt::Display for ScopeLevel { 131 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 132 | match self { 133 | ScopeLevel::Read => write!(f, "read"), 134 | ScopeLevel::Write => write!(f, "write"), 135 | } 136 | } 137 | } 138 | 139 | impl ScopeLevel { 140 | fn sufficient_levels(&self) -> &[&str] { 141 | match self { 142 | ScopeLevel::Read => &["read", "manage"], 143 | ScopeLevel::Write => &["write", "manage"], 144 | } 145 | } 146 | } 147 | 148 | #[repr(transparent)] 149 | #[derive(Clone, Debug)] 150 | pub struct Claims(VerifiedInfo); 151 | 152 | impl Claims { 153 | pub fn subject(&self) -> &str { 154 | &self.0.subject 155 | } 156 | 157 | fn check_scope( 158 | &self, 159 | context: ScopeContext, 160 | level: ScopeLevel, 161 | ) -> Result<(), (StatusCode, String)> { 162 | for level in level.sufficient_levels() { 163 | let scope = format!("{level}:{context}"); 164 | if self.0.scopes.contains(&scope) { 165 | return Ok(()); 166 | } 167 | } 168 | Err(( 169 | StatusCode::UNAUTHORIZED, 170 | format!("Token is missing a scope for level {level}, context {context}"), 171 | )) 172 | } 173 | 174 | /// Asserts that the token has a scope that satisfies the given context and level. 175 | #[allow(clippy::result_large_err)] 176 | pub fn assert_scope( 177 | &self, 178 | context: ScopeContext, 179 | level: ScopeLevel, 180 | ) -> Result<(), impl IntoResponse> { 181 | self.check_scope(context, level) 182 | .map_err(|e| e.into_response()) 183 | } 184 | 185 | /// Assert that the client is the user identified by `cx`, and that the token has a scope that 186 | /// satisfies the given context and level. 
187 | pub async fn assert_user<'a>( 188 | &self, 189 | store: &'a Store, 190 | cx: &UserContext, 191 | scope_context: ScopeContext, 192 | scope_level: ScopeLevel, 193 | ) -> Result, impl IntoResponse> { 194 | let subj = self.subject(); 195 | let oidc_record = UserRecord { 196 | subject: subj.to_string(), 197 | }; 198 | 199 | let user = store.user(cx); 200 | let owner_record: UserRecord = user.get_content_json().await.map_err(|e|{ 201 | match e { 202 | GetError::NotFound => (StatusCode::UNAUTHORIZED, format!("User `{cx}` not found")).into_response(), 203 | _ => { 204 | warn!(target: "app::auth::oidc", ?oidc_record, error = ?e, "failed to get user by OpenID Connect subject"); 205 | e.into_response() 206 | }, 207 | }})?; 208 | 209 | if oidc_record != owner_record { 210 | warn!(target: "app::auth::oidc", ?oidc_record, user = ?cx, ?owner_record, "User access not authorized"); 211 | return Err(( 212 | StatusCode::UNAUTHORIZED, 213 | format!("You are logged in as `{subj}`, and not authorized for user `{cx}`"), 214 | ) 215 | .into_response()); 216 | } 217 | 218 | self.check_scope(scope_context, scope_level) 219 | .map_err(|e| e.into_response())?; 220 | 221 | Ok(user) 222 | } 223 | } 224 | 225 | #[async_trait] 226 | impl FromRequest for Claims { 227 | type Rejection = Response; 228 | 229 | async fn from_request(req: &mut RequestParts) -> Result { 230 | let TypedHeader(Authorization::(token)) = 231 | req.extract() 232 | .await 233 | .map_err(|e: TypedHeaderRejection| match e.reason() { 234 | TypedHeaderRejectionReason::Missing => { 235 | (StatusCode::UNAUTHORIZED, "Bearer token header missing").into_response() 236 | } 237 | _ => e.into_response(), 238 | })?; 239 | warn!(target: "app::auth::oidc", ?token, "got token"); 240 | 241 | let Extension(verifier) = req 242 | .extract::>>() 243 | .await 244 | .map_err(|e| { 245 | error!(target: "app::auth::oidc", "OpenID Connect verifier extension missing"); 246 | e.into_response() 247 | })?; 248 | 249 | trace!(target: 
"app:auth::oidc", "verifying token"); 250 | 251 | let claims = verifier 252 | .verify_token(token.token()) 253 | .map_err(|e| { 254 | error!(target: "app::auth::oidc", error = ?e, "failed to verify token"); 255 | (StatusCode::UNAUTHORIZED, "Invalid token provided").into_response() 256 | }) 257 | .map(Self); 258 | info!(target: "app::auth::oidc", ?claims, "verified token"); 259 | claims 260 | } 261 | } 262 | -------------------------------------------------------------------------------- /crates/server/src/auth/tls.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use std::io::BufRead; 5 | use std::ops::Deref; 6 | 7 | use anyhow::{bail, Context}; 8 | use rustls::server::WebPkiClientVerifier; 9 | use rustls::{RootCertStore, ServerConfig}; 10 | use rustls_pemfile::Item::{Pkcs1Key, Pkcs8Key, Sec1Key, X509Certificate}; 11 | use rustls_pki_types::CertificateDer; 12 | use rustls_pki_types::PrivateKeyDer; 13 | 14 | #[derive(Clone, Copy, Debug)] 15 | #[repr(transparent)] 16 | pub struct TrustedCertificate; 17 | 18 | #[repr(transparent)] 19 | #[allow(missing_debug_implementations)] 20 | #[derive(Clone)] 21 | pub struct Config(ServerConfig); 22 | 23 | impl Deref for Config { 24 | type Target = ServerConfig; 25 | 26 | fn deref(&self) -> &Self::Target { 27 | &self.0 28 | } 29 | } 30 | 31 | impl From for ServerConfig { 32 | fn from(conf: Config) -> Self { 33 | conf.0 34 | } 35 | } 36 | 37 | fn read_certificates(mut rd: impl BufRead) -> anyhow::Result>> { 38 | rustls_pemfile::read_all(&mut rd) 39 | .map(|item| match item? 
{ 40 | X509Certificate(buf) => Ok(buf), 41 | _ => bail!("unsupported certificate type"), 42 | }) 43 | .collect() 44 | } 45 | 46 | impl Config { 47 | pub fn read( 48 | mut certs: impl BufRead, 49 | mut key: impl BufRead, 50 | mut cas: impl BufRead, 51 | ) -> anyhow::Result { 52 | let certs = 53 | read_certificates(&mut certs).context("failed to read server certificate chain")?; 54 | let key = { 55 | if let Some(key) = rustls_pemfile::read_all(&mut key).next() { 56 | match key? { 57 | Pkcs1Key(inner) => PrivateKeyDer::from(inner), 58 | Pkcs8Key(inner) => PrivateKeyDer::from(inner), 59 | Sec1Key(inner) => PrivateKeyDer::from(inner), 60 | _ => { 61 | bail!("Unexpected key type found"); 62 | } 63 | } 64 | } else { 65 | bail!("No key found") 66 | } 67 | }; 68 | 69 | let client_verifier = { 70 | let mut roots = RootCertStore::empty(); 71 | read_certificates(&mut cas) 72 | .context("failed to read CA certificates")? 73 | .into_iter() 74 | .try_for_each(|cert| roots.add(cert)) 75 | .context("failed to construct root certificate store")?; 76 | // TODO: Allow client certificates signed by unknown CAs. 77 | WebPkiClientVerifier::builder(roots.into()) 78 | .allow_unauthenticated() 79 | .build()? 80 | }; 81 | 82 | ServerConfig::builder() 83 | .with_client_cert_verifier(client_verifier) 84 | .with_single_cert(certs, key) 85 | .context("invalid server certificate key") 86 | .map(Self) 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /crates/server/src/builder.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::{handle, App, Store, TlsConfig}; 5 | use std::ops::Deref; 6 | 7 | use anyhow::{anyhow, Context}; 8 | use async_std::fs::File; 9 | use async_std::path::Path; 10 | use async_std::sync::Arc; 11 | use axum::handler::Handler; 12 | use axum::routing::any; 13 | use axum::{Extension, Router}; 14 | use cap_async_std::fs_utf8::Dir; 15 | use futures::lock::Mutex; 16 | use futures::TryFutureExt; 17 | use futures_rustls::TlsAcceptor; 18 | use openidconnect::url::Url; 19 | use tower_http::{ 20 | trace::{ 21 | DefaultOnBodyChunk, DefaultOnEos, DefaultOnFailure, DefaultOnRequest, DefaultOnResponse, 22 | TraceLayer, 23 | }, 24 | LatencyUnit, 25 | }; 26 | use tracing::Level; 27 | 28 | /// OpenID Connect client configuration. 29 | #[derive(Debug)] 30 | pub struct OidcConfig { 31 | pub audience: String, 32 | pub issuer: Url, 33 | } 34 | 35 | #[derive(Debug, Clone, Default)] 36 | struct SpanMaker; 37 | 38 | impl tower_http::trace::MakeSpan for SpanMaker { 39 | fn make_span(&mut self, request: &axum::http::request::Request) -> tracing::span::Span { 40 | let reqid = uuid::Uuid::new_v4(); 41 | tracing::span!( 42 | Level::INFO, 43 | "request", 44 | method = %request.method(), 45 | uri = %request.uri(), 46 | version = ?request.version(), 47 | headers = ?request.headers(), 48 | request_id = %reqid, 49 | ) 50 | } 51 | } 52 | 53 | /// [App] builder. 54 | pub struct Builder { 55 | store: S, 56 | tls: TlsConfig, 57 | oidc: OidcConfig, 58 | } 59 | 60 | impl std::fmt::Debug for Builder { 61 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 62 | f.debug_struct("Builder") 63 | .field("store", &self.store) 64 | .field("oidc", &self.oidc) 65 | .finish() 66 | } 67 | } 68 | 69 | impl> Builder { 70 | /// Constructs a new [Builder]. 
71 | pub fn new(store: S, tls: TlsConfig, oidc: OidcConfig) -> Self { 72 | Self { store, tls, oidc } 73 | } 74 | 75 | /// Builds the application and returns Drawbridge instance as a [tower::MakeService]. 76 | pub async fn build(self) -> anyhow::Result { 77 | let Self { store, tls, oidc } = self; 78 | let store_path = store.as_ref(); 79 | let store = File::open(store_path) 80 | .and_then(|f| Store::new(Dir::from_std_file(f))) 81 | .await 82 | .context(anyhow!( 83 | "failed to open store at `{}`", 84 | store_path.to_string_lossy() 85 | ))?; 86 | 87 | let oidc_verifier = 88 | crate::auth::OidcVerifier::new(oidc).context("failed to create OIDC verifier")?; 89 | 90 | Ok(App { 91 | make_service: Mutex::new( 92 | Router::new() 93 | .fallback(handle.into_service()) 94 | .route("/health", any(|| async {})) 95 | .layer(Extension(Arc::new(store))) 96 | .layer(Extension(Arc::new(oidc_verifier))) 97 | .layer( 98 | TraceLayer::new_for_http() 99 | .make_span_with(SpanMaker) 100 | .on_request(DefaultOnRequest::new().level(Level::INFO)) 101 | .on_response( 102 | DefaultOnResponse::new() 103 | .level(Level::INFO) 104 | .latency_unit(LatencyUnit::Micros), 105 | ) 106 | .on_body_chunk(DefaultOnBodyChunk::new()) 107 | .on_eos( 108 | DefaultOnEos::new() 109 | .level(Level::INFO) 110 | .latency_unit(LatencyUnit::Micros), 111 | ) 112 | .on_failure( 113 | DefaultOnFailure::new() 114 | .level(Level::INFO) 115 | .latency_unit(LatencyUnit::Micros), 116 | ), 117 | ) 118 | .into_make_service(), 119 | ), 120 | tls: TlsAcceptor::from(Arc::new(tls.deref().clone())), 121 | }) 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /crates/server/src/handle.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::{repos, tags, trees, users}; 5 | 6 | use drawbridge_type::{RepositoryName, TagName, TreePath, UserName}; 7 | 8 | use axum::body::Body; 9 | use axum::handler::Handler; 10 | use axum::http::{Method, Request, StatusCode}; 11 | use axum::response::IntoResponse; 12 | use once_cell::sync::Lazy; 13 | use tower::Service; 14 | use tracing::trace; 15 | 16 | /// Server API version 17 | pub(crate) static API_VERSION: Lazy = Lazy::new(|| { 18 | env!("CARGO_PKG_VERSION").parse().unwrap_or_else(|_| { 19 | panic!( 20 | "failed to parse CARGO_PKG_VERSION `{}`", 21 | env!("CARGO_PKG_VERSION") 22 | ) 23 | }) 24 | }); 25 | 26 | /// Parses the URI of `req` and routes it to respective component. 27 | pub(crate) async fn handle(mut req: Request) -> impl IntoResponse { 28 | #[inline] 29 | fn not_found(path: &str) -> (StatusCode, String) { 30 | (StatusCode::NOT_FOUND, format!("Route `/{path}` not found")) 31 | } 32 | 33 | trace!(target: "app::handle", "begin HTTP request handling {:?}", req); 34 | let path = req.uri().path().trim_start_matches('/'); 35 | let (ver, path) = path 36 | .strip_prefix("api") 37 | .ok_or_else(|| not_found(path))? 38 | .trim_start_matches('/') 39 | .strip_prefix('v') 40 | .ok_or_else(|| not_found(path))? 
41 | .split_once('/') 42 | .ok_or_else(|| not_found(path))?; 43 | let ver = ver.parse::().map_err(|e| { 44 | ( 45 | StatusCode::BAD_REQUEST, 46 | format!("Failed to parse SemVer version from {path}: {e}"), 47 | ) 48 | })?; 49 | if ver > *API_VERSION 50 | && (ver.major > API_VERSION.major 51 | || API_VERSION.major == 0 && ver.minor > API_VERSION.minor) 52 | { 53 | return Err(( 54 | StatusCode::NOT_IMPLEMENTED, 55 | format!("Unsupported API version `{ver}`"), 56 | )); 57 | } 58 | let (head, tail) = path 59 | .trim_start_matches('/') 60 | .split_once("/_") 61 | .map(|(left, right)| (left.to_string(), format!("_{right}"))) 62 | .unwrap_or((path.to_string(), "".into())); 63 | if head.is_empty() { 64 | return Err(not_found(path)); 65 | } 66 | 67 | let extensions = req.extensions_mut(); 68 | 69 | let (user, head) = head.split_once('/').unwrap_or((&head, "")); 70 | let user = user.parse::().map_err(|e| { 71 | ( 72 | StatusCode::BAD_REQUEST, 73 | format!("Failed to parse user name: {e}"), 74 | ) 75 | })?; 76 | trace!(target: "app::handle", "parsed user name: `{user}`"); 77 | assert_eq!(extensions.insert(user), None, "duplicate user name"); 78 | if head.is_empty() { 79 | return match *req.method() { 80 | Method::HEAD => Ok(users::head.into_service().call(req).await.into_response()), 81 | Method::GET => Ok(users::get.into_service().call(req).await.into_response()), 82 | Method::PUT => Ok(users::put.into_service().call(req).await.into_response()), 83 | _ => Err(( 84 | StatusCode::METHOD_NOT_ALLOWED, 85 | "Method not allowed for user endpoint".into(), 86 | )), 87 | }; 88 | } 89 | 90 | let repo = head.parse::().map_err(|e| { 91 | ( 92 | StatusCode::BAD_REQUEST, 93 | format!("Failed to parse repository name: {e}"), 94 | ) 95 | })?; 96 | trace!(target: "app::handle", "parsed repository name: `{repo}`"); 97 | assert_eq!(extensions.insert(repo), None, "duplicate repository name"); 98 | 99 | let mut tail = tail.splitn(4, '/'); 100 | match (tail.next(), tail.next(), tail.next()) { 101 
| (None | Some(""), None, None) => match *req.method() { 102 | Method::HEAD => Ok(repos::head.into_service().call(req).await.into_response()), 103 | Method::GET => Ok(repos::get.into_service().call(req).await.into_response()), 104 | Method::PUT => Ok(repos::put.into_service().call(req).await.into_response()), 105 | _ => Err(( 106 | StatusCode::METHOD_NOT_ALLOWED, 107 | "Method not allowed for repository endpoint".into(), 108 | )), 109 | }, 110 | (Some("_tag"), None, None) => match *req.method() { 111 | Method::GET => Ok(tags::query.into_service().call(req).await.into_response()), 112 | _ => Err(( 113 | StatusCode::METHOD_NOT_ALLOWED, 114 | "Method not allowed for repository tag query endpoint".into(), 115 | )), 116 | }, 117 | (Some("_tag"), Some(tag), prop @ (None | Some("tree"))) => { 118 | let tag = tag.parse::().map_err(|e| { 119 | ( 120 | StatusCode::BAD_REQUEST, 121 | format!("Failed to parse tag name: {e}"), 122 | ) 123 | })?; 124 | trace!(target: "app::handle", "parsed tag name: `{tag}`"); 125 | assert_eq!(extensions.insert(tag), None, "duplicate tag name"); 126 | 127 | if prop.is_none() { 128 | return match *req.method() { 129 | Method::HEAD => Ok(tags::head.into_service().call(req).await.into_response()), 130 | Method::GET => Ok(tags::get.into_service().call(req).await.into_response()), 131 | Method::PUT => Ok(tags::put.into_service().call(req).await.into_response()), 132 | _ => Err(( 133 | StatusCode::METHOD_NOT_ALLOWED, 134 | "Method not allowed for tag endpoint".into(), 135 | )), 136 | }; 137 | } 138 | 139 | let path = tail.next().unwrap_or("").parse::().map_err(|e| { 140 | ( 141 | StatusCode::BAD_REQUEST, 142 | format!("Failed to parse tree path: {e}"), 143 | ) 144 | })?; 145 | trace!(target: "app::handle", "parsed tree path: `{path}`"); 146 | assert_eq!(extensions.insert(path), None, "duplicate tree path"); 147 | match *req.method() { 148 | Method::HEAD => Ok(trees::head.into_service().call(req).await.into_response()), 149 | Method::GET => 
Ok(trees::get.into_service().call(req).await.into_response()), 150 | Method::PUT => Ok(trees::put.into_service().call(req).await.into_response()), 151 | _ => Err(( 152 | StatusCode::METHOD_NOT_ALLOWED, 153 | "Method not allowed for tag tree endpoint".into(), 154 | )), 155 | } 156 | } 157 | _ => Err(( 158 | StatusCode::NOT_FOUND, 159 | "Route not found on repository".into(), 160 | )), 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /crates/server/src/lib.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | #![forbid(unsafe_code)] 5 | #![deny( 6 | clippy::all, 7 | absolute_paths_not_starting_with_crate, 8 | deprecated_in_future, 9 | missing_copy_implementations, 10 | missing_debug_implementations, 11 | noop_method_call, 12 | rust_2018_compatibility, 13 | rust_2018_idioms, 14 | rust_2021_compatibility, 15 | single_use_lifetimes, 16 | trivial_bounds, 17 | trivial_casts, 18 | trivial_numeric_casts, 19 | unreachable_code, 20 | unreachable_patterns, 21 | unreachable_pub, 22 | unstable_features, 23 | unused, 24 | unused_crate_dependencies, 25 | unused_import_braces, 26 | unused_lifetimes, 27 | unused_results, 28 | variant_size_differences 29 | )] 30 | 31 | mod builder; 32 | mod handle; 33 | 34 | pub mod auth; 35 | pub mod repos; 36 | pub mod store; 37 | pub mod tags; 38 | pub mod trees; 39 | pub mod users; 40 | 41 | pub use auth::{OidcClaims, ScopeContext, ScopeLevel, TlsConfig, TrustedCertificate}; 42 | pub use builder::*; 43 | pub(crate) use handle::*; 44 | pub(crate) use store::*; 45 | 46 | pub use openidconnect::url; 47 | 48 | use anyhow::Context as _; 49 | use async_std::path::Path; 50 | use axum::extract::Extension; 51 | use axum::routing::IntoMakeService; 52 | use axum::Router; 53 | use futures::lock::Mutex; 54 | use futures::{AsyncRead, AsyncWrite}; 55 | use 
futures_rustls::TlsAcceptor; 56 | use hyper::server::conn::Http; 57 | use tokio_util::compat::FuturesAsyncReadCompatExt; 58 | use tower::MakeService; 59 | use tracing::trace; 60 | 61 | #[allow(missing_debug_implementations)] // TlsAcceptor does not implement Debug 62 | pub struct App { 63 | make_service: Mutex>, 64 | tls: TlsAcceptor, 65 | } 66 | 67 | impl App { 68 | pub fn builder>(store: S, tls: TlsConfig, oidc: OidcConfig) -> Builder { 69 | Builder::new(store, tls, oidc) 70 | } 71 | 72 | pub async fn new( 73 | store: impl AsRef, 74 | tls: TlsConfig, 75 | oidc: OidcConfig, 76 | ) -> anyhow::Result { 77 | Self::builder(store, tls, oidc).build().await 78 | } 79 | 80 | pub async fn handle( 81 | &self, 82 | stream: impl 'static + Unpin + AsyncRead + AsyncWrite, 83 | ) -> anyhow::Result<()> { 84 | trace!(target: "app::App::handle", "begin TLS handshake"); 85 | let stream = self 86 | .tls 87 | .accept(stream) 88 | .await 89 | .context("failed to accept TLS connection")?; 90 | trace!(target: "app::App::handle", "completed TLS handshake"); 91 | 92 | let mut svc = self 93 | .make_service 94 | .lock() 95 | .await 96 | .make_service(()) 97 | .await 98 | .context("failed to create app service")?; 99 | let (_, conn) = stream.get_ref(); 100 | if conn.peer_certificates().is_some() { 101 | svc = svc.layer(Extension(TrustedCertificate)); 102 | trace!(target: "app::App::handle", "add TrustedCertificate to extensions"); 103 | } 104 | trace!(target: "app::App::handle", "begin HTTP request serving"); 105 | Http::new() 106 | .serve_connection(stream.compat(), svc) 107 | .await 108 | .context("failed to handle request") 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /crates/server/src/repos/get.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::{OidcClaims, ScopeContext, ScopeLevel, Store}; 5 | 6 | use drawbridge_type::RepositoryContext; 7 | 8 | use async_std::sync::Arc; 9 | use axum::response::IntoResponse; 10 | use axum::Extension; 11 | use tracing::{debug, trace}; 12 | 13 | pub async fn get( 14 | Extension(ref store): Extension>, 15 | claims: OidcClaims, 16 | cx: RepositoryContext, 17 | ) -> impl IntoResponse { 18 | trace!(target: "app::trees::get", "called for `{cx}`"); 19 | 20 | let user = claims 21 | .assert_user(store, &cx.owner, ScopeContext::Repository, ScopeLevel::Read) 22 | .await 23 | .map_err(IntoResponse::into_response)?; 24 | 25 | // TODO: Stream body 26 | // https://github.com/profianinc/drawbridge/issues/56 27 | let mut body = vec![]; 28 | user.repository(&cx.name) 29 | .get_to_writer(&mut body) 30 | .await 31 | .map_err(|e| { 32 | debug!(target: "app::repos::get", "failed for `{cx}`: {:?}", e); 33 | e.into_response() 34 | }) 35 | .map(|meta| (meta, body)) 36 | } 37 | -------------------------------------------------------------------------------- /crates/server/src/repos/head.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::{OidcClaims, ScopeContext, ScopeLevel, Store}; 5 | 6 | use drawbridge_type::RepositoryContext; 7 | 8 | use async_std::sync::Arc; 9 | use axum::response::IntoResponse; 10 | use axum::Extension; 11 | use tracing::{debug, trace}; 12 | 13 | pub async fn head( 14 | Extension(ref store): Extension>, 15 | claims: OidcClaims, 16 | cx: RepositoryContext, 17 | ) -> impl IntoResponse { 18 | trace!(target: "app::trees::head", "called for `{cx}`"); 19 | 20 | claims 21 | .assert_user(store, &cx.owner, ScopeContext::Repository, ScopeLevel::Read) 22 | .await 23 | .map_err(IntoResponse::into_response)? 
24 | .repository(&cx.name) 25 | .get_meta() 26 | .await 27 | .map_err(|e| { 28 | debug!(target: "app::repos::head", "failed for `{cx}`: {:?}", e); 29 | e.into_response() 30 | }) 31 | .map(|meta| (meta, ())) 32 | } 33 | -------------------------------------------------------------------------------- /crates/server/src/repos/mod.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | mod get; 4 | mod head; 5 | mod put; 6 | 7 | pub use get::*; 8 | pub use head::*; 9 | pub use put::*; 10 | -------------------------------------------------------------------------------- /crates/server/src/repos/put.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::{OidcClaims, ScopeContext, ScopeLevel, Store}; 5 | 6 | use drawbridge_type::{Meta, RepositoryConfig, RepositoryContext}; 7 | 8 | use async_std::sync::Arc; 9 | use axum::http::StatusCode; 10 | use axum::response::IntoResponse; 11 | use axum::{Extension, Json}; 12 | use tracing::{debug, trace}; 13 | 14 | pub async fn put( 15 | Extension(ref store): Extension>, 16 | claims: OidcClaims, 17 | cx: RepositoryContext, 18 | meta: Meta, 19 | Json(config): Json, 20 | ) -> impl IntoResponse { 21 | trace!(target: "app::trees::put", "called for `{cx}`"); 22 | 23 | claims 24 | .assert_user( 25 | store, 26 | &cx.owner, 27 | ScopeContext::Repository, 28 | ScopeLevel::Write, 29 | ) 30 | .await 31 | .map_err(IntoResponse::into_response)? 
32 | .create_repository(&cx.name, meta, &config) 33 | .await 34 | .map_err(|e| { 35 | debug!(target: "app::repos::put", "failed for `{cx}`: {:?}", e); 36 | e.into_response() 37 | }) 38 | .map(|_| StatusCode::CREATED) 39 | } 40 | -------------------------------------------------------------------------------- /crates/server/src/store/entity.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use std::io; 5 | use std::os::unix::fs::DirBuilderExt; 6 | 7 | use drawbridge_type::Meta; 8 | 9 | use anyhow::Context; 10 | use axum::http::StatusCode; 11 | use axum::response::{IntoResponse, Response}; 12 | use camino::{Utf8Path, Utf8PathBuf}; 13 | use cap_async_std::fs_utf8::{Dir, DirBuilder, ReadDir}; 14 | use drawbridge_type::digest::ContentDigest; 15 | use futures::future::TryFutureExt; 16 | use futures::io::copy; 17 | use futures::try_join; 18 | use futures::{AsyncRead, AsyncWrite}; 19 | use serde::{Deserialize, Serialize}; 20 | use tracing::{debug, trace}; 21 | 22 | const STORAGE_FAILURE_RESPONSE: (StatusCode, &str) = 23 | (StatusCode::INTERNAL_SERVER_ERROR, "Storage backend failure"); 24 | 25 | #[derive(Debug)] 26 | pub enum CreateError { 27 | Occupied, 28 | LengthMismatch { expected: u64, got: u64 }, 29 | DigestMismatch, 30 | Internal(E), 31 | } 32 | 33 | impl IntoResponse for CreateError { 34 | fn into_response(self) -> Response { 35 | match self { 36 | CreateError::Occupied => (StatusCode::CONFLICT, "Already exists").into_response(), 37 | CreateError::DigestMismatch => { 38 | (StatusCode::BAD_REQUEST, "Content digest mismatch").into_response() 39 | } 40 | CreateError::LengthMismatch { expected, got } => ( 41 | StatusCode::BAD_REQUEST, 42 | format!("Content length mismatch, expected: {expected}, got {got}"), 43 | ) 44 | .into_response(), 45 | CreateError::Internal(_) => STORAGE_FAILURE_RESPONSE.into_response(), 46 | } 47 | } 48 | } 49 | 
50 | #[derive(Clone, Debug, PartialEq, Eq)] 51 | pub enum GetError { 52 | NotFound, 53 | Internal(E), 54 | } 55 | 56 | impl IntoResponse for GetError { 57 | fn into_response(self) -> Response { 58 | match self { 59 | GetError::NotFound => (StatusCode::NOT_FOUND, "Not found"), 60 | GetError::Internal(_) => STORAGE_FAILURE_RESPONSE, 61 | } 62 | .into_response() 63 | } 64 | } 65 | 66 | #[derive(Debug)] 67 | pub enum GetToWriterError { 68 | IO(io::Error), 69 | Get(GetError), 70 | } 71 | 72 | impl IntoResponse for GetToWriterError { 73 | fn into_response(self) -> Response { 74 | match self { 75 | GetToWriterError::Get(GetError::NotFound) => { 76 | (StatusCode::NOT_FOUND, "Repository does not exist") 77 | } 78 | GetToWriterError::Get(GetError::Internal(_)) => { 79 | (StatusCode::INTERNAL_SERVER_ERROR, "Storage backend failure") 80 | } 81 | GetToWriterError::IO(_) => (StatusCode::INTERNAL_SERVER_ERROR, "I/O error"), 82 | } 83 | .into_response() 84 | } 85 | } 86 | 87 | #[derive(Copy, Clone, Debug)] 88 | pub struct Entity<'a, P> { 89 | root: &'a Dir, 90 | prefix: P, 91 | } 92 | 93 | async fn create_verified( 94 | dir: &Dir, 95 | path: impl AsRef, 96 | hash: ContentDigest, 97 | size: u64, 98 | rdr: impl Unpin + AsyncRead, 99 | ) -> Result<(), CreateError> { 100 | let mut file = dir.create(path).await.map_err(|e| match e.kind() { 101 | io::ErrorKind::AlreadyExists => CreateError::Occupied, 102 | _ => CreateError::Internal(anyhow::Error::new(e).context("failed to create file")), 103 | })?; 104 | match copy(hash.verifier(rdr), &mut file).await { 105 | Err(e) if e.kind() == io::ErrorKind::InvalidData => Err(CreateError::DigestMismatch), 106 | Err(e) => Err(CreateError::Internal( 107 | anyhow::Error::new(e).context("failed to write file"), 108 | )), 109 | Ok(n) if n != size => Err(CreateError::LengthMismatch { 110 | expected: size, 111 | got: n, 112 | }), 113 | Ok(_) => Ok(()), 114 | } 115 | } 116 | 117 | impl<'a> Entity<'a, &'static str> { 118 | pub fn new(root: &'a Dir) -> Self 
{ 119 | Self { root, prefix: "" } 120 | } 121 | } 122 | 123 | impl<'a, P: AsRef> Entity<'a, P> { 124 | /// Returns a child [Entity] rooted at `path`. 125 | pub fn child(&self, path: impl AsRef) -> Entity<'a, Utf8PathBuf> { 126 | Entity { 127 | root: self.root, 128 | prefix: self.path(path), 129 | } 130 | } 131 | 132 | fn path(&self, path: impl AsRef) -> Utf8PathBuf { 133 | self.prefix.as_ref().join(path) 134 | } 135 | 136 | fn meta_path(&self) -> Utf8PathBuf { 137 | self.path("meta.json") 138 | } 139 | 140 | fn content_path(&self) -> Utf8PathBuf { 141 | self.path("content") 142 | } 143 | 144 | pub(super) async fn create_from_reader( 145 | &self, 146 | meta: Meta, 147 | rdr: impl Unpin + AsyncRead, 148 | ) -> Result<(), CreateError> { 149 | trace!(target: "app::store::Entity::create_from_reader", "create entity at `{}`", self.prefix.as_ref()); 150 | let meta_json = serde_json::to_vec(&meta) 151 | .context("failed to encode metadata") 152 | .map_err(CreateError::Internal)?; 153 | try_join!( 154 | self.root 155 | .write(self.meta_path(), meta_json) 156 | .map_err(|e| match e.kind() { 157 | io::ErrorKind::AlreadyExists => CreateError::Occupied, 158 | _ => CreateError::Internal( 159 | anyhow::Error::new(e).context("failed to write metadata"), 160 | ), 161 | }) 162 | .map_err(|e| { 163 | debug!(target: "app::store::Entity::create_from_reader", "failed to create meta file `{:?}`", e); 164 | e 165 | }), 166 | create_verified(self.root, self.content_path(), meta.hash, meta.size, rdr).map_err(|e| { 167 | debug!(target: "app::store::Entity::create_from_reader", "failed to create content file `{:?}`", e); 168 | e 169 | }) 170 | )?; 171 | Ok(()) 172 | } 173 | 174 | pub(super) async fn create_json( 175 | &self, 176 | meta: Meta, 177 | val: &impl Serialize, 178 | ) -> Result<(), CreateError> { 179 | let buf = serde_json::to_vec(val) 180 | .context("failed to encode value to JSON") 181 | .map_err(CreateError::Internal)?; 182 | self.create_from_reader(meta, buf.as_slice()).await 
183 | } 184 | 185 | pub(super) async fn create_dir( 186 | &self, 187 | path: impl AsRef, 188 | ) -> Result<(), CreateError> { 189 | let path = self.path(path); 190 | debug_assert_ne!(path, self.meta_path()); 191 | debug_assert_ne!(path, self.content_path()); 192 | 193 | trace!(target: "app::store::Entity::create_dir", "create directory at `{path}`"); 194 | self.root 195 | .create_dir_with(path, DirBuilder::new().mode(0o700)) 196 | .map_err(|e| match e.kind() { 197 | io::ErrorKind::AlreadyExists => CreateError::Occupied, 198 | _ => CreateError::Internal( 199 | anyhow::Error::new(e).context("failed to create directory"), 200 | ), 201 | }) 202 | .map_err(|e| { 203 | debug!(target: "app::store::Entity::create_dir", "failed to create directory: `{:?}`", e); 204 | e 205 | }) 206 | } 207 | 208 | pub(super) async fn read_dir( 209 | &self, 210 | path: impl AsRef, 211 | ) -> Result> { 212 | self.root 213 | .read_dir(self.path(path)) 214 | .await 215 | .map_err(|e| match e.kind() { 216 | io::ErrorKind::NotFound => GetError::NotFound, 217 | _ => GetError::Internal(anyhow::Error::new(e).context("failed to read directory")), 218 | }) 219 | } 220 | 221 | /// Returns metadata of the entity. 222 | pub async fn get_meta(&self) -> Result> { 223 | let buf = self 224 | .root 225 | .read(self.meta_path()) 226 | .await 227 | .map_err(|e| match e.kind() { 228 | io::ErrorKind::NotFound => GetError::NotFound, 229 | _ => GetError::Internal(anyhow::Error::new(e).context("failed to read metadata")), 230 | })?; 231 | serde_json::from_slice(&buf) 232 | .context("failed to decode metadata") 233 | .map_err(GetError::Internal) 234 | } 235 | 236 | /// Returns contents of the entity as [AsyncRead]. 
237 | pub async fn get_content(&self) -> Result> { 238 | self.root 239 | .open(self.content_path()) 240 | .map_err(|e| match e.kind() { 241 | io::ErrorKind::NotFound => GetError::NotFound, 242 | _ => { 243 | GetError::Internal(anyhow::Error::new(e).context("failed to open content file")) 244 | } 245 | }) 246 | .await 247 | } 248 | 249 | /// Reads contents of the entity. 250 | pub async fn read_content(&self) -> Result, GetError> { 251 | self.root 252 | .read(self.content_path()) 253 | .map_err(|e| match e.kind() { 254 | io::ErrorKind::NotFound => GetError::NotFound, 255 | _ => { 256 | GetError::Internal(anyhow::Error::new(e).context("failed to read content file")) 257 | } 258 | }) 259 | .await 260 | } 261 | 262 | /// Returns the contents of the entity as JSON. 263 | #[allow(single_use_lifetimes)] 264 | pub async fn get_content_json(&self) -> Result> 265 | where 266 | for<'de> T: Deserialize<'de>, 267 | { 268 | let buf = self.read_content().await?; 269 | serde_json::from_slice(&buf) 270 | .context("failed to decode content as JSON") 271 | .map_err(GetError::Internal) 272 | } 273 | 274 | /// Returns metadata of the entity and a reader of its contents. 275 | pub async fn get(&self) -> Result<(Meta, impl '_ + AsyncRead), GetError> { 276 | try_join!(self.get_meta(), self.get_content()) 277 | } 278 | 279 | /// Returns metadata of the entity and writes its contents into `dst`. 280 | pub async fn get_to_writer( 281 | &self, 282 | dst: &mut (impl Unpin + AsyncWrite), 283 | ) -> Result> { 284 | let (meta, rdr) = self.get().await.map_err(GetToWriterError::Get)?; 285 | _ = copy(rdr, dst).await.map_err(GetToWriterError::IO)?; 286 | // TODO: Validate size 287 | Ok(meta) 288 | } 289 | } 290 | -------------------------------------------------------------------------------- /crates/server/src/store/mod.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | mod entity; 4 | mod repo; 5 | mod tag; 6 | mod tree; 7 | mod user; 8 | 9 | pub use entity::*; 10 | pub use repo::*; 11 | pub use tag::*; 12 | pub use tree::*; 13 | pub use user::*; 14 | 15 | use drawbridge_type::{Meta, RepositoryContext, TagContext, TreeContext, UserContext, UserRecord}; 16 | 17 | use async_std::io; 18 | use camino::{Utf8Path, Utf8PathBuf}; 19 | use cap_async_std::fs_utf8::Dir; 20 | use futures::try_join; 21 | 22 | #[derive(Debug)] 23 | pub struct Store { 24 | root: Dir, 25 | } 26 | 27 | async fn upsert_dir(root: &Dir, path: impl AsRef) -> io::Result<()> { 28 | let path = path.as_ref(); 29 | if !root.is_dir(path).await { 30 | root.create_dir(path) 31 | } else { 32 | Ok(()) 33 | } 34 | } 35 | 36 | impl Store { 37 | /// Initalizes a new [Store] at `root` 38 | pub async fn new(root: Dir) -> io::Result { 39 | upsert_dir(&root, "users").await?; 40 | Ok(Self { root }) 41 | } 42 | 43 | pub fn user(&self, UserContext { name }: &UserContext) -> User<'_, Utf8PathBuf> { 44 | Entity::new(&self.root) 45 | .child(format!("users/{name}")) 46 | .into() 47 | } 48 | 49 | pub async fn create_user( 50 | &self, 51 | cx: &UserContext, 52 | meta: Meta, 53 | rec: &UserRecord, 54 | ) -> Result, CreateError> { 55 | let user = self.user(cx); 56 | user.create_dir("").await?; 57 | try_join!(user.create_json(meta, rec), user.create_dir("repos"),)?; 58 | Ok(user) 59 | } 60 | 61 | pub fn repository<'a>( 62 | &'a self, 63 | RepositoryContext { owner, name }: &'a RepositoryContext, 64 | ) -> Repository<'a> { 65 | self.user(owner).repository(name) 66 | } 67 | 68 | pub fn tag<'a>(&'a self, TagContext { repository, name }: &'a TagContext) -> Tag<'a> { 69 | self.repository(repository).tag(name) 70 | } 71 | 72 | pub fn tree<'a>(&'a self, TreeContext { tag, path }: &'a TreeContext) -> Node<'a> { 73 | self.tag(tag).node(path) 74 | } 75 | } 76 | -------------------------------------------------------------------------------- 
/crates/server/src/store/repo.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::{CreateError, Entity, GetError, Tag}; 5 | 6 | use std::ops::Deref; 7 | 8 | use drawbridge_type::digest::{Algorithms, ContentDigest}; 9 | use drawbridge_type::{Meta, RepositoryConfig, TagEntry, TagName}; 10 | 11 | use anyhow::{anyhow, Context}; 12 | use camino::{Utf8Path, Utf8PathBuf}; 13 | 14 | #[repr(transparent)] 15 | #[derive(Copy, Clone, Debug)] 16 | pub struct Repository<'a, P = Utf8PathBuf>(Entity<'a, P>); 17 | 18 | impl<'a, P> Deref for Repository<'a, P> { 19 | type Target = Entity<'a, P>; 20 | 21 | fn deref(&self) -> &Self::Target { 22 | &self.0 23 | } 24 | } 25 | 26 | impl<'a, P> From> for Repository<'a, P> { 27 | fn from(entity: Entity<'a, P>) -> Self { 28 | Self(entity) 29 | } 30 | } 31 | 32 | impl<'a, P: AsRef> Repository<'a, P> { 33 | pub async fn get_json(&self) -> Result> { 34 | self.get_content_json().await 35 | } 36 | 37 | pub async fn is_public(&self) -> Result> { 38 | let conf = self.get_json().await?; 39 | Ok(conf.public) 40 | } 41 | 42 | pub async fn tags(&self) -> Result, GetError> { 43 | self.read_dir("tags") 44 | .await? 45 | .try_fold(vec![], |mut names, entry| { 46 | let name = entry? 47 | .file_name() 48 | .context("failed to read tag name")? 
49 | .parse() 50 | .context("failed to parse tag name")?; 51 | names.push(name); 52 | Ok(names) 53 | }) 54 | .map_err(GetError::Internal) 55 | } 56 | 57 | pub async fn tags_json(&self) -> Result<(ContentDigest, Vec), GetError> { 58 | // TODO: Optimize hash computation 59 | let tags = self.tags().await?; 60 | let buf = serde_json::to_vec(&tags) 61 | .context("failed to encode tags as JSON") 62 | .map_err(GetError::Internal)?; 63 | let (n, hash) = Algorithms::default() 64 | .read_sync(&buf[..]) 65 | .context("failed to compute tag digest") 66 | .map_err(GetError::Internal)?; 67 | if n != buf.len() as u64 { 68 | return Err(GetError::Internal(anyhow!( 69 | "invalid amount of bytes read, expected: {}, got {n}", 70 | buf.len(), 71 | ))); 72 | } 73 | Ok((hash, buf)) 74 | } 75 | 76 | pub fn tag(&self, name: &TagName) -> Tag<'a, Utf8PathBuf> { 77 | self.child(format!("tags/{name}")).into() 78 | } 79 | 80 | pub async fn create_tag( 81 | &self, 82 | name: &TagName, 83 | meta: Meta, 84 | entry: &TagEntry, 85 | ) -> Result, CreateError> { 86 | let tag = self.tag(name); 87 | tag.create_dir("").await?; 88 | tag.create_json(meta, entry).await?; 89 | Ok(tag) 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /crates/server/src/store/tag.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::{CreateError, Entity, Node}; 5 | 6 | use std::ops::Deref; 7 | 8 | use drawbridge_type::{Meta, TreeDirectory, TreeEntry, TreePath}; 9 | 10 | use camino::{Utf8Path, Utf8PathBuf}; 11 | use futures::{try_join, AsyncRead}; 12 | use tracing::debug; 13 | 14 | #[repr(transparent)] 15 | #[derive(Copy, Clone, Debug)] 16 | pub struct Tag<'a, P = Utf8PathBuf>(Entity<'a, P>); 17 | 18 | impl<'a, P> Deref for Tag<'a, P> { 19 | type Target = Entity<'a, P>; 20 | 21 | fn deref(&self) -> &Self::Target { 22 | &self.0 23 | } 24 | } 25 | 26 | impl<'a, P> From> for Tag<'a, P> { 27 | fn from(entity: Entity<'a, P>) -> Self { 28 | Self(entity) 29 | } 30 | } 31 | 32 | impl<'a, P: AsRef> Tag<'a, P> { 33 | pub fn node(&self, path: &TreePath) -> Node<'a, Utf8PathBuf> { 34 | if path.is_empty() { 35 | self.0.child("tree").into() 36 | } else { 37 | self.0 38 | .child(format!("tree/entries/{}", path.intersperse("/entries/"))) 39 | .into() 40 | } 41 | } 42 | 43 | pub async fn create_file_node( 44 | &self, 45 | path: &TreePath, 46 | meta: Meta, 47 | rdr: impl Unpin + AsyncRead, 48 | ) -> Result, CreateError> { 49 | // TODO: Validate node hash against parents' expected values 50 | // https://github.com/profianinc/drawbridge/issues/77 51 | let node = self.node(path); 52 | node.create_dir("").await.map_err(|e| { 53 | debug!(target: "app::store::Tag::create_file_node", "failed to create content directory: {:?}", e); 54 | e 55 | })?; 56 | node.create_from_reader(meta, rdr).await?; 57 | Ok(node) 58 | } 59 | 60 | pub async fn create_directory_node( 61 | &self, 62 | path: &TreePath, 63 | meta: Meta, 64 | dir: &TreeDirectory, 65 | ) -> Result, CreateError> { 66 | // TODO: Validate node hash against parents' expected values 67 | // https://github.com/profianinc/drawbridge/issues/77 68 | let node = self.node(path); 69 | node.create_dir("").await.map_err(|e| { 70 | debug!(target: "app::store::Tag::create_directory_node", "failed to create content 
directory: {:?}", e); 71 | e 72 | })?; 73 | try_join!(node.create_json(meta, dir), node.create_dir("entries"))?; 74 | Ok(node) 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /crates/server/src/store/tree.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::Entity; 5 | 6 | use std::ops::Deref; 7 | 8 | use camino::Utf8PathBuf; 9 | 10 | #[repr(transparent)] 11 | #[derive(Copy, Clone, Debug)] 12 | pub struct Node<'a, P = Utf8PathBuf>(Entity<'a, P>); 13 | 14 | impl<'a, P> Deref for Node<'a, P> { 15 | type Target = Entity<'a, P>; 16 | 17 | fn deref(&self) -> &Self::Target { 18 | &self.0 19 | } 20 | } 21 | 22 | impl<'a, P> From> for Node<'a, P> { 23 | fn from(entity: Entity<'a, P>) -> Self { 24 | Self(entity) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /crates/server/src/store/user.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::{CreateError, Entity, Repository}; 5 | 6 | use std::ops::Deref; 7 | 8 | use drawbridge_type::{Meta, RepositoryConfig, RepositoryName}; 9 | 10 | use camino::{Utf8Path, Utf8PathBuf}; 11 | use futures::try_join; 12 | 13 | #[repr(transparent)] 14 | #[derive(Copy, Clone, Debug)] 15 | pub struct User<'a, P = Utf8PathBuf>(Entity<'a, P>); 16 | 17 | impl<'a, P> Deref for User<'a, P> { 18 | type Target = Entity<'a, P>; 19 | 20 | fn deref(&self) -> &Self::Target { 21 | &self.0 22 | } 23 | } 24 | 25 | impl<'a, P> From> for User<'a, P> { 26 | fn from(entity: Entity<'a, P>) -> Self { 27 | Self(entity) 28 | } 29 | } 30 | 31 | impl<'a, P: AsRef> User<'a, P> { 32 | pub fn repository(&self, name: &RepositoryName) -> Repository<'a, Utf8PathBuf> { 33 | self.0.child(format!("repos/{name}")).into() 34 | } 35 | 36 | pub async fn create_repository( 37 | &self, 38 | name: &RepositoryName, 39 | meta: Meta, 40 | conf: &RepositoryConfig, 41 | ) -> Result, CreateError> { 42 | let repo = self.repository(name); 43 | repo.create_dir("").await?; 44 | try_join!(repo.create_json(meta, conf), repo.create_dir("tags"))?; 45 | Ok(repo) 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /crates/server/src/tags/get.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::Store; 5 | use crate::auth::assert_repository_read; 6 | 7 | use drawbridge_type::TagContext; 8 | 9 | use async_std::sync::Arc; 10 | use axum::body::Body; 11 | use axum::http::Request; 12 | use axum::response::IntoResponse; 13 | use axum::Extension; 14 | use tracing::{debug, trace}; 15 | 16 | pub async fn get( 17 | Extension(ref store): Extension>, 18 | cx: TagContext, 19 | req: Request, 20 | ) -> impl IntoResponse { 21 | trace!(target: "app::tags::get", "called for `{cx}`"); 22 | 23 | let (repo, _) = assert_repository_read(store, &cx.repository, req) 24 | .await 25 | .map_err(IntoResponse::into_response)?; 26 | 27 | // TODO: Stream body 28 | // https://github.com/profianinc/drawbridge/issues/56 29 | let mut body = vec![]; 30 | repo.tag(&cx.name) 31 | .get_to_writer(&mut body) 32 | .await 33 | .map_err(|e| { 34 | debug!(target: "app::tags::get", "failed for `{cx}`: {:?}", e); 35 | e.into_response() 36 | }) 37 | .map(|meta| (meta, body)) 38 | } 39 | -------------------------------------------------------------------------------- /crates/server/src/tags/head.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::Store; 5 | use crate::auth::assert_repository_read; 6 | 7 | use drawbridge_type::TagContext; 8 | 9 | use async_std::sync::Arc; 10 | use axum::body::Body; 11 | use axum::http::Request; 12 | use axum::response::IntoResponse; 13 | use axum::Extension; 14 | use tracing::{debug, trace}; 15 | 16 | pub async fn head( 17 | Extension(ref store): Extension>, 18 | cx: TagContext, 19 | req: Request, 20 | ) -> impl IntoResponse { 21 | trace!(target: "app::tags::head", "called for `{cx}`"); 22 | 23 | assert_repository_read(store, &cx.repository, req) 24 | .await 25 | .map_err(IntoResponse::into_response) 26 | .map(|(repo, _)| repo)? 
27 | .tag(&cx.name) 28 | .get_meta() 29 | .await 30 | .map_err(|e| { 31 | debug!(target: "app::tags::head", "failed for `{cx}`: {:?}", e); 32 | e.into_response() 33 | }) 34 | .map(|meta| (meta, ())) 35 | } 36 | -------------------------------------------------------------------------------- /crates/server/src/tags/mod.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | mod get; 4 | mod head; 5 | mod put; 6 | mod query; 7 | 8 | pub use get::*; 9 | pub use head::*; 10 | pub use put::*; 11 | pub use query::*; 12 | -------------------------------------------------------------------------------- /crates/server/src/tags/put.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::{OidcClaims, ScopeContext, ScopeLevel, Store}; 5 | 6 | use drawbridge_jose::jws::Jws; 7 | use drawbridge_jose::MediaTyped; 8 | use drawbridge_type::{Meta, TagContext, TagEntry, TreeEntry}; 9 | 10 | use async_std::sync::Arc; 11 | use axum::body::Body; 12 | use axum::extract::RequestParts; 13 | use axum::http::{Request, StatusCode}; 14 | use axum::response::IntoResponse; 15 | use axum::{Extension, Json}; 16 | use tracing::{debug, trace}; 17 | 18 | pub async fn put( 19 | Extension(store): Extension>, 20 | claims: OidcClaims, 21 | cx: TagContext, 22 | meta: Meta, 23 | req: Request, 24 | ) -> impl IntoResponse { 25 | trace!(target: "app::tags::put", "called for `{cx}`"); 26 | 27 | if meta.hash.is_empty() { 28 | return Err(( 29 | StatusCode::BAD_REQUEST, 30 | "At least one content digest value must be specified", 31 | ) 32 | .into_response()); 33 | } 34 | 35 | let user = claims 36 | .assert_user( 37 | &store, 38 | &cx.repository.owner, 39 | ScopeContext::Tag, 40 | ScopeLevel::Write, 41 | ) 42 | .await 43 | 
.map_err(IntoResponse::into_response)?; 44 | 45 | let mut req = RequestParts::new(req); 46 | let entry = match meta.mime.to_string().as_str() { 47 | TreeEntry::<()>::TYPE => req.extract().await.map(|Json(v)| TagEntry::Unsigned(v)), 48 | Jws::TYPE => req.extract().await.map(|Json(v)| TagEntry::Signed(v)), 49 | _ => return Err((StatusCode::BAD_REQUEST, "Invalid content type").into_response()), 50 | } 51 | .map_err(|e| (StatusCode::BAD_REQUEST, e).into_response())?; 52 | user.repository(&cx.repository.name) 53 | .create_tag(&cx.name, meta, &entry) 54 | .await 55 | .map_err(|e| { 56 | debug!(target: "app::tags::put", "failed for `{cx}`: {:?}", e); 57 | e.into_response() 58 | }) 59 | .map(|_| StatusCode::CREATED) 60 | } 61 | -------------------------------------------------------------------------------- /crates/server/src/tags/query.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::Store; 5 | use crate::auth::assert_repository_read; 6 | 7 | use drawbridge_type::{Meta, RepositoryContext}; 8 | 9 | use async_std::sync::Arc; 10 | use axum::body::Body; 11 | use axum::http::Request; 12 | use axum::response::IntoResponse; 13 | use axum::Extension; 14 | use mime::APPLICATION_JSON; 15 | use tracing::{debug, trace}; 16 | 17 | pub async fn query( 18 | Extension(store): Extension>, 19 | cx: RepositoryContext, 20 | req: Request, 21 | ) -> impl IntoResponse { 22 | trace!(target: "app::tags::query", "called for `{cx}`"); 23 | 24 | assert_repository_read(&store, &cx, req) 25 | .await 26 | .map_err(IntoResponse::into_response) 27 | .map(|(repo, _)| repo)? 
28 | .tags_json() 29 | .await 30 | .map(|(hash, buf)| { 31 | ( 32 | Meta { 33 | hash, 34 | size: buf.len() as _, 35 | mime: APPLICATION_JSON, 36 | }, 37 | buf, 38 | ) 39 | }) 40 | .map_err(|e| { 41 | debug!(target: "app::tags::query", "failed: {:?}", e); 42 | e.into_response() 43 | }) 44 | } 45 | -------------------------------------------------------------------------------- /crates/server/src/trees/get.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::{Store, TrustedCertificate}; 5 | use crate::auth::assert_repository_read; 6 | 7 | use drawbridge_type::TreeContext; 8 | 9 | use async_std::sync::Arc; 10 | use axum::body::Body; 11 | use axum::http::Request; 12 | use axum::response::IntoResponse; 13 | use axum::Extension; 14 | use tracing::{debug, trace}; 15 | 16 | pub async fn get( 17 | Extension(ref store): Extension>, 18 | cert: Option>, 19 | cx: TreeContext, 20 | req: Request, 21 | ) -> impl IntoResponse { 22 | trace!(target: "app::trees::get", "called for `{cx}`"); 23 | 24 | let repo = if cert.is_none() { 25 | assert_repository_read(store, &cx.tag.repository, req) 26 | .await 27 | .map_err(IntoResponse::into_response) 28 | .map(|(repo, _)| repo)? 
29 | } else { 30 | store.repository(&cx.tag.repository) 31 | }; 32 | 33 | // TODO: Stream body 34 | // https://github.com/profianinc/drawbridge/issues/56 35 | let mut body = vec![]; 36 | repo.tag(&cx.tag.name) 37 | .node(&cx.path) 38 | .get_to_writer(&mut body) 39 | .await 40 | .map_err(|e| { 41 | debug!(target: "app::trees::get", "failed for `{cx}`: {:?}", e); 42 | e.into_response() 43 | }) 44 | .map(|meta| (meta, body)) 45 | } 46 | -------------------------------------------------------------------------------- /crates/server/src/trees/head.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::{Store, TrustedCertificate}; 5 | use crate::auth::assert_repository_read; 6 | 7 | use drawbridge_type::TreeContext; 8 | 9 | use async_std::sync::Arc; 10 | use axum::body::Body; 11 | use axum::http::Request; 12 | use axum::response::IntoResponse; 13 | use axum::Extension; 14 | use tracing::{debug, trace}; 15 | 16 | pub async fn head( 17 | Extension(ref store): Extension>, 18 | cert: Option>, 19 | cx: TreeContext, 20 | req: Request, 21 | ) -> impl IntoResponse { 22 | trace!(target: "app::trees::head", "called for `{cx}`"); 23 | 24 | if cert.is_none() { 25 | assert_repository_read(store, &cx.tag.repository, req) 26 | .await 27 | .map_err(IntoResponse::into_response) 28 | .map(|(repo, _)| repo)? 29 | } else { 30 | store.repository(&cx.tag.repository) 31 | } 32 | .tag(&cx.tag.name) 33 | .node(&cx.path) 34 | .get_meta() 35 | .await 36 | .map_err(|e| { 37 | debug!(target: "app::trees::head", "failed for `{cx}`: {:?}", e); 38 | e.into_response() 39 | }) 40 | .map(|meta| (meta, ())) 41 | } 42 | -------------------------------------------------------------------------------- /crates/server/src/trees/mod.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | mod get; 4 | mod head; 5 | mod put; 6 | 7 | pub use get::*; 8 | pub use head::*; 9 | pub use put::*; 10 | -------------------------------------------------------------------------------- /crates/server/src/trees/put.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::{OidcClaims, ScopeContext, ScopeLevel, Store}; 5 | 6 | use drawbridge_type::{Meta, TreeContext, TreeDirectory}; 7 | 8 | use async_std::sync::Arc; 9 | use axum::body::Body; 10 | use axum::extract::{BodyStream, RequestParts}; 11 | use axum::http::{Request, StatusCode}; 12 | use axum::response::IntoResponse; 13 | use axum::{Extension, Json}; 14 | use futures::{io, TryStreamExt}; 15 | use tracing::{debug, trace}; 16 | 17 | pub async fn put( 18 | Extension(ref store): Extension>, 19 | claims: OidcClaims, 20 | cx: TreeContext, 21 | meta: Meta, 22 | req: Request, 23 | ) -> impl IntoResponse { 24 | trace!(target: "app::trees::put", "called for `{cx}`"); 25 | 26 | if meta.hash.is_empty() { 27 | return Err(( 28 | StatusCode::BAD_REQUEST, 29 | "At least one content digest value must be specified", 30 | ) 31 | .into_response()); 32 | } 33 | 34 | let user = claims 35 | .assert_user( 36 | store, 37 | &cx.tag.repository.owner, 38 | ScopeContext::Tag, 39 | ScopeLevel::Write, 40 | ) 41 | .await 42 | .map_err(IntoResponse::into_response)?; 43 | 44 | let mut req = RequestParts::new(req); 45 | let tag = user.repository(&cx.tag.repository.name).tag(&cx.tag.name); 46 | match meta.mime.to_string().as_str() { 47 | TreeDirectory::<()>::TYPE => { 48 | let dir = req 49 | .extract() 50 | .await 51 | .map(|Json(v)| v) 52 | .map_err(|e| (StatusCode::BAD_REQUEST, e).into_response())?; 53 | tag.create_directory_node(&cx.path, meta, &dir).await 54 | } 55 | _ => { 56 | let body = req 57 | .extract::() 58 | .await 59 | .map_err(|e| 
(StatusCode::BAD_REQUEST, e).into_response())? 60 | .map_err(|e| io::Error::new(io::ErrorKind::Other, e)); 61 | tag.create_file_node(&cx.path, meta, body.into_async_read()) 62 | .await 63 | } 64 | } 65 | .map_err(|e| { 66 | debug!(target: "app::trees::put", "failed for `{cx}`: {:?}", e); 67 | e.into_response() 68 | }) 69 | .map(|_| StatusCode::CREATED) 70 | } 71 | -------------------------------------------------------------------------------- /crates/server/src/users/get.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::{OidcClaims, ScopeContext, ScopeLevel, Store}; 5 | 6 | use drawbridge_type::UserContext; 7 | 8 | use async_std::sync::Arc; 9 | use axum::response::IntoResponse; 10 | use axum::Extension; 11 | use tracing::{debug, trace}; 12 | 13 | pub async fn get( 14 | Extension(store): Extension>, 15 | claims: OidcClaims, 16 | cx: UserContext, 17 | ) -> impl IntoResponse { 18 | trace!(target: "app::users::get", "called for `{cx}`"); 19 | 20 | let user = claims 21 | .assert_user(&store, &cx, ScopeContext::User, ScopeLevel::Read) 22 | .await 23 | .map_err(IntoResponse::into_response)?; 24 | 25 | // TODO: Stream body 26 | // https://github.com/profianinc/drawbridge/issues/56 27 | let mut body = vec![]; 28 | user.get_to_writer(&mut body) 29 | .await 30 | .map_err(|e| { 31 | debug!(target: "app::users::get", "failed for `{cx}`: {:?}", e); 32 | e.into_response() 33 | }) 34 | .map(|meta| (meta, body)) 35 | } 36 | -------------------------------------------------------------------------------- /crates/server/src/users/head.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::{OidcClaims, ScopeContext, ScopeLevel, Store}; 5 | 6 | use drawbridge_type::UserContext; 7 | 8 | use async_std::sync::Arc; 9 | use axum::response::IntoResponse; 10 | use axum::Extension; 11 | use tracing::{debug, trace}; 12 | 13 | pub async fn head( 14 | Extension(store): Extension>, 15 | claims: OidcClaims, 16 | cx: UserContext, 17 | ) -> impl IntoResponse { 18 | trace!(target: "app::users::head", "called for `{cx}`"); 19 | 20 | claims 21 | .assert_user(&store, &cx, ScopeContext::User, ScopeLevel::Read) 22 | .await 23 | .map_err(IntoResponse::into_response)? 24 | .get_meta() 25 | .await 26 | .map_err(|e| { 27 | debug!(target: "app::users::head", "failed for `{cx}`: {:?}", e); 28 | e.into_response() 29 | }) 30 | .map(|meta| (meta, ())) 31 | } 32 | -------------------------------------------------------------------------------- /crates/server/src/users/mod.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | mod get; 4 | mod head; 5 | mod put; 6 | 7 | pub use get::*; 8 | pub use head::*; 9 | pub use put::*; 10 | -------------------------------------------------------------------------------- /crates/server/src/users/put.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | use super::super::{OidcClaims, ScopeContext, ScopeLevel, Store}; 5 | 6 | use drawbridge_type::{Meta, UserContext, UserRecord}; 7 | 8 | use async_std::sync::Arc; 9 | use axum::http::StatusCode; 10 | use axum::response::IntoResponse; 11 | use axum::{Extension, Json}; 12 | use tracing::{debug, trace}; 13 | 14 | pub async fn put( 15 | Extension(store): Extension>, 16 | claims: OidcClaims, 17 | cx: UserContext, 18 | meta: Meta, 19 | Json(record): Json, 20 | ) -> impl IntoResponse { 21 | trace!(target: "app::users::put", "called for `{cx}`"); 22 | 23 | claims 24 | .assert_scope(ScopeContext::User, ScopeLevel::Write) 25 | .map_err(IntoResponse::into_response)?; 26 | 27 | if record.subject != claims.subject() { 28 | return Err((StatusCode::UNAUTHORIZED, "OpenID Connect subject mismatch").into_response()); 29 | } 30 | 31 | store 32 | .create_user(&cx, meta, &record) 33 | .await 34 | .map_err(|e| { 35 | debug!(target: "app::users::put", "failed for `{cx}`: {:?}", e); 36 | e.into_response() 37 | }) 38 | .map(|_| StatusCode::CREATED) 39 | } 40 | -------------------------------------------------------------------------------- /crates/type/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "drawbridge-type" 3 | version = "0.4.3" 4 | authors = ["Profian Inc", "The Enarx Project Developers"] 5 | edition = "2021" 6 | license = "Apache-2.0" 7 | homepage = "https://github.com/enarx/drawbridge" 8 | repository = "https://github.com/enarx/drawbridge" 9 | description = "Drawbridge type definitions." 
10 | keywords = ["drawbridge"] 11 | 12 | [dependencies] 13 | # Internal dependencies 14 | drawbridge-byte = { workspace = true } 15 | drawbridge-jose = { workspace = true } 16 | 17 | # External dependencies 18 | anyhow = { workspace = true, features = ["std"] } 19 | axum = { workspace = true, features = ["headers", "json"], optional = true } 20 | base64 = { workspace = true, features = ["std"] } 21 | futures = { workspace = true, features = ["std"] } 22 | headers = { workspace = true, optional = true } 23 | mime = { workspace = true } 24 | semver = { workspace = true, features = ["serde", "std"] } 25 | serde = { workspace = true, features = ["derive"] } 26 | serde_json = { workspace = true, features = ["std"] } 27 | sha2 = { workspace = true, features = ["std"] } 28 | walkdir = { workspace = true } 29 | 30 | [dev-dependencies] 31 | async-std = { workspace = true, features = ["attributes", "default"] } 32 | tempfile = { workspace = true } 33 | 34 | [features] 35 | default = [] 36 | server = ["axum", "futures/async-await", "headers"] 37 | -------------------------------------------------------------------------------- /crates/type/src/digest/algorithm.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | use super::{Error, Reader, Writer}; 4 | 5 | use std::str::FromStr; 6 | 7 | use serde::{de::Visitor, Deserialize, Serialize}; 8 | use sha2::{digest::DynDigest, Digest as _, Sha224, Sha256, Sha384, Sha512}; 9 | 10 | /// A hashing algorithm 11 | #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] 12 | #[non_exhaustive] 13 | pub enum Algorithm { 14 | Sha224, 15 | Sha256, 16 | Sha384, 17 | Sha512, 18 | } 19 | 20 | impl AsRef for Algorithm { 21 | fn as_ref(&self) -> &str { 22 | match self { 23 | Self::Sha224 => "sha-224", 24 | Self::Sha256 => "sha-256", 25 | Self::Sha384 => "sha-384", 26 | Self::Sha512 => "sha-512", 27 | } 28 | } 29 | } 30 | 31 | impl std::fmt::Display for 
Algorithm { 32 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 33 | f.write_str(self.as_ref()) 34 | } 35 | } 36 | 37 | impl FromStr for Algorithm { 38 | type Err = Error; 39 | 40 | fn from_str(s: &str) -> Result { 41 | match &*s.to_ascii_lowercase() { 42 | "sha-224" => Ok(Self::Sha224), 43 | "sha-256" => Ok(Self::Sha256), 44 | "sha-384" => Ok(Self::Sha384), 45 | "sha-512" => Ok(Self::Sha512), 46 | _ => Err(Error::UnknownAlgorithm), 47 | } 48 | } 49 | } 50 | 51 | impl Serialize for Algorithm { 52 | fn serialize(&self, serializer: S) -> Result { 53 | self.as_ref().serialize(serializer) 54 | } 55 | } 56 | 57 | impl<'de> Deserialize<'de> for Algorithm { 58 | fn deserialize>(deserializer: D) -> Result { 59 | struct StrVisitor; 60 | 61 | impl Visitor<'_> for StrVisitor { 62 | type Value = Algorithm; 63 | 64 | fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 65 | formatter.write_str("a Content-Digest algorithm name") 66 | } 67 | 68 | fn visit_str(self, v: &str) -> Result { 69 | v.parse().map_err(|_| E::custom("unknown algorithm")) 70 | } 71 | 72 | fn visit_string(self, v: String) -> Result { 73 | v.parse().map_err(|_| E::custom("unknown algorithm")) 74 | } 75 | } 76 | 77 | deserializer.deserialize_str(StrVisitor) 78 | } 79 | } 80 | 81 | impl Algorithm { 82 | pub(crate) fn hasher(self) -> Box { 83 | match self { 84 | Self::Sha224 => Box::new(Sha224::new()), 85 | Self::Sha256 => Box::new(Sha256::new()), 86 | Self::Sha384 => Box::new(Sha384::new()), 87 | Self::Sha512 => Box::new(Sha512::new()), 88 | } 89 | } 90 | 91 | /// Creates a reader instance 92 | pub fn reader(&self, reader: T) -> Reader { 93 | Reader::new(reader, [*self]) 94 | } 95 | 96 | /// Creates a writer instance 97 | pub fn writer(&self, writer: T) -> Writer { 98 | Writer::new(writer, [*self]) 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /crates/type/src/digest/algorithms.rs: 
-------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | use super::{Algorithm, ContentDigest, Reader, Writer}; 4 | 5 | use std::collections::BTreeSet; 6 | use std::ops::{Deref, DerefMut}; 7 | 8 | use futures::io::{self, copy, sink, AsyncRead}; 9 | use serde::{Deserialize, Serialize}; 10 | 11 | /// A set of hashing algorithms 12 | #[derive(Clone, Debug, Serialize, Deserialize)] 13 | pub struct Algorithms(BTreeSet); 14 | 15 | impl Default for Algorithms { 16 | fn default() -> Self { 17 | let mut set = BTreeSet::new(); 18 | assert!(set.insert(Algorithm::Sha224)); 19 | assert!(set.insert(Algorithm::Sha256)); 20 | assert!(set.insert(Algorithm::Sha384)); 21 | assert!(set.insert(Algorithm::Sha512)); 22 | Self(set) 23 | } 24 | } 25 | 26 | impl From> for Algorithms { 27 | fn from(value: BTreeSet) -> Self { 28 | Self(value) 29 | } 30 | } 31 | 32 | impl Deref for Algorithms { 33 | type Target = BTreeSet; 34 | 35 | fn deref(&self) -> &Self::Target { 36 | &self.0 37 | } 38 | } 39 | 40 | impl DerefMut for Algorithms { 41 | fn deref_mut(&mut self) -> &mut Self::Target { 42 | &mut self.0 43 | } 44 | } 45 | 46 | impl Algorithms { 47 | /// Creates a reader instance 48 | pub fn reader(&self, reader: T) -> Reader { 49 | Reader::new(reader, self.iter().cloned()) 50 | } 51 | 52 | /// Creates a writer instance 53 | pub fn writer(&self, writer: T) -> Writer { 54 | Writer::new(writer, self.iter().cloned()) 55 | } 56 | 57 | /// Calculates a digest from an async reader 58 | pub async fn read(&self, reader: impl Unpin + AsyncRead) -> io::Result<(u64, ContentDigest)> { 59 | let mut r = self.reader(reader); 60 | let n = copy(&mut r, &mut sink()).await?; 61 | Ok((n, r.digests())) 62 | } 63 | 64 | /// Calculates a digest from a sync reader 65 | pub fn read_sync(&self, reader: impl std::io::Read) -> io::Result<(u64, ContentDigest)> { 66 | let mut r = self.reader(reader); 67 | let n = std::io::copy(&mut r, &mut 
std::io::sink())?; 68 | Ok((n, r.digests())) 69 | } 70 | } 71 | 72 | #[cfg(test)] 73 | mod tests { 74 | use super::*; 75 | 76 | #[async_std::test] 77 | async fn digest() { 78 | let algorithms = Algorithms::default(); 79 | let rdr = &b"foo"[..]; 80 | let content_digest = "sha-224=:CAj2TmDViXn8tnbJbsk4Jw3qQkRa7vzTpOb42w==:,sha-256=:LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564=:,sha-384=:mMEf/f3VQGdrGhN8saIrKnA1DJpEFx1rEYDGvly7LuP3nVMsih3Z7y6OCOdSo7q7:,sha-512=:9/u6bgY2+JDlb7vzKD5STG+jIErimDgtYkdB0NxmODJuKCxBvl5CVNiCB3LFUYosWowMf37aGVlKfrU5RT4e1w==:" 81 | .parse::() 82 | .unwrap(); 83 | assert_eq!( 84 | algorithms.read(rdr).await.unwrap(), 85 | ("foo".len() as _, content_digest.clone()) 86 | ); 87 | assert_eq!( 88 | algorithms.read_sync(rdr).unwrap(), 89 | ("foo".len() as _, content_digest) 90 | ); 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /crates/type/src/digest/digests.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | use super::{Algorithm, Error, Reader, Verifier, Writer}; 4 | 5 | use std::collections::btree_map::IntoIter; 6 | use std::collections::BTreeMap; 7 | use std::ops::{Deref, DerefMut}; 8 | use std::str::FromStr; 9 | 10 | use drawbridge_byte::Bytes; 11 | use serde::{Deserialize, Serialize}; 12 | 13 | #[cfg(feature = "headers")] 14 | use headers::{Error as HeadErr, Header, HeaderName, HeaderValue}; 15 | 16 | /// A set of hashes for the same contents 17 | #[derive(Clone, Default, Debug, Serialize, Deserialize)] 18 | pub struct ContentDigest>(BTreeMap>) 19 | where 20 | H: AsRef<[u8]> + From>; 21 | 22 | impl ContentDigest 23 | where 24 | H: AsRef<[u8]> + From>, 25 | { 26 | /// Creates a reader instance 27 | pub fn reader(&self, reader: T) -> Reader { 28 | Reader::new(reader, self.iter().map(|x| *x.0)) 29 | } 30 | 31 | /// Creates a writer instance 32 | pub fn writer(&self, writer: T) -> Writer { 33 | Writer::new(writer, 
self.iter().map(|x| *x.0)) 34 | } 35 | 36 | /// Creates a verifier instance 37 | pub fn verifier(self, reader: T) -> Verifier { 38 | Verifier::new(self.reader(reader), self) 39 | } 40 | } 41 | 42 | impl From>> for ContentDigest 43 | where 44 | H: AsRef<[u8]> + From>, 45 | { 46 | fn from(value: BTreeMap>) -> Self { 47 | Self(value) 48 | } 49 | } 50 | 51 | impl Eq for ContentDigest where H: AsRef<[u8]> + From> {} 52 | impl PartialEq> for ContentDigest 53 | where 54 | T: AsRef<[u8]> + From>, 55 | U: AsRef<[u8]> + From>, 56 | { 57 | fn eq(&self, other: &ContentDigest) -> bool { 58 | if self.len() != other.len() { 59 | return false; 60 | } 61 | 62 | for (lhs, rhs) in self.0.iter().zip(other.0.iter()) { 63 | if lhs.0 != rhs.0 { 64 | return false; 65 | } 66 | 67 | if lhs.1.as_ref() != rhs.1.as_ref() { 68 | return false; 69 | } 70 | } 71 | 72 | true 73 | } 74 | } 75 | 76 | impl Deref for ContentDigest 77 | where 78 | H: AsRef<[u8]> + From>, 79 | { 80 | type Target = BTreeMap>; 81 | 82 | fn deref(&self) -> &Self::Target { 83 | &self.0 84 | } 85 | } 86 | 87 | impl DerefMut for ContentDigest 88 | where 89 | H: AsRef<[u8]> + From>, 90 | { 91 | fn deref_mut(&mut self) -> &mut Self::Target { 92 | &mut self.0 93 | } 94 | } 95 | 96 | impl std::fmt::Display for ContentDigest 97 | where 98 | H: AsRef<[u8]> + From>, 99 | { 100 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 101 | let mut comma = ""; 102 | 103 | for (algo, hash) in self.iter() { 104 | write!(f, "{comma}{algo}=:{hash}:")?; 105 | comma = ","; 106 | } 107 | 108 | Ok(()) 109 | } 110 | } 111 | 112 | impl FromStr for ContentDigest 113 | where 114 | H: AsRef<[u8]> + From>, 115 | { 116 | type Err = Error; 117 | 118 | fn from_str(s: &str) -> Result { 119 | s.split(',') 120 | .map(|s| { 121 | let (key, val) = s.split_once('=').ok_or(Error::MissingEq)?; 122 | let alg = key.parse()?; 123 | let b64 = val 124 | .strip_prefix(':') 125 | .and_then(|val| val.strip_suffix(':')) 126 | .ok_or(Error::MissingColons)? 
127 | .parse()?; 128 | Ok((alg, b64)) 129 | }) 130 | .collect::>() 131 | .map(Self) 132 | } 133 | } 134 | 135 | impl IntoIterator for ContentDigest 136 | where 137 | H: AsRef<[u8]> + From>, 138 | { 139 | type Item = (Algorithm, Bytes); 140 | type IntoIter = IntoIter>; 141 | 142 | fn into_iter(self) -> Self::IntoIter { 143 | self.0.into_iter() 144 | } 145 | } 146 | 147 | #[cfg(feature = "headers")] 148 | static CONTENT_DIGEST: HeaderName = HeaderName::from_static("content-digest"); 149 | 150 | #[cfg(feature = "headers")] 151 | impl Header for ContentDigest 152 | where 153 | H: Default + AsRef<[u8]> + From>, 154 | { 155 | fn name() -> &'static HeaderName { 156 | &CONTENT_DIGEST 157 | } 158 | 159 | #[allow(single_use_lifetimes)] 160 | fn decode<'i, I>(values: &mut I) -> Result 161 | where 162 | Self: Sized, 163 | I: Iterator, 164 | { 165 | let mut all = Self::default(); 166 | 167 | for value in values { 168 | let digests: ContentDigest = std::str::from_utf8(value.as_bytes()) 169 | .map_err(|_| HeadErr::invalid())? 
170 | .parse() 171 | .map_err(|_| HeadErr::invalid())?; 172 | 173 | for (algo, hash) in digests { 174 | let _ = all.insert(algo, hash); 175 | } 176 | } 177 | 178 | if all.is_empty() { 179 | return Err(HeadErr::invalid()); 180 | } 181 | 182 | Ok(all) 183 | } 184 | 185 | fn encode>(&self, values: &mut E) { 186 | let value = HeaderValue::from_str(&self.to_string()).unwrap(); 187 | values.extend([value]) 188 | } 189 | } 190 | 191 | #[cfg(test)] 192 | mod tests { 193 | use super::*; 194 | 195 | #[async_std::test] 196 | async fn isomorphism() { 197 | const STR: &str = "sha-224=:CAj2TmDViXn8tnbJbsk4Jw3qQkRa7vzTpOb42w==:,sha-256=:LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564=:,sha-384=:mMEf/f3VQGdrGhN8saIrKnA1DJpEFx1rEYDGvly7LuP3nVMsih3Z7y6OCOdSo7q7:,sha-512=:9/u6bgY2+JDlb7vzKD5STG+jIErimDgtYkdB0NxmODJuKCxBvl5CVNiCB3LFUYosWowMf37aGVlKfrU5RT4e1w==:"; 198 | assert_eq!(STR.parse::().unwrap().to_string(), STR); 199 | } 200 | } 201 | -------------------------------------------------------------------------------- /crates/type/src/digest/mod.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | mod algorithm; 4 | mod algorithms; 5 | mod digests; 6 | mod reader; 7 | mod verifier; 8 | mod writer; 9 | 10 | pub use algorithm::Algorithm; 11 | pub use algorithms::Algorithms; 12 | pub use digests::ContentDigest; 13 | pub use reader::Reader; 14 | pub use verifier::Verifier; 15 | pub use writer::Writer; 16 | 17 | /// Parsing error 18 | #[derive(Clone, Debug, PartialEq, Eq)] 19 | pub enum Error { 20 | MissingEq, 21 | MissingColons, 22 | UnknownAlgorithm, 23 | Decode(base64::DecodeError), 24 | } 25 | 26 | impl std::fmt::Display for Error { 27 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 28 | match self { 29 | Self::Decode(e) => e.fmt(f), 30 | Self::MissingEq => f.write_str("missing equals"), 31 | Self::MissingColons => f.write_str("missing colons"), 32 | Self::UnknownAlgorithm => 
f.write_str("unknown algorithm"), 33 | } 34 | } 35 | } 36 | 37 | impl std::error::Error for Error {} 38 | 39 | impl From for Error { 40 | fn from(value: base64::DecodeError) -> Self { 41 | Self::Decode(value) 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /crates/type/src/digest/reader.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | use super::{Algorithm, ContentDigest}; 4 | 5 | use std::io; 6 | use std::pin::Pin; 7 | use std::task::{Context, Poll}; 8 | 9 | use futures::AsyncRead; 10 | use sha2::digest::DynDigest; 11 | 12 | /// A hashing reader 13 | /// 14 | /// This type wraps another reader and hashes the bytes as they are read. 15 | #[allow(missing_debug_implementations)] // DynDigest does not implement Debug 16 | pub struct Reader { 17 | reader: T, 18 | digests: Vec<(Algorithm, Box)>, 19 | } 20 | 21 | impl Reader { 22 | pub(crate) fn new(reader: T, digests: impl IntoIterator) -> Self { 23 | let digests = digests.into_iter().map(|a| (a, a.hasher())).collect(); 24 | Reader { reader, digests } 25 | } 26 | 27 | fn update(&mut self, buf: &[u8]) { 28 | for digest in &mut self.digests { 29 | digest.1.update(buf); 30 | } 31 | } 32 | 33 | /// Calculates the digests for all the bytes written so far. 
34 | pub fn digests(&self) -> ContentDigest> { 35 | let mut set = ContentDigest::default(); 36 | 37 | for digest in &self.digests { 38 | let _ = set.insert(digest.0, digest.1.clone().finalize().into()); 39 | } 40 | 41 | set 42 | } 43 | } 44 | 45 | impl AsyncRead for Reader { 46 | fn poll_read( 47 | mut self: Pin<&mut Self>, 48 | cx: &mut Context<'_>, 49 | buf: &mut [u8], 50 | ) -> Poll> { 51 | Pin::new(&mut self.reader).poll_read(cx, buf).map_ok(|n| { 52 | self.update(&buf[..n]); 53 | n 54 | }) 55 | } 56 | } 57 | 58 | impl io::Read for Reader { 59 | fn read(&mut self, buf: &mut [u8]) -> io::Result { 60 | let n = self.reader.read(buf)?; 61 | self.update(&buf[..n]); 62 | Ok(n) 63 | } 64 | } 65 | 66 | #[cfg(test)] 67 | mod tests { 68 | use futures::io::{copy, sink}; 69 | 70 | use super::*; 71 | 72 | #[async_std::test] 73 | async fn success() { 74 | const HASH: &str = "sha-256=:LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564=:"; 75 | let hash: ContentDigest = HASH.parse().unwrap(); 76 | 77 | let mut reader = hash.reader(&b"foo"[..]); 78 | assert_eq!(copy(&mut reader, &mut sink()).await.unwrap(), 3); 79 | assert_eq!(reader.digests(), hash); 80 | } 81 | 82 | #[async_std::test] 83 | async fn failure() { 84 | const HASH: &str = "sha-256=:LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564=:"; 85 | let hash: ContentDigest = HASH.parse().unwrap(); 86 | 87 | let mut reader = hash.reader(&b"bar"[..]); 88 | assert_eq!(copy(&mut reader, &mut sink()).await.unwrap(), 3); 89 | assert_ne!(reader.digests(), hash); 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /crates/type/src/digest/verifier.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | use super::{ContentDigest, Reader}; 4 | 5 | use std::io::{self, Error, ErrorKind}; 6 | use std::pin::Pin; 7 | use std::task::{Context, Poll}; 8 | 9 | use futures::AsyncRead; 10 | 11 | /// A verifying reader 12 | /// 13 | 
/// This type is exactly the same as [`Reader`](super::Reader) except that it 14 | /// additionally verifies the expected hashes. When the end-of-file condition 15 | /// is reached, if the actual hashes do not match the expected hashes, an error 16 | /// is produced. 17 | #[allow(missing_debug_implementations)] // Reader does not implement Debug 18 | pub struct Verifier 19 | where 20 | H: AsRef<[u8]> + From>, 21 | { 22 | reader: Reader, 23 | hashes: ContentDigest, 24 | } 25 | 26 | #[allow(unsafe_code)] 27 | unsafe impl Sync for Verifier 28 | where 29 | T: Sync, 30 | H: Sync + AsRef<[u8]> + From>, 31 | { 32 | } 33 | 34 | #[allow(unsafe_code)] 35 | unsafe impl Send for Verifier 36 | where 37 | T: Send, 38 | H: Send + AsRef<[u8]> + From>, 39 | { 40 | } 41 | 42 | impl Verifier 43 | where 44 | H: AsRef<[u8]> + From>, 45 | { 46 | pub(crate) fn new(reader: Reader, hashes: ContentDigest) -> Self { 47 | Self { reader, hashes } 48 | } 49 | 50 | pub fn digests(&self) -> ContentDigest> { 51 | self.reader.digests() 52 | } 53 | } 54 | 55 | impl Unpin for Verifier where H: AsRef<[u8]> + From> {} 56 | 57 | impl AsyncRead for Verifier 58 | where 59 | H: AsRef<[u8]> + From>, 60 | { 61 | fn poll_read( 62 | mut self: Pin<&mut Self>, 63 | cx: &mut Context<'_>, 64 | buf: &mut [u8], 65 | ) -> Poll> { 66 | Pin::new(&mut self.reader) 67 | .poll_read(cx, buf) 68 | .map(|r| match r? { 69 | 0 if self.reader.digests() != self.hashes => { 70 | Err(Error::new(ErrorKind::InvalidData, "hash mismatch")) 71 | } 72 | n => Ok(n), 73 | }) 74 | } 75 | } 76 | 77 | impl io::Read for Verifier 78 | where 79 | H: AsRef<[u8]> + From>, 80 | { 81 | fn read(&mut self, buf: &mut [u8]) -> io::Result { 82 | match self.reader.read(buf)? 
{ 83 | 0 if self.reader.digests() != self.hashes => { 84 | Err(Error::new(ErrorKind::InvalidData, "hash mismatch")) 85 | } 86 | n => Ok(n), 87 | } 88 | } 89 | } 90 | 91 | #[cfg(test)] 92 | mod tests { 93 | use futures::io::{copy, sink}; 94 | 95 | use super::*; 96 | 97 | #[async_std::test] 98 | async fn read_success() { 99 | let rdr = &b"foo"[..]; 100 | let content_digest = "sha-224=:CAj2TmDViXn8tnbJbsk4Jw3qQkRa7vzTpOb42w==:,sha-256=:LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564=:,sha-384=:mMEf/f3VQGdrGhN8saIrKnA1DJpEFx1rEYDGvly7LuP3nVMsih3Z7y6OCOdSo7q7:,sha-512=:9/u6bgY2+JDlb7vzKD5STG+jIErimDgtYkdB0NxmODJuKCxBvl5CVNiCB3LFUYosWowMf37aGVlKfrU5RT4e1w==:" 101 | .parse::() 102 | .unwrap(); 103 | 104 | assert_eq!( 105 | copy(&mut content_digest.clone().verifier(rdr), &mut sink()) 106 | .await 107 | .unwrap(), 108 | "foo".len() as u64, 109 | ); 110 | assert_eq!( 111 | std::io::copy(&mut content_digest.verifier(rdr), &mut std::io::sink()).unwrap(), 112 | "foo".len() as u64, 113 | ); 114 | } 115 | 116 | #[async_std::test] 117 | async fn read_failure() { 118 | let rdr = &b"bar"[..]; 119 | let content_digest = "sha-224=:CAj2TmDViXn8tnbJbsk4Jw3qQkRa7vzTpOb42w==:,sha-256=:LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564=:,sha-384=:mMEf/f3VQGdrGhN8saIrKnA1DJpEFx1rEYDGvly7LuP3nVMsih3Z7y6OCOdSo7q7:,sha-512=:9/u6bgY2+JDlb7vzKD5STG+jIErimDgtYkdB0NxmODJuKCxBvl5CVNiCB3LFUYosWowMf37aGVlKfrU5RT4e1w==:" 120 | .parse::() 121 | .unwrap(); 122 | 123 | assert_eq!( 124 | copy(&mut content_digest.clone().verifier(rdr), &mut sink()) 125 | .await 126 | .unwrap_err() 127 | .kind(), 128 | ErrorKind::InvalidData, 129 | ); 130 | assert_eq!( 131 | std::io::copy(&mut content_digest.verifier(rdr), &mut std::io::sink()) 132 | .unwrap_err() 133 | .kind(), 134 | ErrorKind::InvalidData, 135 | ); 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /crates/type/src/digest/writer.rs: -------------------------------------------------------------------------------- 1 
| // SPDX-License-Identifier: Apache-2.0 2 | 3 | use super::{Algorithm, ContentDigest}; 4 | 5 | use std::io; 6 | use std::pin::Pin; 7 | use std::task::{Context, Poll}; 8 | 9 | use futures::AsyncWrite; 10 | use sha2::digest::DynDigest; 11 | 12 | /// A hashing writer 13 | /// 14 | /// This type wraps another writer and hashes the bytes as they are written. 15 | #[allow(missing_debug_implementations)] // DynDigest does not implement Debug 16 | pub struct Writer { 17 | writer: T, 18 | digests: Vec<(Algorithm, Box)>, 19 | } 20 | 21 | #[allow(unsafe_code)] 22 | unsafe impl Sync for Writer where T: Sync {} 23 | 24 | #[allow(unsafe_code)] 25 | unsafe impl Send for Writer where T: Send {} 26 | 27 | impl Writer { 28 | pub(crate) fn new(writer: T, digests: impl IntoIterator) -> Self { 29 | let digests = digests.into_iter().map(|a| (a, a.hasher())).collect(); 30 | Writer { writer, digests } 31 | } 32 | 33 | fn update(&mut self, buf: &[u8]) { 34 | for digest in &mut self.digests { 35 | digest.1.update(buf); 36 | } 37 | } 38 | 39 | /// Calculates the digests for all the bytes written so far. 
40 | pub fn digests(&self) -> ContentDigest> { 41 | let mut set = ContentDigest::default(); 42 | 43 | for digest in &self.digests { 44 | _ = set.insert(digest.0, digest.1.clone().finalize().into()); 45 | } 46 | 47 | set 48 | } 49 | } 50 | 51 | impl AsyncWrite for Writer { 52 | fn poll_write( 53 | mut self: Pin<&mut Self>, 54 | cx: &mut Context<'_>, 55 | buf: &[u8], 56 | ) -> Poll> { 57 | Pin::new(&mut self.writer).poll_write(cx, buf).map_ok(|n| { 58 | self.update(&buf[..n]); 59 | n 60 | }) 61 | } 62 | 63 | fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { 64 | Pin::new(&mut self.writer).poll_flush(cx) 65 | } 66 | 67 | fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { 68 | Pin::new(&mut self.writer).poll_close(cx) 69 | } 70 | } 71 | 72 | impl io::Write for Writer { 73 | fn write(&mut self, buf: &[u8]) -> io::Result { 74 | let n = self.writer.write(buf)?; 75 | self.update(&buf[..n]); 76 | Ok(n) 77 | } 78 | 79 | fn flush(&mut self) -> io::Result<()> { 80 | self.writer.flush() 81 | } 82 | } 83 | 84 | #[cfg(test)] 85 | mod tests { 86 | use futures::io::{copy, sink}; 87 | 88 | use super::*; 89 | 90 | #[async_std::test] 91 | async fn success() { 92 | const HASH: &str = "sha-256=:LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564=:"; 93 | let set = HASH.parse::().unwrap(); 94 | 95 | let mut writer = set.clone().writer(sink()); 96 | assert_eq!(copy(&mut &b"foo"[..], &mut writer).await.unwrap(), 3); 97 | assert_eq!(writer.digests(), set); 98 | } 99 | 100 | #[async_std::test] 101 | async fn failure() { 102 | const HASH: &str = "sha-256=:LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564=:"; 103 | let set = HASH.parse::().unwrap(); 104 | 105 | let mut writer = set.clone().writer(sink()); 106 | assert_eq!(copy(&mut &b"bar"[..], &mut writer).await.unwrap(), 3); 107 | assert_ne!(writer.digests(), set); 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /crates/type/src/lib.rs: 
-------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | #![cfg_attr(not(test), forbid(clippy::expect_used, clippy::panic))] 4 | #![deny( 5 | clippy::all, 6 | absolute_paths_not_starting_with_crate, 7 | deprecated_in_future, 8 | missing_copy_implementations, 9 | missing_debug_implementations, 10 | noop_method_call, 11 | rust_2018_compatibility, 12 | rust_2018_idioms, 13 | rust_2021_compatibility, 14 | single_use_lifetimes, 15 | trivial_bounds, 16 | trivial_casts, 17 | trivial_numeric_casts, 18 | unreachable_code, 19 | unreachable_patterns, 20 | unreachable_pub, 21 | unsafe_code, 22 | unstable_features, 23 | unused, 24 | unused_crate_dependencies, 25 | unused_import_braces, 26 | unused_lifetimes, 27 | unused_results, 28 | variant_size_differences 29 | )] 30 | 31 | pub mod digest; 32 | pub mod repository; 33 | pub mod tag; 34 | pub mod tree; 35 | pub mod user; 36 | 37 | mod meta; 38 | 39 | pub use meta::*; 40 | pub use repository::{ 41 | Config as RepositoryConfig, Context as RepositoryContext, Name as RepositoryName, 42 | }; 43 | pub use tag::{Context as TagContext, Entry as TagEntry, Name as TagName}; 44 | pub use tree::{ 45 | Content as TreeContent, Context as TreeContext, Directory as TreeDirectory, Entry as TreeEntry, 46 | Name as TreeName, Path as TreePath, Tree, 47 | }; 48 | pub use user::{Context as UserContext, Name as UserName, Record as UserRecord}; 49 | -------------------------------------------------------------------------------- /crates/type/src/meta.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | use crate::digest::ContentDigest; 4 | 5 | use mime::Mime; 6 | use serde::{de::Error as _, Deserialize, Deserializer, Serialize, Serializer}; 7 | 8 | #[cfg(feature = "axum")] 9 | use axum::{ 10 | extract::rejection::{TypedHeaderRejection, TypedHeaderRejectionReason}, 11 | extract::{FromRequest, 
RequestParts, TypedHeader}, 12 | headers::{ContentLength, ContentType}, 13 | response::{IntoResponseParts, Response, ResponseParts}, 14 | }; 15 | 16 | #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] 17 | pub struct Meta { 18 | #[serde(rename = "digest")] 19 | pub hash: ContentDigest>, 20 | 21 | #[serde(rename = "length")] 22 | pub size: u64, 23 | 24 | #[serde(deserialize_with = "deserialize")] 25 | #[serde(serialize_with = "serialize")] 26 | #[serde(rename = "type")] 27 | pub mime: Mime, 28 | } 29 | 30 | #[allow(single_use_lifetimes)] 31 | fn deserialize<'de, D: Deserializer<'de>>(deserializer: D) -> Result { 32 | String::deserialize(deserializer)? 33 | .parse() 34 | .map_err(|_| D::Error::custom("invalid mime type")) 35 | } 36 | 37 | fn serialize(mime: &Mime, serializer: S) -> Result { 38 | mime.to_string().serialize(serializer) 39 | } 40 | 41 | #[cfg(feature = "axum")] 42 | #[axum::async_trait] 43 | impl FromRequest for Meta { 44 | type Rejection = TypedHeaderRejection; 45 | 46 | async fn from_request(req: &mut RequestParts) -> Result { 47 | let hash = match req.extract().await { 48 | Ok(TypedHeader(hash)) => hash, 49 | Err(e) if matches!(e.reason(), TypedHeaderRejectionReason::Missing) => { 50 | Default::default() 51 | } 52 | Err(e) => return Err(e), 53 | }; 54 | let size = req.extract::>().await?.0 .0; 55 | let mime = req.extract::>().await?.0.into(); 56 | Ok(Meta { hash, size, mime }) 57 | } 58 | } 59 | 60 | #[cfg(feature = "axum")] 61 | impl IntoResponseParts for Meta { 62 | type Error = Response; 63 | 64 | fn into_response_parts(self, res: ResponseParts) -> Result { 65 | let hash = TypedHeader(self.hash); 66 | let size = TypedHeader(ContentLength(self.size)); 67 | let mime = TypedHeader(ContentType::from(self.mime)); 68 | (hash, size, mime).into_response_parts(res) 69 | } 70 | } 71 | 72 | #[cfg(test)] 73 | mod tests { 74 | use super::*; 75 | 76 | use serde_json::json; 77 | 78 | #[test] 79 | fn serialization() { 80 | let meta = Meta { 81 | 
hash: "sha-384=:mqVuAfXRKap7bdgcCY5uykM6+R9GqQ8K/uxy9rx7HNQlGYl1kPzQho1wx4JwY8w=:"
                .parse()
                .unwrap(),
            size: 42,
            mime: "text/plain".parse().unwrap(),
        };

        let json = json!({
            "digest": {"sha-384": "mqVuAfXRKap7bdgcCY5uykM6+R9GqQ8K/uxy9rx7HNQlGYl1kPzQho1wx4JwY8w="},
            "length": 42,
            "type": "text/plain",
        });

        assert_eq!(serde_json::to_string(&meta).unwrap(), json.to_string());
    }
}
--------------------------------------------------------------------------------
/crates/type/src/repository/config.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0

use serde::{Deserialize, Serialize};

/// A repository config
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
#[allow(missing_copy_implementations)]
#[serde(deny_unknown_fields)]
pub struct Config {
    pub public: bool,
}
--------------------------------------------------------------------------------
/crates/type/src/repository/context.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0

use super::super::UserContext;
use super::Name;

use std::fmt::Display;
use std::str::FromStr;

use anyhow::{anyhow, Context as _};

/// A repository context: the owning user plus the repository name.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct Context {
    pub owner: UserContext,
    pub name: Name,
}

impl TryFrom<(&str, &str)> for Context {
    type Error = anyhow::Error;

    fn try_from((user, repo): (&str, &str)) -> Result<Self, Self::Error> {
        let owner = user.parse().context("failed to parse user context")?;
        let name = repo.parse().context("failed to parse repository name")?;
        Ok(Self { owner, name })
    }
}

impl FromStr for Context {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Split on the LAST `/` or `:` so the owner part may itself contain separators.
        let (owner, name) = s
            .rsplit_once(['/', ':'])
            .ok_or_else(|| anyhow!("`/` or ':' separator not found"))?;
        let owner = owner.parse().context("failed to parse user context")?;
        let name = name.parse().context("failed to parse repository name")?;
        Ok(Self { owner, name })
    }
}

impl Display for Context {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}/{}", self.owner, self.name)
    }
}

#[cfg(feature = "axum")]
#[axum::async_trait]
impl<B: Send> axum::extract::FromRequest<B> for Context {
    type Rejection = (axum::http::StatusCode, String);

    async fn from_request(
        req: &mut axum::extract::RequestParts<B>,
    ) -> Result<Self, Self::Rejection> {
        let owner = req.extract().await?;
        let axum::Extension(name) = req.extract().await.map_err(|e| {
            (
                axum::http::StatusCode::INTERNAL_SERVER_ERROR,
                anyhow::Error::new(e)
                    .context("failed to extract repository context")
                    .to_string(),
            )
        })?;
        Ok(Self { owner, name })
    }
}
--------------------------------------------------------------------------------
/crates/type/src/repository/mod.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0
mod config;
mod context;
mod name;

pub use config::*;
pub use context::*;
pub use name::*;
--------------------------------------------------------------------------------
/crates/type/src/repository/name.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0

use std::fmt::Display;
use std::ops::Deref;
use std::str::FromStr;

use anyhow::bail;
use serde::de::Error;
use serde::{Deserialize, Deserializer, Serialize};

/// A repository name
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize)]
#[repr(transparent)]
#[serde(transparent)] 15 | pub struct Name(String); 16 | 17 | impl Name { 18 | #[inline] 19 | fn validate(s: impl AsRef) -> anyhow::Result<()> { 20 | let s = s.as_ref(); 21 | if s.is_empty() { 22 | bail!("empty repository name") 23 | } else if s 24 | .find(|c| !matches!(c, '0'..='9' | 'a'..='z' | 'A'..='Z' | '-')) 25 | .is_some() 26 | { 27 | bail!("invalid characters in repository name") 28 | } else { 29 | Ok(()) 30 | } 31 | } 32 | } 33 | 34 | impl AsRef for Name { 35 | fn as_ref(&self) -> &str { 36 | &self.0 37 | } 38 | } 39 | 40 | impl AsRef for Name { 41 | fn as_ref(&self) -> &String { 42 | &self.0 43 | } 44 | } 45 | 46 | impl Deref for Name { 47 | type Target = String; 48 | 49 | fn deref(&self) -> &Self::Target { 50 | &self.0 51 | } 52 | } 53 | 54 | impl<'de> Deserialize<'de> for Name { 55 | fn deserialize(deserializer: D) -> Result 56 | where 57 | D: Deserializer<'de>, 58 | { 59 | let name = String::deserialize(deserializer)?; 60 | name.try_into().map_err(D::Error::custom) 61 | } 62 | } 63 | 64 | impl Display for Name { 65 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 66 | write!(f, "{}", self.0) 67 | } 68 | } 69 | 70 | impl FromStr for Name { 71 | type Err = anyhow::Error; 72 | 73 | #[inline] 74 | fn from_str(s: &str) -> Result { 75 | Self::validate(s).map(|()| Self(s.into())) 76 | } 77 | } 78 | 79 | impl TryFrom for Name { 80 | type Error = anyhow::Error; 81 | 82 | #[inline] 83 | fn try_from(s: String) -> Result { 84 | Self::validate(&s).map(|()| Self(s)) 85 | } 86 | } 87 | 88 | #[cfg(test)] 89 | mod tests { 90 | use super::*; 91 | 92 | #[test] 93 | fn from_str() { 94 | assert!("".parse::().is_err()); 95 | assert!(" ".parse::().is_err()); 96 | assert!("/".parse::().is_err()); 97 | assert!("/name".parse::().is_err()); 98 | assert!("name/".parse::().is_err()); 99 | assert!("/name/".parse::().is_err()); 100 | assert!("group//name".parse::().is_err()); 101 | assert!("group/subgroup///name".parse::().is_err()); 102 | 
assert!("group/subg%roup/name".parse::().is_err()); 103 | assert!("group/subgяoup/name".parse::().is_err()); 104 | assert!("group /subgroup/name".parse::().is_err()); 105 | assert!("group/subgr☣up/name".parse::().is_err()); 106 | assert!("gr.oup/subgroup/name".parse::().is_err()); 107 | assert!("group/name".parse::().is_err()); 108 | assert!("group/subgroup/name".parse::().is_err()); 109 | assert!("gr0uP/subgr0up/-n4mE".parse::().is_err()); 110 | 111 | assert_eq!("name".parse::().unwrap(), Name("name".into())); 112 | assert_eq!("-n4M3".parse::().unwrap(), Name("-n4M3".into())); 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /crates/type/src/tag/context.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | use super::super::RepositoryContext; 4 | use super::Name; 5 | 6 | use std::fmt::Display; 7 | use std::str::FromStr; 8 | 9 | use anyhow::{anyhow, Context as _}; 10 | 11 | #[derive(Clone, Debug, Eq, Hash, PartialEq)] 12 | pub struct Context { 13 | pub repository: RepositoryContext, 14 | pub name: Name, 15 | } 16 | 17 | impl TryFrom<(&str, &str, &str)> for Context { 18 | type Error = anyhow::Error; 19 | 20 | fn try_from((user, repo, tag): (&str, &str, &str)) -> Result { 21 | let repository = (user, repo) 22 | .try_into() 23 | .context("failed to parse repository context")?; 24 | let name = tag 25 | .parse() 26 | .context("failed to parse tag semantic version")?; 27 | Ok(Self { repository, name }) 28 | } 29 | } 30 | 31 | impl FromStr for Context { 32 | type Err = anyhow::Error; 33 | 34 | fn from_str(s: &str) -> Result { 35 | let (repository, name) = s 36 | .rsplit_once(['/', ':']) 37 | .ok_or_else(|| anyhow!("'/' or `:` separator not found"))?; 38 | let repository = repository 39 | .parse() 40 | .context("failed to parse repository context")?; 41 | let name = name 42 | .parse() 43 | .context("failed to parse tag semantic version")?; 
Ok(Self { repository, name })
    }
}

impl Display for Context {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}:{}", self.repository, self.name)
    }
}

#[cfg(feature = "axum")]
#[axum::async_trait]
impl<B: Send> axum::extract::FromRequest<B> for Context {
    type Rejection = (axum::http::StatusCode, String);

    async fn from_request(
        req: &mut axum::extract::RequestParts<B>,
    ) -> Result<Self, Self::Rejection> {
        let repository = req.extract().await?;
        let axum::Extension(name) = req.extract().await.map_err(|e| {
            (
                axum::http::StatusCode::INTERNAL_SERVER_ERROR,
                anyhow::Error::new(e)
                    .context("failed to extract tag context")
                    .to_string(),
            )
        })?;
        Ok(Self { repository, name })
    }
}
--------------------------------------------------------------------------------
/crates/type/src/tag/entry.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0

use super::super::TreeEntry;

use drawbridge_jose::jws::Jws;

use serde::{Deserialize, Serialize};

/// A tag entry: either a JWS-signed payload or a plain (unsigned) tree entry.
// NOTE(review): the generic parameter and its default were stripped by extraction;
// `E = TreeEntry` matches the `use super::super::TreeEntry` import — confirm upstream.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
#[serde(untagged)]
pub enum Entry<E = TreeEntry> {
    Signed(Jws),
    Unsigned(E),
}
--------------------------------------------------------------------------------
/crates/type/src/tag/mod.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0
mod context;
mod entry;
mod name;

pub use context::*;
pub use entry::*;
pub use name::*;
--------------------------------------------------------------------------------
/crates/type/src/tag/name.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0

use std::fmt::Display;
use std::ops::{Deref,
DerefMut};
use std::str::FromStr;

use serde::{Deserialize, Serialize};

/// A tag name: a semantic version (no leading `v` — see the tests below).
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
#[repr(transparent)]
#[serde(transparent)]
pub struct Name(semver::Version);

impl FromStr for Name {
    type Err = semver::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        s.parse().map(Name)
    }
}

impl Deref for Name {
    type Target = semver::Version;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for Name {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

impl Display for Name {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn from_str() {
        for s in ["", "=", "/", "v1.2/3", "v1.2.3"] {
            assert!(
                s.parse::<Name>().is_err(),
                "input '{}' should fail to parse",
                s
            );
        }

        for (s, expected) in [
            (
                "1.2.3",
                semver::Version {
                    major: 1,
                    minor: 2,
                    patch: 3,
                    pre: Default::default(),
                    build: Default::default(),
                },
            ),
            (
                "1.2.3-test",
                semver::Version {
                    major: 1,
                    minor: 2,
                    patch: 3,
                    pre: semver::Prerelease::new("test").unwrap(),
                    build: Default::default(),
                },
            ),
        ] {
            assert_eq!(
                s.parse::<Name>().unwrap(),
                Name(expected),
                "input '{}' should succeed to parse",
                s
            );
        }
    }
}
--------------------------------------------------------------------------------
/crates/type/src/tree/context.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0

use super::super::TagContext;
use super::Path;

use std::fmt::Display;

/// A tree context: a tag context plus a path within the tree.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct
Context { 10 | pub tag: TagContext, 11 | pub path: Path, 12 | } 13 | 14 | impl Display for Context { 15 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 16 | write!(f, "{}/{}", self.tag, self.path) 17 | } 18 | } 19 | 20 | #[cfg(feature = "axum")] 21 | #[axum::async_trait] 22 | impl axum::extract::FromRequest for Context { 23 | type Rejection = (axum::http::StatusCode, String); 24 | 25 | async fn from_request( 26 | req: &mut axum::extract::RequestParts, 27 | ) -> Result { 28 | let tag = req.extract().await?; 29 | let axum::Extension(path) = req.extract().await.map_err(|e| { 30 | ( 31 | axum::http::StatusCode::INTERNAL_SERVER_ERROR, 32 | anyhow::Error::new(e) 33 | .context("failed to extract tree context") 34 | .to_string(), 35 | ) 36 | })?; 37 | Ok(Self { tag, path }) 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /crates/type/src/tree/directory.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | use super::{Entry, Name}; 4 | 5 | use std::collections::BTreeMap; 6 | use std::ops::{Deref, DerefMut}; 7 | 8 | use serde::{Deserialize, Serialize}; 9 | 10 | /// A directory 11 | /// 12 | /// A directory is simply a sorted name to `E` map. 
13 | #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] 14 | #[serde(transparent)] 15 | #[repr(transparent)] 16 | pub struct Directory(BTreeMap); 17 | 18 | impl Directory { 19 | pub const TYPE: &'static str = "application/vnd.drawbridge.directory.v1+json"; 20 | } 21 | 22 | impl IntoIterator for Directory { 23 | type Item = (Name, E); 24 | type IntoIter = std::collections::btree_map::IntoIter; 25 | 26 | fn into_iter(self) -> Self::IntoIter { 27 | self.0.into_iter() 28 | } 29 | } 30 | 31 | impl From> for Directory { 32 | fn from(m: BTreeMap) -> Self { 33 | Self(m) 34 | } 35 | } 36 | 37 | impl FromIterator<(Name, E)> for Directory { 38 | fn from_iter>(iter: T) -> Self { 39 | Self(BTreeMap::from_iter(iter)) 40 | } 41 | } 42 | 43 | impl Deref for Directory { 44 | type Target = BTreeMap; 45 | 46 | fn deref(&self) -> &Self::Target { 47 | &self.0 48 | } 49 | } 50 | 51 | impl DerefMut for Directory { 52 | fn deref_mut(&mut self) -> &mut Self::Target { 53 | &mut self.0 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /crates/type/src/tree/entry.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | use super::super::Meta; 4 | 5 | use std::collections::HashMap; 6 | 7 | use serde::{Deserialize, Serialize}; 8 | use serde_json::Value; 9 | 10 | /// A directory entry 11 | /// 12 | /// Note that this type is designed to be extensible. Therefore, the fields 13 | /// here represent the minimum required fields. Other fields may be present. 
14 | #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] 15 | pub struct Entry { 16 | /// The metadata of this entry 17 | #[serde(flatten)] 18 | pub meta: Meta, 19 | 20 | /// Custom fields 21 | #[serde(flatten)] 22 | pub custom: HashMap, 23 | 24 | #[serde(skip)] 25 | pub content: C, 26 | } 27 | 28 | impl Entry { 29 | pub const TYPE: &'static str = "application/vnd.drawbridge.entry.v1+json"; 30 | } 31 | -------------------------------------------------------------------------------- /crates/type/src/tree/name.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | use super::Path; 4 | 5 | use std::fmt::Display; 6 | use std::ops::Deref; 7 | use std::path::PathBuf; 8 | use std::str::FromStr; 9 | 10 | use anyhow::bail; 11 | use serde::de::Error; 12 | use serde::{Deserialize, Deserializer, Serialize}; 13 | 14 | #[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq, Serialize)] 15 | #[repr(transparent)] 16 | #[serde(transparent)] 17 | pub struct Name(String); 18 | 19 | impl Name { 20 | #[inline] 21 | fn validate(s: impl AsRef) -> anyhow::Result<()> { 22 | let s = s.as_ref(); 23 | if s.is_empty() { 24 | bail!("empty entry name") 25 | } else if s 26 | .find(|c| !matches!(c, '0'..='9' | 'a'..='z' | 'A'..='Z' | '-' | '_' | '.' 
| ':')) 27 | .is_some() 28 | { 29 | bail!("invalid characters in entry name") 30 | } else { 31 | Ok(()) 32 | } 33 | } 34 | 35 | pub fn join(self, name: Name) -> Path { 36 | vec![self, name].into_iter().collect() 37 | } 38 | } 39 | 40 | impl AsRef for Name { 41 | fn as_ref(&self) -> &str { 42 | &self.0 43 | } 44 | } 45 | 46 | impl AsRef for Name { 47 | fn as_ref(&self) -> &String { 48 | &self.0 49 | } 50 | } 51 | 52 | impl Deref for Name { 53 | type Target = String; 54 | 55 | fn deref(&self) -> &Self::Target { 56 | &self.0 57 | } 58 | } 59 | 60 | impl<'de> Deserialize<'de> for Name { 61 | fn deserialize(deserializer: D) -> Result 62 | where 63 | D: Deserializer<'de>, 64 | { 65 | let name = String::deserialize(deserializer)?; 66 | name.try_into().map_err(D::Error::custom) 67 | } 68 | } 69 | 70 | impl Display for Name { 71 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 72 | write!(f, "{}", self.0) 73 | } 74 | } 75 | 76 | impl From for PathBuf { 77 | fn from(name: Name) -> Self { 78 | Self::from(name.0) 79 | } 80 | } 81 | 82 | impl From for String { 83 | fn from(name: Name) -> Self { 84 | name.0 85 | } 86 | } 87 | 88 | impl FromStr for Name { 89 | type Err = anyhow::Error; 90 | 91 | #[inline] 92 | fn from_str(s: &str) -> Result { 93 | Self::validate(s).map(|()| Self(s.into())) 94 | } 95 | } 96 | 97 | impl TryFrom for Name { 98 | type Error = anyhow::Error; 99 | 100 | #[inline] 101 | fn try_from(s: String) -> Result { 102 | Self::validate(&s).map(|()| Self(s)) 103 | } 104 | } 105 | 106 | #[cfg(test)] 107 | mod tests { 108 | use super::*; 109 | 110 | #[test] 111 | fn from_str() { 112 | assert!("/".parse::().is_err()); 113 | assert!("/test".parse::().is_err()); 114 | assert!("test/".parse::().is_err()); 115 | 116 | assert_eq!("foo".parse::().unwrap(), Name("foo".into())); 117 | assert_eq!("some.txt".parse::().unwrap(), Name("some.txt".into())); 118 | assert_eq!( 119 | "my_wasm.wasm".parse::().unwrap(), 120 | Name("my_wasm.wasm".into()) 121 | ); 122 
| assert_eq!( 123 | "not.a.cor-Rec.t.eX.tens.si0n_".parse::().unwrap(), 124 | Name("not.a.cor-Rec.t.eX.tens.si0n_".into()) 125 | ); 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /crates/type/src/tree/path.rs: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | 3 | use super::Name; 4 | 5 | use std::fmt::Display; 6 | use std::ops::Deref; 7 | use std::path::PathBuf; 8 | use std::str::FromStr; 9 | 10 | use serde::{Deserialize, Serialize}; 11 | 12 | #[derive(Clone, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize)] 13 | #[repr(transparent)] 14 | #[serde(transparent)] 15 | pub struct Path(Vec); 16 | 17 | impl Path { 18 | pub const ROOT: Self = Self(vec![]); 19 | 20 | pub fn intersperse(&self, sep: &str) -> String { 21 | let mut it = self.0.iter(); 22 | match it.next() { 23 | None => Default::default(), 24 | Some(first) => { 25 | let mut s = String::with_capacity( 26 | self.0.iter().map(|p| p.len()).sum::() + self.0.len() - 1, 27 | ); 28 | s.push_str(first); 29 | for p in it { 30 | s.push_str(sep); 31 | s.push_str(p); 32 | } 33 | s 34 | } 35 | } 36 | } 37 | } 38 | 39 | impl AsRef> for Path { 40 | fn as_ref(&self) -> &Vec { 41 | &self.0 42 | } 43 | } 44 | 45 | impl AsRef<[Name]> for Path { 46 | fn as_ref(&self) -> &[Name] { 47 | &self.0 48 | } 49 | } 50 | 51 | impl Deref for Path { 52 | type Target = Vec; 53 | 54 | fn deref(&self) -> &Self::Target { 55 | &self.0 56 | } 57 | } 58 | 59 | impl Display for Path { 60 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 61 | write!(f, "{}", self.intersperse("/")) 62 | } 63 | } 64 | 65 | impl From for Path { 66 | fn from(name: Name) -> Self { 67 | Self(vec![name]) 68 | } 69 | } 70 | 71 | impl From for PathBuf { 72 | fn from(path: Path) -> Self { 73 | path.into_iter().map(PathBuf::from).collect() 74 | } 75 | } 76 | 77 | impl FromIterator for Path { 78 | fn 
from_iter<T: IntoIterator<Item = Name>>(iter: T) -> Self {
        Self(Vec::<Name>::from_iter(iter))
    }
}

impl FromStr for Path {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // A leading `/` is optional; `split_terminator` avoids an empty
        // trailing component when the input ends with `/`.
        s.trim_start_matches('/')
            .split_terminator('/')
            .map(FromStr::from_str)
            .collect::<Result<Vec<_>, Self::Err>>()
            .map(Self)
    }
}

impl IntoIterator for Path {
    type Item = Name;
    type IntoIter = std::vec::IntoIter<Name>;

    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn from_str() {
        assert_eq!("/".parse::<Path>().unwrap(), Path::ROOT);
        assert_eq!(
            "/foo".parse::<Path>().unwrap(),
            Path(vec!["foo".parse().unwrap()])
        );
        assert_eq!(
            "/foo/".parse::<Path>().unwrap(),
            Path(vec!["foo".parse().unwrap()])
        );
        assert_eq!(
            "/foo/bar".parse::<Path>().unwrap(),
            Path(vec!["foo".parse().unwrap(), "bar".parse().unwrap()])
        );
    }
}
--------------------------------------------------------------------------------
/crates/type/src/user/context.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0

use super::Name;

use std::fmt::Display;
use std::str::FromStr;

use anyhow::Context as _;

/// A user context: the user name.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct Context {
    pub name: Name,
}

impl FromStr for Context {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let name = s.parse().context("failed to parse user name")?;
        Ok(Self { name })
    }
}

impl Display for Context {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.name)
    }
}

#[cfg(feature = "axum")]
#[axum::async_trait]
impl<B: Send> axum::extract::FromRequest<B> for Context {
    type Rejection = (axum::http::StatusCode, String);

    async fn from_request(
        req: &mut axum::extract::RequestParts<B>,
    ) -> Result<Self, Self::Rejection> {
        let axum::Extension(name) = req.extract().await.map_err(|e| {
            (
                axum::http::StatusCode::INTERNAL_SERVER_ERROR,
                anyhow::Error::new(e)
                    .context("failed to extract user context")
                    .to_string(),
            )
        })?;
        Ok(Self { name })
    }
}
--------------------------------------------------------------------------------
/crates/type/src/user/mod.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0
mod context;
mod name;
mod record;

pub use context::*;
pub use name::*;
pub use record::*;
--------------------------------------------------------------------------------
/crates/type/src/user/name.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0

use std::fmt::Display;
use std::ops::Deref;
use std::str::FromStr;

use anyhow::bail;
use serde::de::Error;
use serde::{Deserialize, Deserializer, Serialize};

/// A user name
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize)]
#[repr(transparent)]
#[serde(transparent)]
pub struct Name(String);

impl Name {
    /// Validates that `s` is non-empty and contains only ASCII alphanumerics.
    #[inline]
    fn validate(s: impl AsRef<str>) -> anyhow::Result<()> {
        let s = s.as_ref();
        if s.is_empty() {
            bail!("empty user name")
        }
        if s.find(|c| !matches!(c, '0'..='9' | 'a'..='z' | 'A'..='Z'))
            .is_some()
        {
            bail!("invalid characters in user name")
        } else {
            Ok(())
        }
    }
}

impl Deref for Name {
    type Target = String;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<'de> Deserialize<'de> for Name {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let name = String::deserialize(deserializer)?;
        name.try_into().map_err(D::Error::custom)
    }
}

impl Display for Name {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl FromStr for Name {
    type Err = anyhow::Error;

    #[inline]
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Self::validate(s).map(|()| Self(s.into()))
    }
}

impl TryFrom<String> for Name {
    type Error = anyhow::Error;

    #[inline]
    fn try_from(s: String) -> Result<Self, Self::Error> {
        Self::validate(&s).map(|()| Self(s))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn from_str() {
        assert!("".parse::<Name>().is_err());
        assert!(" ".parse::<Name>().is_err());
        assert!("/".parse::<Name>().is_err());
        assert!("name/".parse::<Name>().is_err());
        assert!("/name".parse::<Name>().is_err());
        assert!("n%ame".parse::<Name>().is_err());
        assert!("n.ame".parse::<Name>().is_err());

        assert_eq!("name".parse::<Name>().unwrap(), Name("name".into()));
        assert_eq!("n4M3".parse::<Name>().unwrap(), Name("n4M3".into()));
    }
}
--------------------------------------------------------------------------------
/crates/type/src/user/record.rs:
--------------------------------------------------------------------------------
// SPDX-License-Identifier: Apache-2.0

use serde::{Deserialize, Serialize};

/// A user record
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
#[serde(deny_unknown_fields)]
pub struct Record {
    /// OpenID Connect identity subject uniquely identifying the user
    pub subject: String,
}
--------------------------------------------------------------------------------
/rust-toolchain.toml:
--------------------------------------------------------------------------------
[toolchain]
channel = "stable"
components = [ "rustfmt", "clippy" ]
| profile = "minimal" 5 | targets = [ 6 | "aarch64-unknown-linux-musl", 7 | "x86_64-unknown-linux-musl" 8 | ] 9 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | #![forbid(unsafe_code)] 5 | #![deny( 6 | clippy::all, 7 | absolute_paths_not_starting_with_crate, 8 | deprecated_in_future, 9 | missing_copy_implementations, 10 | missing_debug_implementations, 11 | noop_method_call, 12 | rust_2018_compatibility, 13 | rust_2018_idioms, 14 | rust_2021_compatibility, 15 | single_use_lifetimes, 16 | trivial_bounds, 17 | trivial_casts, 18 | trivial_numeric_casts, 19 | unreachable_code, 20 | unreachable_patterns, 21 | unreachable_pub, 22 | unstable_features, 23 | unused, 24 | unused_import_braces, 25 | unused_lifetimes, 26 | unused_results, 27 | variant_size_differences 28 | )] 29 | 30 | pub mod bytes { 31 | pub use drawbridge_byte::*; 32 | } 33 | #[cfg(feature = "client")] 34 | pub mod client { 35 | pub use drawbridge_client::*; 36 | } 37 | pub mod jose { 38 | pub use drawbridge_jose::*; 39 | } 40 | pub mod server { 41 | pub use drawbridge_server::*; 42 | } 43 | pub mod types { 44 | pub use drawbridge_type::*; 45 | } 46 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | // SPDX-FileCopyrightText: 2022 Profian Inc. 
2 | // SPDX-License-Identifier: Apache-2.0 3 | 4 | #![forbid(unsafe_code)] 5 | #![deny( 6 | clippy::all, 7 | absolute_paths_not_starting_with_crate, 8 | deprecated_in_future, 9 | missing_copy_implementations, 10 | missing_debug_implementations, 11 | noop_method_call, 12 | rust_2018_compatibility, 13 | rust_2018_idioms, 14 | rust_2021_compatibility, 15 | single_use_lifetimes, 16 | trivial_bounds, 17 | trivial_casts, 18 | trivial_numeric_casts, 19 | unreachable_code, 20 | unreachable_patterns, 21 | unreachable_pub, 22 | unstable_features, 23 | unused, 24 | unused_import_braces, 25 | unused_lifetimes, 26 | unused_qualifications, 27 | unused_results, 28 | variant_size_differences 29 | )] 30 | 31 | use std::fs::File; 32 | use std::io::{self, BufRead, BufReader}; 33 | use std::net::{IpAddr, Ipv4Addr, SocketAddr}; 34 | use std::path::{Path, PathBuf}; 35 | 36 | use drawbridge_server::url::Url; 37 | use drawbridge_server::{App, OidcConfig, TlsConfig}; 38 | 39 | use anyhow::Context as _; 40 | use async_std::net::TcpListener; 41 | use clap::Parser; 42 | use confargs::{args, prefix_char_filter, Toml}; 43 | use futures::StreamExt; 44 | use tracing::{debug, error}; 45 | 46 | /// Server for hosting WebAssembly modules for use in Enarx keeps. 47 | /// 48 | /// Any command-line options listed here may be specified by one or 49 | /// more configuration files, which can be used by passing the 50 | /// name of the file on the command-line with the syntax `@config.toml`. 51 | /// The configuration file must contain valid TOML table mapping argument 52 | /// names to their values. 53 | #[derive(Parser, Debug)] 54 | #[command(author, version, about)] 55 | struct Args { 56 | /// Address to bind to. 57 | #[arg(long, default_value_t = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 8080))] 58 | addr: SocketAddr, 59 | 60 | /// Path to the Drawbridge store. 61 | #[arg(long)] 62 | store: PathBuf, 63 | 64 | /// Path to PEM-encoded server certificate. 
65 | #[arg(long)] 66 | cert: PathBuf, 67 | 68 | /// Path to PEM-encoded server certificate key. 69 | #[arg(long)] 70 | key: PathBuf, 71 | 72 | /// Path to PEM-encoded trusted CA certificate. 73 | /// 74 | /// Clients that present a valid certificate signed by this CA 75 | /// are granted read-only access to all repositories in the store. 76 | #[arg(long)] 77 | ca: PathBuf, 78 | 79 | /// OpenID Connect issuer URL. 80 | #[arg(long)] 81 | oidc_issuer: Url, 82 | 83 | /// OpenID Connect audience. 84 | #[arg(long)] 85 | oidc_audience: String, 86 | } 87 | 88 | fn open_buffered(p: impl AsRef) -> io::Result { 89 | File::open(p).map(BufReader::new) 90 | } 91 | 92 | #[async_std::main] 93 | async fn main() -> anyhow::Result<()> { 94 | if std::env::var("RUST_LOG_JSON").is_ok() { 95 | tracing_subscriber::fmt::fmt() 96 | .json() 97 | .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) 98 | .init(); 99 | } else { 100 | tracing_subscriber::fmt::init(); 101 | } 102 | 103 | let Args { 104 | addr, 105 | store, 106 | cert, 107 | key, 108 | ca, 109 | oidc_audience, 110 | oidc_issuer, 111 | } = args::(prefix_char_filter::<'@'>) 112 | .context("Failed to parse config") 113 | .map(Args::parse_from)?; 114 | 115 | let cert = open_buffered(cert).context("Failed to open server certificate file")?; 116 | let key = open_buffered(key).context("Failed to open server key file")?; 117 | let ca = open_buffered(ca).context("Failed to open CA certificate file")?; 118 | let tls = TlsConfig::read(cert, key, ca).context("Failed to construct server TLS config")?; 119 | 120 | let app = App::new( 121 | store, 122 | tls, 123 | OidcConfig { 124 | audience: oidc_audience, 125 | issuer: oidc_issuer, 126 | }, 127 | ) 128 | .await 129 | .context("Failed to build app")?; 130 | TcpListener::bind(addr) 131 | .await 132 | .with_context(|| format!("Failed to bind to {addr}"))? 
133 | .incoming() 134 | .for_each_concurrent(None, |stream| async { 135 | if let Err(e) = async { 136 | let stream = stream.context("failed to initialize connection")?; 137 | debug!( 138 | target: "main", 139 | "received TCP connection from {}", 140 | stream 141 | .peer_addr() 142 | .map(|peer| peer.to_string()) 143 | .unwrap_or_else(|_| "unknown address".into()) 144 | ); 145 | app.handle(stream).await 146 | } 147 | .await 148 | { 149 | error!(target: "main", "failed to handle request: {e}"); 150 | } 151 | }) 152 | .await; 153 | Ok(()) 154 | } 155 | -------------------------------------------------------------------------------- /testdata/ca.conf: -------------------------------------------------------------------------------- 1 | [req] 2 | distinguished_name = req_distinguished_name 3 | prompt = no 4 | x509_extensions = v3_ca 5 | 6 | [req_distinguished_name] 7 | C = US 8 | ST = North Carolina 9 | L = Raleigh 10 | CN = ca.profian.localhost 11 | 12 | [v3_ca] 13 | basicConstraints = critical,CA:TRUE 14 | keyUsage = cRLSign, keyCertSign 15 | nsComment = "CA certificate" 16 | subjectKeyIdentifier = hash 17 | -------------------------------------------------------------------------------- /testdata/ca.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIICDzCCAbagAwIBAgIUPRV5nvtvma6UD1yCz8h23YhIU4IwCgYIKoZIzj0EAwIw 3 | VzELMAkGA1UEBhMCVVMxFzAVBgNVBAgMDk5vcnRoIENhcm9saW5hMRAwDgYDVQQH 4 | DAdSYWxlaWdoMR0wGwYDVQQDDBRjYS5wcm9maWFuLmxvY2FsaG9zdDAgFw0yMjA5 5 | MjcxMDI5MjJaGA8yMDUwMDIxMTEwMjkyMlowVzELMAkGA1UEBhMCVVMxFzAVBgNV 6 | BAgMDk5vcnRoIENhcm9saW5hMRAwDgYDVQQHDAdSYWxlaWdoMR0wGwYDVQQDDBRj 7 | YS5wcm9maWFuLmxvY2FsaG9zdDBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABPbI 8 | GTlvupARV88XZzhPFkbnsVxjwzdSSAGranJX2knGfy4NoFax3ywFqdSyRdcxShlq 9 | 9MjWjYwFu1MVXSR2dEqjXjBcMA8GA1UdEwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEG 10 | MB0GCWCGSAGG+EIBDQQQFg5DQSBjZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUrHjrHTYA 11 | 
NrCRO4mMFibzqanmwUswCgYIKoZIzj0EAwIDRwAwRAIgbDC+0YD9sJLAX93bDh8u 12 | aF/GkCVQjWLr2puL4VVcdkACIB6JHtP0nf9ZBel5aGkWRI7spUSsnPB3GacIEbGf 13 | Ob/d 14 | -----END CERTIFICATE----- 15 | -------------------------------------------------------------------------------- /testdata/ca.key: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgMz3U8FPTp7TmYQp/ 3 | PDThtovTYG+nBQVyvGr8yjHlhH+hRANCAAT2yBk5b7qQEVfPF2c4TxZG57FcY8M3 4 | UkgBq2pyV9pJxn8uDaBWsd8sBanUskXXMUoZavTI1o2MBbtTFV0kdnRK 5 | -----END PRIVATE KEY----- 6 | -------------------------------------------------------------------------------- /testdata/ca.srl: -------------------------------------------------------------------------------- 1 | 3B87E1D88A9361A95FAF38FA158992F077ED2895 2 | -------------------------------------------------------------------------------- /testdata/client.conf: -------------------------------------------------------------------------------- 1 | [req] 2 | distinguished_name = req_distinguished_name 3 | prompt = no 4 | req_extensions = v3_req 5 | x509_extensions = client_crt 6 | 7 | [req_distinguished_name] 8 | C = US 9 | ST = North Carolina 10 | L = Raleigh 11 | O = Profian 12 | CN = localhost 13 | 14 | [client_crt] 15 | nsCertType = client 16 | nsComment = "Client Certificate" 17 | subjectAltName = @alt_names 18 | 19 | [alt_names] 20 | DNS.1 = localhost 21 | DNS.2 = *.localhost 22 | IP.1 = 127.0.0.1 23 | IP.2 = ::1 24 | 25 | [v3_req] 26 | basicConstraints = CA:FALSE 27 | extendedKeyUsage = clientAuth 28 | keyUsage = digitalSignature, nonRepudiation, keyEncipherment 29 | -------------------------------------------------------------------------------- /testdata/client.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIICLTCCAdKgAwIBAgIUO4fh2IqTYalfrzj6FYmS8HftKJUwCgYIKoZIzj0EAwIw 3 | 
VzELMAkGA1UEBhMCVVMxFzAVBgNVBAgMDk5vcnRoIENhcm9saW5hMRAwDgYDVQQH 4 | DAdSYWxlaWdoMR0wGwYDVQQDDBRjYS5wcm9maWFuLmxvY2FsaG9zdDAgFw0yMjA5 5 | MjcxMDI5MjJaGA8yMDUwMDIxMTEwMjkyMlowXjELMAkGA1UEBhMCVVMxFzAVBgNV 6 | BAgMDk5vcnRoIENhcm9saW5hMRAwDgYDVQQHDAdSYWxlaWdoMRAwDgYDVQQKDAdQ 7 | cm9maWFuMRIwEAYDVQQDDAlsb2NhbGhvc3QwWTATBgcqhkjOPQIBBggqhkjOPQMB 8 | BwNCAAQgT+X0AWel3PRCB+N1T5aAQ67aed5EH8uSX+weKRn1ojyWBIYlGjg+imiv 9 | kNHimpvf/Nn4jd8X25/t4a+eBQ/1o3MwcTARBglghkgBhvhCAQEEBAMCB4AwIQYJ 10 | YIZIAYb4QgENBBQWEkNsaWVudCBDZXJ0aWZpY2F0ZTA5BgNVHREEMjAwgglsb2Nh 11 | bGhvc3SCCyoubG9jYWxob3N0hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABMAoGCCqG 12 | SM49BAMCA0kAMEYCIQDGTNRpdbUVjFJNp2MZnR40lCPCBYhKZnk8yUJuxrzchQIh 13 | APmmvx3PisMfdzxwWuGmF5utoYL2WrgJ/+5qrKtgqO78 14 | -----END CERTIFICATE----- 15 | -------------------------------------------------------------------------------- /testdata/client.csr: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE REQUEST----- 2 | MIIBVjCB/gIBADBeMQswCQYDVQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xp 3 | bmExEDAOBgNVBAcMB1JhbGVpZ2gxEDAOBgNVBAoMB1Byb2ZpYW4xEjAQBgNVBAMM 4 | CWxvY2FsaG9zdDBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCBP5fQBZ6Xc9EIH 5 | 43VPloBDrtp53kQfy5Jf7B4pGfWiPJYEhiUaOD6KaK+Q0eKam9/82fiN3xfbn+3h 6 | r54FD/WgPjA8BgkqhkiG9w0BCQ4xLzAtMAkGA1UdEwQCMAAwEwYDVR0lBAwwCgYI 7 | KwYBBQUHAwIwCwYDVR0PBAQDAgXgMAoGCCqGSM49BAMCA0cAMEQCICGuQfXLi4cH 8 | 6Nns41/CwhNDe2JTFxQy5yCR5MU3sbYcAiAso1ldahetguYFK4XSV0+jOPwMQ61M 9 | 93qE6dmbYznlDg== 10 | -----END CERTIFICATE REQUEST----- 11 | -------------------------------------------------------------------------------- /testdata/client.key: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgMzqJ0VKDQwe1oZqM 3 | DRlinzLXpT5zb/wdr+EZga4n6sihRANCAAQgT+X0AWel3PRCB+N1T5aAQ67aed5E 4 | H8uSX+weKRn1ojyWBIYlGjg+imivkNHimpvf/Nn4jd8X25/t4a+eBQ/1 5 | -----END PRIVATE KEY----- 6 | 
-------------------------------------------------------------------------------- /testdata/generate.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | printf "Generating CA key\n" 3 | openssl ecparam -genkey -name prime256v1 | openssl pkcs8 -topk8 -nocrypt -out ca.key 4 | printf "\nCA " 5 | openssl pkey -noout -text -in ca.key 6 | 7 | printf "\nGenerating CA certificate\n" 8 | openssl req -new -x509 -days 9999 -config ca.conf -key ca.key -out ca.crt 9 | printf "\nCA " 10 | openssl x509 -noout -text -in ca.crt 11 | 12 | printf "\nGenerating Server key\n" 13 | openssl ecparam -genkey -name prime256v1 | openssl pkcs8 -topk8 -nocrypt -out server.key 14 | printf "\nServer " 15 | openssl pkey -noout -text -in server.key 16 | 17 | printf "\nGenerating Server Certificate Signing Request\n" 18 | openssl req -new -config server.conf -key server.key -out server.csr 19 | printf "\nServer " 20 | openssl req -text -in server.csr 21 | 22 | printf "\nGenerating Server Certificate\n" 23 | openssl x509 -req -days 9999 -CAcreateserial -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -extfile server.conf -extensions server_crt 24 | printf "\nServer " 25 | openssl x509 -noout -text -in server.crt 26 | 27 | printf "\nGenerating Client key\n" 28 | openssl ecparam -genkey -name prime256v1 | openssl pkcs8 -topk8 -nocrypt -out client.key 29 | printf "\nClient " 30 | openssl pkey -noout -text -in client.key 31 | 32 | printf "\nGenerating Client Certificate Signing Request\n" 33 | openssl req -new -config client.conf -key client.key -out client.csr 34 | printf "\nClient " 35 | openssl req -text -in client.csr 36 | 37 | printf "\nGenerating Client Certificate\n" 38 | openssl x509 -req -days 9999 -CAcreateserial -CA ca.crt -CAkey ca.key -in client.csr -out client.crt -extfile client.conf -extensions client_crt 39 | printf "\nClient " 40 | openssl x509 -noout -text -in client.crt 41 | 
-------------------------------------------------------------------------------- /testdata/server.conf: -------------------------------------------------------------------------------- 1 | [req] 2 | distinguished_name = req_distinguished_name 3 | prompt = no 4 | req_extensions = v3_req 5 | x509_extensions = server_crt 6 | 7 | [req_distinguished_name] 8 | C = US 9 | ST = North Carolina 10 | L = Raleigh 11 | O = Profian 12 | CN = localhost 13 | 14 | [server_crt] 15 | nsCertType = server 16 | nsComment = "Server Certificate" 17 | subjectAltName = @alt_names 18 | 19 | [alt_names] 20 | DNS.1 = localhost 21 | DNS.2 = *.localhost 22 | IP.1 = 127.0.0.1 23 | IP.2 = ::1 24 | 25 | [v3_req] 26 | basicConstraints = CA:FALSE 27 | extendedKeyUsage = serverAuth 28 | keyUsage = digitalSignature, nonRepudiation, keyEncipherment 29 | -------------------------------------------------------------------------------- /testdata/server.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIICLTCCAdKgAwIBAgIUO4fh2IqTYalfrzj6FYmS8HftKJQwCgYIKoZIzj0EAwIw 3 | VzELMAkGA1UEBhMCVVMxFzAVBgNVBAgMDk5vcnRoIENhcm9saW5hMRAwDgYDVQQH 4 | DAdSYWxlaWdoMR0wGwYDVQQDDBRjYS5wcm9maWFuLmxvY2FsaG9zdDAgFw0yMjA5 5 | MjcxMDI5MjJaGA8yMDUwMDIxMTEwMjkyMlowXjELMAkGA1UEBhMCVVMxFzAVBgNV 6 | BAgMDk5vcnRoIENhcm9saW5hMRAwDgYDVQQHDAdSYWxlaWdoMRAwDgYDVQQKDAdQ 7 | cm9maWFuMRIwEAYDVQQDDAlsb2NhbGhvc3QwWTATBgcqhkjOPQIBBggqhkjOPQMB 8 | BwNCAAStd1QLb4iKCf8lH/LO6VKbDLAJaiyVbh4+eV+pZsuiC5c8iN1r/LyfTKvR 9 | 3WLGBBeSXnyoxQBHP2PWUrBJttNEo3MwcTARBglghkgBhvhCAQEEBAMCBkAwIQYJ 10 | YIZIAYb4QgENBBQWElNlcnZlciBDZXJ0aWZpY2F0ZTA5BgNVHREEMjAwgglsb2Nh 11 | bGhvc3SCCyoubG9jYWxob3N0hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABMAoGCCqG 12 | SM49BAMCA0kAMEYCIQDNwjzg81ltKgZ0S701SqCw+VAZ11g9HcxFPhZKrpgqwgIh 13 | AJBSvXfEj1ndgXCRMJ4ANVmG2XiD9xkrXsSFk2JJU3Ea 14 | -----END CERTIFICATE----- 15 | -------------------------------------------------------------------------------- /testdata/server.csr: 
-------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE REQUEST----- 2 | MIIBVzCB/gIBADBeMQswCQYDVQQGEwJVUzEXMBUGA1UECAwOTm9ydGggQ2Fyb2xp 3 | bmExEDAOBgNVBAcMB1JhbGVpZ2gxEDAOBgNVBAoMB1Byb2ZpYW4xEjAQBgNVBAMM 4 | CWxvY2FsaG9zdDBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABK13VAtviIoJ/yUf 5 | 8s7pUpsMsAlqLJVuHj55X6lmy6ILlzyI3Wv8vJ9Mq9HdYsYEF5JefKjFAEc/Y9ZS 6 | sEm200SgPjA8BgkqhkiG9w0BCQ4xLzAtMAkGA1UdEwQCMAAwEwYDVR0lBAwwCgYI 7 | KwYBBQUHAwEwCwYDVR0PBAQDAgXgMAoGCCqGSM49BAMCA0gAMEUCIHCEwPcTQtkF 8 | W2Aj1Nf9fGqdq9nbn9bQkXYfrJcdScLDAiEA5+EidM6aeT7/D50dnie8NWbALPf/ 9 | SsQHCrJcsi+0rpQ= 10 | -----END CERTIFICATE REQUEST----- 11 | -------------------------------------------------------------------------------- /testdata/server.key: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgu9xt6xQvDxn+/3yd 3 | yA+59/1x/6imShJf5H2Zkwot6QGhRANCAAStd1QLb4iKCf8lH/LO6VKbDLAJaiyV 4 | bh4+eV+pZsuiC5c8iN1r/LyfTKvR3WLGBBeSXnyoxQBHP2PWUrBJttNE 5 | -----END PRIVATE KEY----- 6 | --------------------------------------------------------------------------------