├── .devcontainer ├── Dockerfile └── devcontainer.json ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── config.yml │ └── feature_request.md ├── codecov.yml ├── dependabot.yml └── workflows │ ├── checks.yml │ ├── ci.yml │ ├── pr-benchmarks.yml │ └── release-plz.yml ├── .gitignore ├── .rustfmt.toml ├── CHANGELOG.md ├── Cargo.lock ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── SECURITY.md ├── deny.toml ├── examples ├── archive_async │ ├── Cargo.toml │ └── src │ │ └── main.rs ├── archive_sync │ ├── Cargo.toml │ └── src │ │ └── main.rs ├── axum_embedded │ ├── Cargo.toml │ └── src │ │ └── main.rs ├── diesel_embedded │ ├── Cargo.toml │ ├── README.md │ ├── diesel.toml │ ├── migrations │ │ ├── .keep │ │ └── 2024-08-17-200823_create_posts │ │ │ ├── down.sql │ │ │ └── up.sql │ └── src │ │ ├── main.rs │ │ ├── models.rs │ │ └── schema.rs ├── download_progress_bar │ ├── Cargo.toml │ └── src │ │ └── main.rs ├── embedded_async │ ├── Cargo.toml │ └── src │ │ └── main.rs ├── embedded_sync │ ├── Cargo.toml │ └── src │ │ └── main.rs ├── portal_corp_extension │ ├── Cargo.toml │ └── src │ │ └── main.rs ├── postgres_embedded │ ├── Cargo.toml │ ├── README.md │ └── src │ │ └── main.rs ├── sqlx_embedded │ ├── Cargo.toml │ ├── README.md │ └── src │ │ └── main.rs ├── tensor_chord_extension │ ├── Cargo.toml │ └── src │ │ └── main.rs └── zonky │ ├── Cargo.toml │ └── src │ └── main.rs ├── images ├── full_logo.png ├── logo.png └── original_logo.png ├── postgresql_archive ├── Cargo.toml ├── README.md ├── benches │ └── archive.rs ├── src │ ├── archive.rs │ ├── blocking │ │ ├── archive.rs │ │ └── mod.rs │ ├── configuration │ │ ├── custom │ │ │ ├── matcher.rs │ │ │ └── mod.rs │ │ ├── mod.rs │ │ ├── theseus │ │ │ ├── extractor.rs │ │ │ ├── matcher.rs │ │ │ └── mod.rs │ │ └── zonky │ │ │ ├── extractor.rs │ │ │ ├── matcher.rs │ │ │ ├── mod.rs │ │ │ └── repository.rs │ ├── error.rs │ ├── extractor │ │ ├── mod.rs │ │ ├── model.rs │ │ ├── registry.rs │ │ ├── tar_gz_extractor.rs │ │ ├── tar_xz_extractor.rs │ │ └── zip_extractor.rs │ ├── hasher │ │ ├── md5.rs │ │ ├── mod.rs │ │ ├── registry.rs │ │ ├── sha1.rs │ │ ├── sha2_256.rs │ │ └── sha2_512.rs │ ├── lib.rs │ ├── matcher │ │ ├── mod.rs │ │ └── registry.rs │ ├── repository │ │ ├── github │ │ │ ├── mod.rs │ │ │ ├── models.rs │ │ │ └── repository.rs │ │ ├── maven │ │ │ ├── mod.rs │ │ │ ├── models.rs │ │ │ └── repository.rs │ │ ├── mod.rs │ │ ├── model.rs │ │ └── registry.rs │ └── version.rs └── tests │ ├── archive.rs │ ├── blocking.rs │ └── zonky.rs ├── postgresql_commands ├── Cargo.toml ├── README.md └── src │ ├── clusterdb.rs │ ├── createdb.rs │ ├── createuser.rs │ ├── dropdb.rs │ ├── dropuser.rs │ ├── ecpg.rs │ ├── error.rs │ ├── initdb.rs │ ├── lib.rs │ ├── oid2name.rs │ ├── pg_amcheck.rs │ ├── pg_archivecleanup.rs │ ├── pg_basebackup.rs │ ├── pg_checksums.rs │ ├── pg_config.rs │ ├── pg_controldata.rs │ ├── pg_ctl.rs │ ├── pg_dump.rs │ ├── pg_dumpall.rs │ ├── pg_isready.rs │ ├── pg_receivewal.rs │ ├── pg_recvlogical.rs │ ├── pg_resetwal.rs │ ├── pg_restore.rs │ ├── pg_rewind.rs │ ├── pg_test_fsync.rs │ ├── pg_test_timing.rs │ ├── pg_upgrade.rs │ ├── pg_verifybackup.rs │ ├── pg_waldump.rs │ ├── pgbench.rs │ ├── postgres.rs │ ├── psql.rs │ ├── reindexdb.rs │ ├── traits.rs │ ├── vacuumdb.rs │ └── vacuumlo.rs ├── postgresql_embedded ├── Cargo.toml ├── README.md ├── benches │ └── embedded.rs ├── build │ ├── build.rs │ └── bundle.rs ├── src │ ├── blocking │ │ ├── mod.rs │ │ └── postgresql.rs │ ├── error.rs │ ├── lib.rs │ ├── postgresql.rs 
│ └── settings.rs └── tests │ ├── blocking.rs │ ├── dump_command.rs │ ├── environment_variables.rs │ ├── postgresql.rs │ ├── start_config.rs │ └── zonky.rs ├── postgresql_extensions ├── Cargo.toml ├── README.md ├── src │ ├── blocking │ │ ├── extensions.rs │ │ └── mod.rs │ ├── error.rs │ ├── extensions.rs │ ├── lib.rs │ ├── matcher.rs │ ├── model.rs │ └── repository │ │ ├── mod.rs │ │ ├── model.rs │ │ ├── portal_corp │ │ ├── mod.rs │ │ └── repository.rs │ │ ├── registry.rs │ │ ├── steampipe │ │ ├── extensions.rs │ │ ├── mod.rs │ │ └── repository.rs │ │ └── tensor_chord │ │ ├── mod.rs │ │ └── repository.rs └── tests │ ├── blocking.rs │ ├── extensions.rs │ ├── portal_corp.rs │ └── steampipe.rs ├── release-plz.toml └── rust-toolchain.toml /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG VARIANT="bullseye" 2 | FROM mcr.microsoft.com/vscode/devcontainers/rust:1-${VARIANT} 3 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Rust", 3 | "build": { 4 | "dockerfile": "Dockerfile", 5 | "args": { 6 | "VARIANT": "bullseye" 7 | } 8 | }, 9 | "runArgs": [ 10 | "--cap-add=SYS_PTRACE", 11 | "--security-opt", 12 | "seccomp=unconfined" 13 | ], 14 | 15 | "customizations": { 16 | "vscode": { 17 | "settings": { 18 | "lldb.executable": "/usr/bin/lldb", 19 | "files.watcherExclude": { 20 | "**/target/**": true 21 | }, 22 | "rust-analyzer.checkOnSave.command": "clippy" 23 | }, 24 | 25 | // Add the IDs of extensions you want installed when the container is created. 26 | "extensions": [ 27 | "mutantdino.resourcemonitor", 28 | "rust-lang.rust-analyzer", 29 | "serayuzgur.crates", 30 | "tamasfe.even-better-toml", 31 | "vadimcn.vscode-lldb" 32 | ] 33 | } 34 | }, 35 | 36 | "remoteUser": "vscode" 37 | } 38 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: brianheineman 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "\U0001F41E Bug Report" 3 | about: "If something isn't working as expected \U0001F914." 4 | title: '' 5 | labels: 'i: bug, i: needs triage' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **What steps will reproduce the bug? (please provide code snippet if relevant)** 11 | 12 | 1. step 1 13 | 2. step 2 14 | 3. ... 15 | 16 | **What happens?** 17 | 18 | ... 19 | 20 | **What did you expect to happen instead?** 21 | 22 | ... 23 | 24 | ### Information about your environment 25 | 26 | * postgresql_embedded version: [REQUIRED] (e.g. "0.14.2") 27 | * Database version: [REQUIRED] (e.g. 
"16.4.0") 28 | * Operating system: [REQUIRED] 29 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "\U00002728 Feature Request" 3 | about: "I have a suggestion (and may want to implement it \U0001F642)!" 4 | title: '' 5 | labels: 'i: enhancement, i: needs triage' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | status: 3 | patch: 4 | default: 5 | threshold: 0.05% 6 | project: 7 | default: 8 | threshold: 0.05% 9 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "cargo" 4 | directory: "/" 5 | schedule: 6 | interval: "monthly" 7 | -------------------------------------------------------------------------------- /.github/workflows/checks.yml: -------------------------------------------------------------------------------- 1 | name: Fast checks 2 | 3 | env: 4 | CARGO_TERM_COLOR: always 5 | RUSTFLAGS: "-D warnings" 6 | 7 | on: 8 | workflow_call: 9 | 10 | permissions: 11 | contents: read 12 | 13 | jobs: 14 | audit: 15 | runs-on: ubuntu-22.04 16 | steps: 17 | - name: Checkout repository 18 | uses: actions/checkout@v4 19 | - name: Install Rust 20 | uses: dtolnay/rust-toolchain@master 21 | with: 22 | toolchain: stable 23 | - name: Install cargo audit 24 | run: cargo install cargo-audit 25 | - name: Audit dependencies 26 | run: cargo audit 27 | 28 | check: 29 | runs-on: ubuntu-22.04 30 | steps: 31 | - name: Checkout repository 32 | uses: actions/checkout@v4 33 | - name: Install Rust 34 | uses: dtolnay/rust-toolchain@master 35 | with: 36 | toolchain: stable 37 | - name: Check the project 38 | run: | 39 | cargo check --workspace --all-targets --features blocking 40 | cargo check --workspace --all-targets --features bundled 41 | cargo check --workspace --all-targets --features tokio 42 | cargo check --workspace --all-targets --all-features 43 | 44 | clippy: 45 | runs-on: ubuntu-22.04 46 | steps: 47 | - name: Checkout repository 48 | uses: actions/checkout@v4 49 | - name: Install Rust 50 | uses: dtolnay/rust-toolchain@master 51 | with: 52 | toolchain: stable 53 | - name: Check lints 54 | env: 55 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} 56 | run: | 57 | cargo clippy --all-targets --all-features --examples --tests 58 | 59 | deny: 60 | runs-on: ubuntu-22.04 61 | steps: 62 | - name: Checkout repository 63 | uses: 
actions/checkout@v4 64 | - name: Install Rust 65 | uses: dtolnay/rust-toolchain@master 66 | with: 67 | toolchain: stable 68 | - name: Install cargo deny 69 | run: cargo install cargo-deny 70 | - name: Check licenses 71 | run: cargo deny check --allow duplicate 72 | 73 | doc: 74 | runs-on: ubuntu-22.04 75 | steps: 76 | - name: Checkout repository 77 | uses: actions/checkout@v4 78 | - name: Install Rust 79 | uses: dtolnay/rust-toolchain@master 80 | with: 81 | toolchain: stable 82 | - name: Check documentation 83 | env: 84 | RUSTDOCFLAGS: -D warnings 85 | run: cargo doc --workspace --no-deps --document-private-items --all-features 86 | 87 | fmt: 88 | runs-on: ubuntu-22.04 89 | steps: 90 | - name: Checkout repository 91 | uses: actions/checkout@v4 92 | - name: Install Rust 93 | uses: dtolnay/rust-toolchain@master 94 | with: 95 | toolchain: stable 96 | components: rustfmt 97 | - name: Check formatting 98 | run: cargo fmt --all --check 99 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | 11 | permissions: 12 | contents: read 13 | 14 | jobs: 15 | checks: 16 | name: Checks 17 | uses: ./.github/workflows/checks.yml 18 | 19 | build: 20 | name: ${{ matrix.platform }} 21 | needs: [ checks ] 22 | runs-on: ${{ matrix.os }} 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | platform: 27 | - linux-arm 28 | - linux-x64 29 | - macos-arm 30 | - macos-x64 31 | - windows-x64 32 | 33 | include: 34 | - platform: linux-arm 35 | os: ubuntu-24.04-arm 36 | - platform: linux-x64 37 | os: ubuntu-latest 38 | - platform: macos-arm 39 | os: macos-15 40 | - platform: macos-x64 41 | os: macos-13 42 | - platform: windows-x64 43 | os: windows-2022 44 | 45 | steps: 46 | - name: Checkout source code 47 | uses: actions/checkout@v4 48 | 49 | - name: Install Rust 50 | uses: dtolnay/rust-toolchain@master 51 | with: 52 | toolchain: stable 53 | 54 | - name: Install cargo-llvm-cov 55 | uses: taiki-e/install-action@main 56 | with: 57 | tool: cargo-llvm-cov 58 | 59 | - name: Tests 60 | if: ${{ matrix.platform != 'linux-x64' }} 61 | env: 62 | CARGO_TERM_COLOR: always 63 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} 64 | RUST_LOG: "info,postgresql_archive=debug,postgresql_commands=debug,postgresql_embedded=debug" 65 | RUST_LOG_SPAN_EVENTS: full 66 | run: | 67 | cargo test 68 | 69 | - name: Tests 70 | if: ${{ matrix.platform == 'linux-x64' }} 71 | env: 72 | CARGO_TERM_COLOR: always 73 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} 74 | RUST_LOG: "info,postgresql_archive=debug,postgresql_commands=debug,postgresql_embedded=debug" 75 | RUST_LOG_SPAN_EVENTS: full 76 | run: | 77 | cargo llvm-cov --all-features --workspace --lcov --output-path lcov.info 78 | 79 | - name: Upload to codecov.io 80 | if: ${{ matrix.platform == 'linux-x64' }} 81 | uses: codecov/codecov-action@v4 82 | with: 83 | files: lcov.info 84 | fail_ci_if_error: true 85 | verbose: true 86 | env: 87 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 88 | 89 | - name: Install benchmarking tools 90 | if: ${{ github.ref == 'refs/heads/main' && matrix.platform == 'linux-x64' }} 91 | uses: bencherdev/bencher@main 92 | 93 | - name: Run benchmarks 94 | if: ${{ github.ref == 'refs/heads/main' && matrix.platform == 'linux-x64' }} 95 | env: 96 | BENCHER_API_TOKEN: ${{ secrets.BENCHER_API_TOKEN }} 97 | BENCHER_PROJECT: theseus-rs-postgresql-embedded 98 | 
BENCHER_ADAPTER: rust_criterion 99 | run: | 100 | bencher run "cargo bench --features blocking" 101 | -------------------------------------------------------------------------------- /.github/workflows/pr-benchmarks.yml: -------------------------------------------------------------------------------- 1 | name: Benchmarks 2 | 3 | on: 4 | pull_request: 5 | types: [ opened, reopened, synchronize ] 6 | 7 | permissions: 8 | contents: read 9 | 10 | jobs: 11 | benchmark: 12 | name: Run Benchmarks 13 | runs-on: ubuntu-22.04 14 | permissions: 15 | pull-requests: write 16 | steps: 17 | - name: Checkout source code 18 | uses: actions/checkout@v4 19 | 20 | - name: Install Rust 21 | uses: dtolnay/rust-toolchain@master 22 | with: 23 | components: 'llvm-tools-preview' 24 | toolchain: stable 25 | 26 | - name: Install benchmarking tools 27 | uses: bencherdev/bencher@main 28 | 29 | - name: Run benchmarks 30 | env: 31 | BENCHER_API_TOKEN: ${{ secrets.BENCHER_API_TOKEN }} 32 | BENCHER_PROJECT: theseus-rs-postgresql-embedded 33 | BENCHER_ADAPTER: rust_criterion 34 | run: | 35 | bencher run \ 36 | --branch $GITHUB_HEAD_REF \ 37 | --ci-number "${{ github.event.number }}" \ 38 | --github-actions "${{ secrets.GITHUB_TOKEN }}" \ 39 | --err \ 40 | "cargo bench --features blocking" 41 | -------------------------------------------------------------------------------- /.github/workflows/release-plz.yml: -------------------------------------------------------------------------------- 1 | name: Release-plz 2 | 3 | permissions: 4 | pull-requests: write 5 | contents: write 6 | 7 | on: 8 | push: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | 14 | # Release unpublished packages. 15 | release-plz-release: 16 | name: Release-plz release 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: Checkout repository 20 | uses: actions/checkout@v4 21 | with: 22 | fetch-depth: 0 23 | - name: Install Rust toolchain 24 | uses: dtolnay/rust-toolchain@stable 25 | - name: Run release-plz 26 | uses: release-plz/action@v0.5 27 | with: 28 | command: release 29 | env: 30 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 31 | CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} 32 | 33 | # Create a PR with the new versions and changelog, preparing the next release. 
34 | release-plz-pr: 35 | name: Release-plz PR 36 | runs-on: ubuntu-latest 37 | concurrency: 38 | group: release-plz-${{ github.ref }} 39 | cancel-in-progress: false 40 | steps: 41 | - name: Checkout repository 42 | uses: actions/checkout@v4 43 | with: 44 | fetch-depth: 0 45 | - name: Install Rust toolchain 46 | uses: dtolnay/rust-toolchain@stable 47 | - name: Run release-plz 48 | uses: release-plz/action@v0.5 49 | with: 50 | command: release-pr 51 | env: 52 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 53 | CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} 54 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | 3 | # Rust Rover 4 | /.idea 5 | -------------------------------------------------------------------------------- /.rustfmt.toml: -------------------------------------------------------------------------------- 1 | newline_style = "Unix" 2 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | default-members = [ 3 | "postgresql_archive", 4 | "postgresql_commands", 5 | "postgresql_embedded", 6 | "postgresql_extensions", 7 | ] 8 | members = [ 9 | "examples/*", 10 | "postgresql_archive", 11 | "postgresql_commands", 12 | "postgresql_embedded", 13 | "postgresql_extensions", 14 | ] 15 | resolver = "3" 16 | 17 | [workspace.package] 18 | authors = ["Brian Heineman "] 19 | categories = ["database"] 20 | edition = "2024" 21 | keywords = ["postgresql", "postgres", "embedded", "database", "server"] 22 | license = "(Apache-2.0 OR MIT) AND PostgreSQL" 23 | repository = "https://github.com/theseus-rs/postgresql-embedded" 24 | rust-version = "1.87.0" 25 | version = "0.18.5" 26 | 27 | [workspace.dependencies] 28 | anyhow = "1.0.98" 29 | async-trait = "0.1.88" 30 | axum = "0.8.4" 31 | criterion = "0.6.0" 32 | diesel = "2.2.10" 33 | diesel_migrations = "2.2.0" 34 | flate2 = "1.1.1" 35 | futures-util = "0.3.31" 36 | hex = "0.4.3" 37 | indicatif = "0.17.11" 38 | indoc = "2.0.6" 39 | liblzma = "0.4.1" 40 | md-5 = "0.10.6" 41 | num-format = "0.4.4" 42 | pgvector = "0.4.1" 43 | postgres = "0.19.10" 44 | quick-xml = "0.37.5" 45 | r2d2_postgres = "0.18.2" 46 | rand = "0.9.1" 47 | regex-lite = "0.1.6" 48 | reqwest = { version = "0.12.18", default-features = false } 49 | reqwest-middleware = "0.4.2" 50 | reqwest-retry = "0.7.0" 51 | reqwest-tracing = "0.5.7" 52 | semver = "1.0.26" 53 | serde = "1.0.219" 54 | serde_json = "1.0.140" 55 | sha1 = "0.10.6" 56 | sha2 = "0.10.8" 57 | sqlx = { version = "0.8.6", default-features = false, features = ["postgres"] } 58 | tar = "0.4.44" 59 | target-triple = "0.1.4" 60 | tempfile = "3.20.0" 61 | test-log = "0.2.17" 62 | thiserror = "2.0.12" 63 | tokio = "1.45.1" 64 | tracing = "0.1.41" 65 | tracing-indicatif = "0.3.9" 66 | tracing-subscriber = "0.3.19" 67 | url = "2.5.4" 68 | zip = { version = "4.0.0", default-features = false, features = ["deflate"] } 69 | 70 | [workspace.metadata.release] 71 | shared-version = true 72 | dependent-version = "upgrade" 73 | tag-name = "v{{version}}" 74 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Theseus contributors 4 | 5 | Permission is hereby granted, free of charge, to any 6 | 
person obtaining a copy of this software and associated 7 | documentation files (the "Software"), to deal in the 8 | Software without restriction, including without 9 | limitation the rights to use, copy, modify, merge, 10 | publish, distribute, sublicense, and/or sell copies of 11 | the Software, and to permit persons to whom the Software 12 | is furnished to do so, subject to the following 13 | conditions: 14 | 15 | The above copyright notice and this permission notice 16 | shall be included in all copies or substantial portions 17 | of the Software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 20 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 21 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 22 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 23 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 24 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 25 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 26 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 27 | DEALINGS IN THE SOFTWARE. 28 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 | # PostgreSQL Embedded 4 | 5 | [![ci](https://github.com/theseus-rs/postgresql-embedded/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/theseus-rs/postgresql-embedded/actions/workflows/ci.yml) 6 | [![Documentation](https://docs.rs/postgresql_embedded/badge.svg)](https://docs.rs/postgresql_embedded) 7 | [![Code Coverage](https://codecov.io/gh/theseus-rs/postgresql-embedded/branch/main/graph/badge.svg)](https://codecov.io/gh/theseus-rs/postgresql-embedded) 8 | [![Benchmarks](https://img.shields.io/badge/%F0%9F%90%B0_bencher-enabled-6ec241)](https://bencher.dev/perf/theseus-rs-postgresql-embedded) 9 | [![Latest version](https://img.shields.io/crates/v/postgresql_embedded.svg)](https://crates.io/crates/postgresql_embedded) 10 | [![License](https://img.shields.io/crates/l/postgresql_embedded)](https://github.com/theseus-rs/postgresql-embedded#license) 11 | [![Semantic Versioning](https://img.shields.io/badge/%E2%9A%99%EF%B8%8F_SemVer-2.0.0-blue)](https://semver.org/spec/v2.0.0.html) 12 | 13 | Install and run a PostgreSQL database locally on Linux, MacOS or Windows. PostgreSQL can be 14 | bundled with your application, or downloaded on demand. 15 | 16 | This library provides an embedded-like experience for PostgreSQL similar to what you would have with 17 | SQLite. This is accomplished by downloading and installing PostgreSQL during runtime. There is 18 | also a "bundled" feature that when enabled, will download the PostgreSQL installation archive at 19 | compile time, include it in your binary and install from the binary version at runtime. 20 | In either case, PostgreSQL will run in a separate process space. 21 | 22 | ## Features 23 | 24 | - installing and running PostgreSQL 25 | - running PostgreSQL on ephemeral ports 26 | - async and blocking API 27 | - bundling the PostgreSQL archive in an executable 28 | - semantic version resolution 29 | - support for custom PostgreSQL archives / binaries 30 | - ability to configure PostgreSQL startup options 31 | - URL based configuration 32 | - choice of native-tls vs rustls 33 | - support for installing PostgreSQL extensions 34 | 35 | ## Getting Started 36 | 37 | ### Example 38 | 39 | ```rust 40 | use postgresql_embedded::{PostgreSQL, Result}; 41 | 42 | #[tokio::main] 43 | async fn main() -> Result<()> { 44 | let mut postgresql = PostgreSQL::default(); 45 | postgresql.setup().await?; 46 | postgresql.start().await?; 47 | 48 | let database_name = "test"; 49 | postgresql.create_database(database_name).await?; 50 | postgresql.database_exists(database_name).await?; 51 | postgresql.drop_database(database_name).await?; 52 | 53 | postgresql.stop().await 54 | } 55 | ``` 56 | 57 | ## Safety 58 | 59 | These crates use `#![forbid(unsafe_code)]` to ensure everything is implemented in 100% safe Rust. 60 | 61 | ## License 62 | 63 | Licensed under either of 64 | 65 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or https://www.apache.org/licenses/LICENSE-2.0) 66 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or https://opensource.org/licenses/MIT) 67 | 68 | at your option. 69 | 70 | PostgreSQL is covered under [The PostgreSQL License](https://opensource.org/licenses/postgresql). 
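## Blocking API and URL configuration

The feature list above also mentions a blocking API and URL based configuration. The sketch below combines the two, following the bundled `embedded_sync`, `postgres_embedded`, and `axum_embedded` examples; it assumes the `blocking` feature is enabled and that the blocking wrapper exposes the same `new(Settings)` constructor as the async API shown under Getting Started.

```rust
use postgresql_embedded::blocking::PostgreSQL;
use postgresql_embedded::{Result, Settings};

fn main() -> Result<()> {
    // Configure the server from a URL instead of setting individual fields.
    let settings = Settings::from_url("postgresql://postgres@localhost")?;
    let mut postgresql = PostgreSQL::new(settings);
    postgresql.setup()?;
    postgresql.start()?;

    postgresql.create_database("test")?;
    // The runtime connection URL for the database that was just created.
    println!("{}", postgresql.settings().url("test"));

    postgresql.stop()
}
```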
71 | 72 | ## Notes 73 | 74 | Supports using PostgreSQL binaries from: 75 | 76 | * [theseus-rs/postgresql-binaries](https://github.com/theseus-rs/postgresql-binaries) (default) 77 | * [zonkyio/embedded-postgres-binaries](https://github.com/zonkyio/embedded-postgres-binaries) 78 | 79 | ## Contribution 80 | 81 | Unless you explicitly state otherwise, any contribution intentionally submitted 82 | for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any 83 | additional terms or conditions. 84 | 85 | 86 | VSCode Development Container 90 | 91 |
92 | 93 | GitHub Codespaces 97 | 98 | 99 | ## Prior Art 100 | 101 | Projects that inspired this one: 102 | 103 | * [zonkyio/embedded-postgres-binaries](https://github.com/zonkyio/embedded-postgres-binaries) 104 | * [faokunega/pg-embed](https://github.com/faokunega/pg-embed) 105 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | Only the latest version of this crate is supported. 6 | 7 | ## Reporting a Vulnerability 8 | 9 | To report a security vulnerability, please use the form 10 | at https://github.com/theseus-rs/postgresql-embedded/security/advisories/new 11 | -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | # Documentation for this configuration file can be found here 2 | # https://embarkstudios.github.io/cargo-deny/checks/cfg.html 3 | 4 | [graph] 5 | targets = [ 6 | { triple = "aarch64-unknown-linux-gnu" }, 7 | { triple = "aarch64-unknown-linux-musl" }, 8 | { triple = "aarch64-apple-darwin" }, 9 | { triple = "x86_64-apple-darwin" }, 10 | { triple = "x86_64-pc-windows-msvc" }, 11 | { triple = "x86_64-unknown-linux-gnu" }, 12 | { triple = "x86_64-unknown-linux-musl" }, 13 | ] 14 | 15 | # https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html 16 | [licenses] 17 | allow = [ 18 | "Apache-2.0", 19 | "BSD-2-Clause", 20 | "BSD-3-Clause", 21 | "BSL-1.0", 22 | "ISC", 23 | "MIT", 24 | "PostgreSQL", 25 | "Unicode-3.0", 26 | "Zlib", 27 | ] 28 | 29 | # https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html 30 | [advisories] 31 | ignore = [ 32 | ] 33 | 34 | # https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html 35 | [bans] 36 | multiple-versions = "deny" 37 | wildcards = "allow" 38 | deny = [] 39 | 40 | [[licenses.clarify]] 41 | name = "ring" 42 | expression = "MIT AND ISC AND OpenSSL" 43 | license-files = [ 44 | { path = "LICENSE", hash = 0xbd0eed23 } 45 | ] 46 | -------------------------------------------------------------------------------- /examples/archive_async/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "archive_async" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | postgresql_archive = { path = "../../postgresql_archive" } 10 | tempfile = { workspace = true } 11 | tokio = { workspace = true, features = ["full"] } 12 | -------------------------------------------------------------------------------- /examples/archive_async/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use postgresql_archive::configuration::theseus; 6 | use postgresql_archive::{Result, VersionReq, extract, get_archive}; 7 | 8 | #[tokio::main] 9 | async fn main() -> Result<()> { 10 | let url = theseus::URL; 11 | let version_req = VersionReq::STAR; 12 | let (archive_version, archive) = get_archive(url, &version_req).await?; 13 | let out_dir = tempfile::tempdir()?.keep(); 14 | extract(url, &archive, &out_dir).await?; 15 | println!( 16 | "PostgreSQL {} extracted to {}", 17 | archive_version, 18 | out_dir.to_string_lossy() 19 | ); 20 | Ok(()) 21 | } 22 | 23 | #[cfg(test)] 24 
| mod test { 25 | use super::*; 26 | 27 | #[test] 28 | fn test_main() -> Result<()> { 29 | main() 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /examples/archive_sync/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "archive_sync" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | postgresql_archive = { path = "../../postgresql_archive", features = ["blocking"] } 10 | tempfile = { workspace = true } 11 | -------------------------------------------------------------------------------- /examples/archive_sync/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use postgresql_archive::blocking::{extract, get_archive}; 6 | use postgresql_archive::configuration::theseus; 7 | use postgresql_archive::{Result, VersionReq}; 8 | 9 | fn main() -> Result<()> { 10 | let url = theseus::URL; 11 | let version_req = VersionReq::STAR; 12 | let (archive_version, archive) = get_archive(url, &version_req)?; 13 | let out_dir = tempfile::tempdir()?.keep(); 14 | extract(url, &archive, &out_dir)?; 15 | println!( 16 | "PostgreSQL {} extracted to {}", 17 | archive_version, 18 | out_dir.to_string_lossy() 19 | ); 20 | Ok(()) 21 | } 22 | 23 | #[cfg(test)] 24 | mod test { 25 | use super::*; 26 | 27 | #[test] 28 | fn test_main() -> Result<()> { 29 | main() 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /examples/axum_embedded/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "axum_embedded" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | anyhow = { workspace = true } 10 | axum = { workspace = true } 11 | postgresql_embedded = { path = "../../postgresql_embedded" } 12 | postgresql_extensions = { path = "../../postgresql_extensions" } 13 | sqlx = { workspace = true, features = ["runtime-tokio"] } 14 | tracing = { workspace = true } 15 | tracing-subscriber = { workspace = true } 16 | tokio = { workspace = true, features = ["full"] } 17 | -------------------------------------------------------------------------------- /examples/axum_embedded/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use anyhow::Result; 6 | use axum::extract::State; 7 | use axum::{Json, Router, http::StatusCode, routing::get}; 8 | use postgresql_embedded::{PostgreSQL, Settings, VersionReq}; 9 | use sqlx::PgPool; 10 | use sqlx::postgres::PgPoolOptions; 11 | use std::env; 12 | use std::time::Duration; 13 | use tokio::net::TcpListener; 14 | use tracing::info; 15 | 16 | /// Example of how to use postgresql embedded with axum. 
17 | #[tokio::main] 18 | async fn main() -> Result<()> { 19 | tracing_subscriber::fmt().compact().init(); 20 | 21 | let db_url = 22 | env::var("DATABASE_URL").unwrap_or_else(|_| "postgresql://postgres@localhost".to_string()); 23 | info!("Installing PostgreSQL"); 24 | let settings = Settings::from_url(&db_url)?; 25 | let mut postgresql = PostgreSQL::new(settings); 26 | postgresql.setup().await?; 27 | 28 | info!("Installing the vector extension from PortalCorp"); 29 | postgresql_extensions::install( 30 | postgresql.settings(), 31 | "portal-corp", 32 | "pgvector_compiled", 33 | &VersionReq::parse("=0.16.12")?, 34 | ) 35 | .await?; 36 | 37 | info!("Starting PostgreSQL"); 38 | postgresql.start().await?; 39 | 40 | let database_name = "axum-test"; 41 | info!("Creating database {database_name}"); 42 | postgresql.create_database(database_name).await?; 43 | 44 | info!("Configuring extension"); 45 | let settings = postgresql.settings().clone(); 46 | let database_url = settings.url(database_name); 47 | let pool = PgPool::connect(database_url.as_str()).await?; 48 | pool.close().await; 49 | 50 | info!("Restarting database"); 51 | postgresql.stop().await?; 52 | postgresql.start().await?; 53 | 54 | info!("Setup connection pool"); 55 | let pool = PgPoolOptions::new() 56 | .max_connections(5) 57 | .acquire_timeout(Duration::from_secs(3)) 58 | .connect(&database_url) 59 | .await?; 60 | 61 | info!("Enabling extension"); 62 | enable_extension(&pool).await?; 63 | 64 | info!("Start application"); 65 | let app = Router::new().route("/", get(extensions)).with_state(pool); 66 | 67 | let listener = TcpListener::bind("0.0.0.0:3000").await.unwrap(); 68 | info!("Listening on {}", listener.local_addr()?); 69 | axum::serve(listener, app).await?; 70 | 71 | Ok(()) 72 | } 73 | 74 | async fn enable_extension(pool: &PgPool) -> Result<()> { 75 | sqlx::query("CREATE EXTENSION IF NOT EXISTS vector") 76 | .execute(pool) 77 | .await?; 78 | Ok(()) 79 | } 80 | 81 | async fn extensions(State(pool): State<PgPool>) -> Result<Json<Vec<String>>, (StatusCode, String)> { 82 | sqlx::query_scalar("SELECT name FROM pg_available_extensions ORDER BY name") 83 | .fetch_all(&pool) 84 | .await 85 | .map(Json) 86 | .map_err(internal_error) 87 | } 88 | 89 | fn internal_error<E: std::error::Error>(err: E) -> (StatusCode, String) { 90 | (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()) 91 | } 92 | -------------------------------------------------------------------------------- /examples/diesel_embedded/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "diesel_embedded" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | diesel = { workspace = true, features = ["postgres", "r2d2"] } 10 | diesel_migrations = { workspace = true, features = ["postgres"] } 11 | postgresql_embedded = { path = "../../postgresql_embedded" } 12 | r2d2_postgres = { workspace = true } 13 | tokio = { workspace = true, features = ["full"] } 14 | -------------------------------------------------------------------------------- /examples/diesel_embedded/README.md: -------------------------------------------------------------------------------- 1 | This example is taken from [Getting Started with Diesel](https://diesel.rs/guides/getting-started) 2 | and modified to work with an embedded database.
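The only substantive change from the Diesel guide is where the connection URL comes from: the pool is built from the embedded server's runtime URL instead of a `DATABASE_URL` environment variable. A condensed sketch of that part of `src/main.rs` (see the full file below for the migration and query code):

```rust
use diesel::PgConnection;
use diesel::r2d2::{ConnectionManager, Pool};
use postgresql_embedded::{PostgreSQL, Result};

#[tokio::main]
async fn main() -> Result<()> {
    let mut postgresql = PostgreSQL::default();
    postgresql.setup().await?;
    postgresql.start().await?;
    postgresql.create_database("diesel_demo").await?;

    // The embedded server picks an ephemeral port at runtime; ask it for the URL.
    let database_url = postgresql.settings().url("diesel_demo");
    let manager = ConnectionManager::<PgConnection>::new(database_url);
    let _pool = Pool::builder()
        .build(manager)
        .expect("Could not build connection pool");
    // ...run migrations and queries through the pool as in the Diesel guide...

    postgresql.stop().await
}
```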
3 | -------------------------------------------------------------------------------- /examples/diesel_embedded/diesel.toml: -------------------------------------------------------------------------------- 1 | # For documentation on how to configure this file, 2 | # see https://diesel.rs/guides/configuring-diesel-cli 3 | 4 | [print_schema] 5 | file = "src/schema.rs" 6 | custom_type_derives = ["diesel::query_builder::QueryId", "Clone"] 7 | 8 | [migrations_directory] 9 | dir = "./migrations" 10 | -------------------------------------------------------------------------------- /examples/diesel_embedded/migrations/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/theseus-rs/postgresql-embedded/2d27e1ffdd0947ace3643962dce8a3867a919337/examples/diesel_embedded/migrations/.keep -------------------------------------------------------------------------------- /examples/diesel_embedded/migrations/2024-08-17-200823_create_posts/down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE posts 2 | -------------------------------------------------------------------------------- /examples/diesel_embedded/migrations/2024-08-17-200823_create_posts/up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE posts 2 | ( 3 | id SERIAL PRIMARY KEY, 4 | title VARCHAR NOT NULL, 5 | body TEXT NOT NULL, 6 | published BOOLEAN NOT NULL DEFAULT FALSE 7 | ) 8 | -------------------------------------------------------------------------------- /examples/diesel_embedded/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use crate::models::{NewPost, Post}; 6 | use diesel::r2d2::{ConnectionManager, Pool}; 7 | use diesel::{PgConnection, RunQueryDsl, SelectableHelper}; 8 | use diesel_migrations::{EmbeddedMigrations, MigrationHarness, embed_migrations}; 9 | use postgresql_embedded::{PostgreSQL, Result, Settings, VersionReq}; 10 | 11 | mod models; 12 | pub mod schema; 13 | 14 | const MIGRATIONS: EmbeddedMigrations = embed_migrations!("./migrations/"); 15 | #[tokio::main] 16 | async fn main() -> Result<()> { 17 | let settings = Settings { 18 | version: VersionReq::parse("=16.4.0")?, 19 | username: "postgres".to_string(), 20 | password: "postgres".to_string(), 21 | ..Default::default() 22 | }; 23 | let mut postgresql = PostgreSQL::new(settings); 24 | postgresql.setup().await?; 25 | postgresql.start().await?; 26 | 27 | let database_name = "diesel_demo"; 28 | postgresql.create_database(database_name).await?; 29 | postgresql.database_exists(database_name).await?; 30 | 31 | { 32 | let database_url = postgresql.settings().url(database_name); 33 | let manager = ConnectionManager::<PgConnection>::new(database_url); 34 | let pool = Pool::builder() 35 | .test_on_check_out(true) 36 | .build(manager) 37 | .expect("Could not build connection pool"); 38 | let mut mig_run = pool.clone().get().unwrap(); 39 | mig_run.run_pending_migrations(MIGRATIONS).unwrap(); 40 | 41 | let post = create_post( 42 | &mut pool.get().unwrap(), 43 | "My First Post", 44 | "This is my first post", 45 | ); 46 | println!("Post '{}' created", post.title); 47 | } 48 | 49 | postgresql.drop_database(database_name).await?; 50 | 51 | postgresql.stop().await 52 | } 53 | 54 | /// Create a new post 55 | /// 56 | /// # Panics 57 | /// if the post cannot be saved 58 | pub fn
create_post(conn: &mut PgConnection, title: &str, body: &str) -> Post { 59 | use crate::schema::posts; 60 | 61 | let new_post = NewPost { title, body }; 62 | 63 | diesel::insert_into(posts::table) 64 | .values(&new_post) 65 | .returning(Post::as_returning()) 66 | .get_result(conn) 67 | .expect("Error saving new post") 68 | } 69 | 70 | #[cfg(test)] 71 | mod test { 72 | use super::*; 73 | 74 | #[test] 75 | fn test_main() -> Result<()> { 76 | main() 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /examples/diesel_embedded/src/models.rs: -------------------------------------------------------------------------------- 1 | use diesel::prelude::*; 2 | 3 | #[derive(Queryable, Selectable)] 4 | #[diesel(table_name = crate::schema::posts)] 5 | #[diesel(check_for_backend(diesel::pg::Pg))] 6 | pub struct Post { 7 | pub id: i32, 8 | pub title: String, 9 | pub body: String, 10 | pub published: bool, 11 | } 12 | 13 | #[derive(Insertable)] 14 | #[diesel(table_name = crate::schema::posts)] 15 | pub struct NewPost<'a> { 16 | pub title: &'a str, 17 | pub body: &'a str, 18 | } 19 | -------------------------------------------------------------------------------- /examples/diesel_embedded/src/schema.rs: -------------------------------------------------------------------------------- 1 | diesel::table! { 2 | posts (id) { 3 | id -> Int4, 4 | title -> Varchar, 5 | body -> Text, 6 | published -> Bool, 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /examples/download_progress_bar/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "download_progress_bar" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | anyhow = { workspace = true } 10 | indicatif = { workspace = true } 11 | postgresql_embedded = { path = "../../postgresql_embedded", features = ["indicatif"] } 12 | postgresql_extensions = { path = "../../postgresql_extensions" } 13 | tracing = { workspace = true } 14 | tracing-indicatif = { workspace = true } 15 | tracing-subscriber = { workspace = true } 16 | tokio = { workspace = true, features = ["full"] } 17 | -------------------------------------------------------------------------------- /examples/download_progress_bar/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use anyhow::Result; 6 | use indicatif::ProgressStyle; 7 | use postgresql_embedded::{PostgreSQL, Settings, VersionReq}; 8 | use tracing_indicatif::IndicatifLayer; 9 | use tracing_subscriber::filter::LevelFilter; 10 | use tracing_subscriber::prelude::*; 11 | use tracing_subscriber::{Registry, fmt}; 12 | 13 | /// Example of how to display a progress bar for the postgresql embedded archive download 14 | #[tokio::main] 15 | async fn main() -> Result<()> { 16 | let progress_style = ProgressStyle::with_template("{span_child_prefix}{spinner} {span_name} [{elapsed_precise}] [{wide_bar:.green.bold}] {bytes}/{total_bytes} ({bytes_per_sec}, {eta})")? 
17 | .progress_chars("=> "); 18 | let indicatif_layer = IndicatifLayer::new().with_progress_style(progress_style); 19 | let subscriber = Registry::default() 20 | .with(fmt::Layer::default().with_filter(LevelFilter::INFO)) 21 | .with(indicatif_layer); 22 | subscriber.init(); 23 | 24 | let settings = Settings { 25 | version: VersionReq::parse("=16.4.0")?, 26 | ..Default::default() 27 | }; 28 | let mut postgresql = PostgreSQL::new(settings); 29 | postgresql.setup().await?; 30 | postgresql.start().await?; 31 | 32 | let database_name = "test"; 33 | postgresql.create_database(database_name).await?; 34 | postgresql.database_exists(database_name).await?; 35 | postgresql.drop_database(database_name).await?; 36 | 37 | postgresql.stop().await?; 38 | Ok(()) 39 | } 40 | 41 | #[cfg(test)] 42 | mod test { 43 | use super::*; 44 | 45 | #[test] 46 | fn test_main() -> Result<()> { 47 | main() 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /examples/embedded_async/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "embedded_async" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | postgresql_embedded = { path = "../../postgresql_embedded" } 10 | tokio = { workspace = true, features = ["full"] } 11 | -------------------------------------------------------------------------------- /examples/embedded_async/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use postgresql_embedded::{PostgreSQL, Result, Settings, VersionReq}; 6 | 7 | #[tokio::main] 8 | async fn main() -> Result<()> { 9 | let settings = Settings { 10 | version: VersionReq::parse("=16.4.0")?, 11 | ..Default::default() 12 | }; 13 | let mut postgresql = PostgreSQL::new(settings); 14 | postgresql.setup().await?; 15 | postgresql.start().await?; 16 | 17 | let database_name = "test"; 18 | postgresql.create_database(database_name).await?; 19 | postgresql.database_exists(database_name).await?; 20 | postgresql.drop_database(database_name).await?; 21 | 22 | postgresql.stop().await 23 | } 24 | 25 | #[cfg(test)] 26 | mod test { 27 | use super::*; 28 | 29 | #[test] 30 | fn test_main() -> Result<()> { 31 | main() 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /examples/embedded_sync/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "embedded_sync" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | postgresql_embedded = { path = "../../postgresql_embedded", features = ["blocking"] } 10 | -------------------------------------------------------------------------------- /examples/embedded_sync/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use postgresql_embedded::Result; 6 | use postgresql_embedded::blocking::PostgreSQL; 7 | 8 | fn main() -> Result<()> { 9 | let mut postgresql = PostgreSQL::default(); 10 | postgresql.setup()?; 11 | postgresql.start()?; 12 | 13 | let database_name = "test"; 14 | postgresql.create_database(database_name)?; 15 | 
postgresql.database_exists(database_name)?; 16 | postgresql.drop_database(database_name)?; 17 | 18 | postgresql.stop() 19 | } 20 | 21 | #[cfg(test)] 22 | mod test { 23 | use super::*; 24 | 25 | #[test] 26 | fn test_main() -> Result<()> { 27 | main() 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /examples/portal_corp_extension/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "portal_corp_extension" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | anyhow = { workspace = true } 10 | indoc = { workspace = true } 11 | pgvector = { workspace = true, features = ["sqlx"] } 12 | postgresql_embedded = { path = "../../postgresql_embedded" } 13 | postgresql_extensions = { path = "../../postgresql_extensions" } 14 | sqlx = { workspace = true, features = ["runtime-tokio"] } 15 | tracing = { workspace = true } 16 | tracing-subscriber = { workspace = true } 17 | tokio = { workspace = true, features = ["full"] } 18 | -------------------------------------------------------------------------------- /examples/portal_corp_extension/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use anyhow::Result; 6 | use indoc::indoc; 7 | use pgvector::Vector; 8 | use sqlx::{PgPool, Row}; 9 | use tracing::info; 10 | 11 | use postgresql_embedded::{PostgreSQL, Settings, VersionReq}; 12 | 13 | /// Example of how to install and configure the PortalCorp pgvector extension. 14 | /// 15 | /// See: 16 | #[tokio::main] 17 | async fn main() -> Result<()> { 18 | tracing_subscriber::fmt().compact().init(); 19 | 20 | info!("Installing PostgreSQL"); 21 | let settings = Settings { 22 | version: VersionReq::parse("=16.4.0")?, 23 | ..Default::default() 24 | }; 25 | let mut postgresql = PostgreSQL::new(settings); 26 | postgresql.setup().await?; 27 | 28 | info!("Installing the vector extension from PortalCorp"); 29 | postgresql_extensions::install( 30 | postgresql.settings(), 31 | "portal-corp", 32 | "pgvector_compiled", 33 | &VersionReq::parse("=0.16.12")?, 34 | ) 35 | .await?; 36 | 37 | info!("Starting PostgreSQL"); 38 | postgresql.start().await?; 39 | 40 | let database_name = "vector-example"; 41 | info!("Creating database {database_name}"); 42 | postgresql.create_database(database_name).await?; 43 | 44 | info!("Configuring extension"); 45 | let settings = postgresql.settings(); 46 | let database_url = settings.url(database_name); 47 | let pool = PgPool::connect(database_url.as_str()).await?; 48 | pool.close().await; 49 | 50 | info!("Restarting database"); 51 | postgresql.stop().await?; 52 | postgresql.start().await?; 53 | 54 | info!("Enabling extension"); 55 | let pool = PgPool::connect(database_url.as_str()).await?; 56 | enable_extension(&pool).await?; 57 | 58 | info!("Creating table"); 59 | create_table(&pool).await?; 60 | 61 | info!("Creating data"); 62 | create_data(&pool).await?; 63 | 64 | info!("Get the nearest neighbors by L2 distance"); 65 | execute_query( 66 | &pool, 67 | "SELECT * FROM items ORDER BY embedding <-> '[3,1,2]' LIMIT 5", 68 | ) 69 | .await?; 70 | 71 | info!("Stopping database"); 72 | postgresql.stop().await?; 73 | Ok(()) 74 | } 75 | 76 | async fn enable_extension(pool: &PgPool) -> Result<()> { 77 | sqlx::query("DROP EXTENSION IF EXISTS vector") 78 | 
.execute(pool) 79 | .await?; 80 | sqlx::query("CREATE EXTENSION IF NOT EXISTS vector") 81 | .execute(pool) 82 | .await?; 83 | Ok(()) 84 | } 85 | 86 | async fn create_table(pool: &PgPool) -> Result<()> { 87 | sqlx::query(indoc! {" 88 | CREATE TABLE IF NOT EXISTS items ( 89 | id bigserial PRIMARY KEY, 90 | embedding vector(3) NOT NULL 91 | ) 92 | "}) 93 | .execute(pool) 94 | .await?; 95 | Ok(()) 96 | } 97 | 98 | async fn create_data(pool: &PgPool) -> Result<()> { 99 | sqlx::query(indoc! {" 100 | INSERT INTO items (embedding) 101 | VALUES 102 | ('[1,2,3]'), 103 | ('[4,5,6]') 104 | "}) 105 | .execute(pool) 106 | .await?; 107 | Ok(()) 108 | } 109 | 110 | async fn execute_query(pool: &PgPool, query: &str) -> Result<()> { 111 | info!("Query: {query}"); 112 | let rows = sqlx::query(query).fetch_all(pool).await?; 113 | for row in rows { 114 | let id: i64 = row.try_get("id")?; 115 | let embedding: Vector = row.try_get("embedding")?; 116 | info!("ID: {id}, Embedding: {embedding:?}"); 117 | } 118 | Ok(()) 119 | } 120 | 121 | #[cfg(test)] 122 | mod test { 123 | use super::*; 124 | 125 | #[test] 126 | fn test_main() -> Result<()> { 127 | main() 128 | } 129 | } 130 | -------------------------------------------------------------------------------- /examples/postgres_embedded/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "postgres_embedded" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | anyhow = { workspace = true } 10 | postgres = { workspace = true } 11 | postgresql_embedded = { path = "../../postgresql_embedded", features = ["blocking"] } 12 | -------------------------------------------------------------------------------- /examples/postgres_embedded/README.md: -------------------------------------------------------------------------------- 1 | This example is based on [sqlx/example/todos](https://github.com/launchbadge/sqlx/tree/main/examples/postgres/todos) 2 | and modified to work with the postgres driver. 3 | -------------------------------------------------------------------------------- /examples/postgres_embedded/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use anyhow::Result; 6 | use postgres::{Client, NoTls}; 7 | use postgresql_embedded::blocking::PostgreSQL; 8 | 9 | fn main() -> Result<()> { 10 | let mut postgresql = PostgreSQL::default(); 11 | postgresql.setup()?; 12 | postgresql.start()?; 13 | 14 | let database_name = "test"; 15 | postgresql.create_database(database_name)?; 16 | let settings = postgresql.settings(); 17 | let mut client = Client::connect( 18 | format!( 19 | "host={host} port={port} user={username} password={password}", 20 | host = settings.host, 21 | port = settings.port, 22 | username = settings.username, 23 | password = settings.password 24 | ) 25 | .as_str(), 26 | NoTls, 27 | )?; 28 | 29 | println!("Creating table 'todos'"); 30 | create_table_todo(&mut client)?; 31 | 32 | let description = "Implement embedded database with postgres"; 33 | println!("Adding new todo with description '{description}'"); 34 | let todo_id = add_todo(&mut client, description)?; 35 | println!("Added new todo with id {todo_id}"); 36 | 37 | println!("Marking todo {todo_id} as done"); 38 | if complete_todo(&mut client, todo_id)? 
{ 39 | println!("Todo {todo_id} is marked as done"); 40 | } 41 | 42 | println!("Printing list of all todos"); 43 | list_todos(&mut client)?; 44 | 45 | Ok(()) 46 | } 47 | 48 | fn create_table_todo(client: &mut Client) -> Result<()> { 49 | let _ = client.execute( 50 | "CREATE TABLE IF NOT EXISTS todos (id BIGSERIAL PRIMARY KEY, description TEXT NOT NULL, done BOOLEAN NOT NULL DEFAULT FALSE);", 51 | &[], 52 | )?; 53 | 54 | Ok(()) 55 | } 56 | 57 | fn add_todo(client: &mut Client, description: &str) -> Result<i64> { 58 | let row = client.query_one( 59 | "INSERT INTO todos (description) VALUES ($1) RETURNING id", 60 | &[&description], 61 | )?; 62 | 63 | let id: i64 = row.get(0); 64 | Ok(id) 65 | } 66 | 67 | fn complete_todo(client: &mut Client, id: i64) -> Result<bool> { 68 | let rows_affected = client.execute("UPDATE todos SET done = TRUE WHERE id = $1", &[&id])?; 69 | 70 | Ok(rows_affected > 0) 71 | } 72 | 73 | fn list_todos(client: &mut Client) -> Result<()> { 74 | let rows = client.query("SELECT id, description, done FROM todos ORDER BY id", &[])?; 75 | 76 | for rec in rows { 77 | let id: i64 = rec.get("id"); 78 | let description: String = rec.get("description"); 79 | let done: bool = rec.get("done"); 80 | println!( 81 | "- [{}] {}: {}", 82 | if done { "x" } else { " " }, 83 | id, 84 | &description, 85 | ); 86 | } 87 | 88 | Ok(()) 89 | } 90 | 91 | #[cfg(test)] 92 | mod test { 93 | use super::*; 94 | 95 | #[test] 96 | fn test_main() -> Result<()> { 97 | main() 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /examples/sqlx_embedded/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "sqlx_embedded" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | anyhow = { workspace = true } 10 | postgresql_embedded = { path = "../../postgresql_embedded" } 11 | sqlx = { workspace = true, features = ["runtime-tokio"] } 12 | tokio = { workspace = true, features = ["full"] } 13 | -------------------------------------------------------------------------------- /examples/sqlx_embedded/README.md: -------------------------------------------------------------------------------- 1 | This example is taken from [sqlx/example/todos](https://github.com/launchbadge/sqlx/tree/main/examples/postgres/todos) 2 | and modified to work with an embedded database.
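As with the other examples, the only change from the upstream todos example is that the `PgPool` connects to the URL reported by the embedded server. A condensed sketch of the setup in `src/main.rs` (the full file below adds the todos table and queries):

```rust
use anyhow::Result;
use postgresql_embedded::PostgreSQL;
use sqlx::postgres::PgPool;

#[tokio::main]
async fn main() -> Result<()> {
    let mut postgresql = PostgreSQL::default();
    postgresql.setup().await?;
    postgresql.start().await?;
    postgresql.create_database("test").await?;

    // Connect sqlx to the embedded server using its runtime URL.
    let database_url = postgresql.settings().url("test");
    let pool = PgPool::connect(database_url.as_str()).await?;
    sqlx::query("SELECT 1").execute(&pool).await?;

    postgresql.stop().await?;
    Ok(())
}
```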
3 | -------------------------------------------------------------------------------- /examples/sqlx_embedded/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use anyhow::Result; 6 | use postgresql_embedded::PostgreSQL; 7 | use sqlx::Row; 8 | use sqlx::postgres::PgPool; 9 | 10 | #[tokio::main] 11 | async fn main() -> Result<()> { 12 | let mut postgresql = PostgreSQL::default(); 13 | postgresql.setup().await?; 14 | postgresql.start().await?; 15 | 16 | let database_name = "test"; 17 | postgresql.create_database(database_name).await?; 18 | let settings = postgresql.settings(); 19 | let database_url = settings.url(database_name); 20 | 21 | let pool = PgPool::connect(database_url.as_str()).await?; 22 | 23 | println!("Creating table 'todos'"); 24 | create_table_todo(&pool).await?; 25 | 26 | let description = "Implement embedded database with sqlx"; 27 | println!("Adding new todo with description '{description}'"); 28 | let todo_id = add_todo(&pool, description).await?; 29 | println!("Added new todo with id {todo_id}"); 30 | 31 | println!("Marking todo {todo_id} as done"); 32 | if complete_todo(&pool, todo_id).await? { 33 | println!("Todo {todo_id} is marked as done"); 34 | } 35 | 36 | println!("Printing list of all todos"); 37 | list_todos(&pool).await?; 38 | 39 | Ok(()) 40 | } 41 | 42 | async fn create_table_todo(pool: &PgPool) -> Result<()> { 43 | sqlx::query( 44 | "CREATE TABLE IF NOT EXISTS todos(id BIGSERIAL PRIMARY KEY, description TEXT NOT NULL, done BOOLEAN NOT NULL DEFAULT FALSE);" 45 | ).execute(pool).await?; 46 | 47 | Ok(()) 48 | } 49 | 50 | async fn add_todo(pool: &PgPool, description: &str) -> Result<i64> { 51 | let rec = sqlx::query("INSERT INTO todos (description) VALUES ($1) RETURNING id") 52 | .bind(description) 53 | .fetch_one(pool) 54 | .await?; 55 | 56 | let id: i64 = rec.get("id"); 57 | Ok(id) 58 | } 59 | 60 | async fn complete_todo(pool: &PgPool, id: i64) -> Result<bool> { 61 | let rows_affected = sqlx::query("UPDATE todos SET done = TRUE WHERE id = $1") 62 | .bind(id) 63 | .execute(pool) 64 | .await?
65 | .rows_affected(); 66 | 67 | Ok(rows_affected > 0) 68 | } 69 | 70 | async fn list_todos(pool: &PgPool) -> Result<()> { 71 | let recs = sqlx::query("SELECT id, description, done FROM todos ORDER BY id") 72 | .fetch_all(pool) 73 | .await?; 74 | 75 | for rec in recs { 76 | let id: i64 = rec.get("id"); 77 | let description: String = rec.get("description"); 78 | let done: bool = rec.get("done"); 79 | println!( 80 | "- [{}] {}: {}", 81 | if done { "x" } else { " " }, 82 | id, 83 | &description, 84 | ); 85 | } 86 | 87 | Ok(()) 88 | } 89 | 90 | #[cfg(test)] 91 | mod test { 92 | use super::*; 93 | 94 | #[test] 95 | fn test_main() -> Result<()> { 96 | main() 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /examples/tensor_chord_extension/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "tensor_chord_extension" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | anyhow = { workspace = true } 10 | indoc = { workspace = true } 11 | postgresql_embedded = { path = "../../postgresql_embedded" } 12 | postgresql_extensions = { path = "../../postgresql_extensions" } 13 | sqlx = { workspace = true, features = ["runtime-tokio"] } 14 | tracing = { workspace = true } 15 | tracing-subscriber = { workspace = true } 16 | tokio = { workspace = true, features = ["full"] } 17 | -------------------------------------------------------------------------------- /examples/tensor_chord_extension/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use anyhow::Result; 6 | use indoc::indoc; 7 | use sqlx::{PgPool, Row}; 8 | use tracing::info; 9 | 10 | use postgresql_embedded::{PostgreSQL, Settings, VersionReq}; 11 | 12 | /// Example of how to install and configure the TensorChord vector extension. 
13 | /// 14 | /// See: 15 | #[tokio::main] 16 | async fn main() -> Result<()> { 17 | tracing_subscriber::fmt().compact().init(); 18 | 19 | info!("Installing PostgreSQL"); 20 | let settings = Settings { 21 | version: VersionReq::parse("=16.4.0")?, 22 | ..Default::default() 23 | }; 24 | let mut postgresql = PostgreSQL::new(settings); 25 | postgresql.setup().await?; 26 | 27 | info!("Installing the vector extension from TensorChord"); 28 | postgresql_extensions::install( 29 | postgresql.settings(), 30 | "tensor-chord", 31 | "pgvecto.rs", 32 | &VersionReq::parse("=0.3.0")?, 33 | ) 34 | .await?; 35 | 36 | info!("Starting PostgreSQL"); 37 | postgresql.start().await?; 38 | 39 | let database_name = "vector-example"; 40 | info!("Creating database {database_name}"); 41 | postgresql.create_database(database_name).await?; 42 | 43 | info!("Configuring extension"); 44 | let settings = postgresql.settings(); 45 | let database_url = settings.url(database_name); 46 | let pool = PgPool::connect(database_url.as_str()).await?; 47 | configure_extension(&pool).await?; 48 | pool.close().await; 49 | 50 | info!("Restarting database"); 51 | postgresql.stop().await?; 52 | postgresql.start().await?; 53 | 54 | info!("Enabling extension"); 55 | let pool = PgPool::connect(database_url.as_str()).await?; 56 | enable_extension(&pool).await?; 57 | 58 | info!("Creating table"); 59 | create_table(&pool).await?; 60 | 61 | info!("Creating data"); 62 | create_data(&pool).await?; 63 | 64 | info!("Squared Euclidean Distance"); 65 | execute_query( 66 | &pool, 67 | "SELECT '[1, 2, 3]'::vector <-> '[3, 2, 1]'::vector AS value", 68 | ) 69 | .await?; 70 | 71 | info!("Negative Dot Product"); 72 | execute_query( 73 | &pool, 74 | "SELECT '[1, 2, 3]'::vector <#> '[3, 2, 1]'::vector AS value", 75 | ) 76 | .await?; 77 | 78 | info!("Cosine Distance"); 79 | execute_query( 80 | &pool, 81 | "SELECT '[1, 2, 3]'::vector <=> '[3, 2, 1]'::vector AS value", 82 | ) 83 | .await?; 84 | 85 | info!("Stopping database"); 86 | postgresql.stop().await?; 87 | Ok(()) 88 | } 89 | 90 | async fn configure_extension(pool: &PgPool) -> Result<()> { 91 | sqlx::query("ALTER SYSTEM SET shared_preload_libraries = \"vectors.so\"") 92 | .execute(pool) 93 | .await?; 94 | sqlx::query("ALTER SYSTEM SET search_path = \"$user\", public, vectors") 95 | .execute(pool) 96 | .await?; 97 | Ok(()) 98 | } 99 | 100 | async fn enable_extension(pool: &PgPool) -> Result<()> { 101 | sqlx::query("DROP EXTENSION IF EXISTS vectors") 102 | .execute(pool) 103 | .await?; 104 | sqlx::query("CREATE EXTENSION IF NOT EXISTS vectors") 105 | .execute(pool) 106 | .await?; 107 | Ok(()) 108 | } 109 | 110 | async fn create_table(pool: &PgPool) -> Result<()> { 111 | sqlx::query(indoc! {" 112 | CREATE TABLE IF NOT EXISTS items ( 113 | id bigserial PRIMARY KEY, 114 | embedding vector(3) NOT NULL 115 | ) 116 | "}) 117 | .execute(pool) 118 | .await?; 119 | Ok(()) 120 | } 121 | 122 | async fn create_data(pool: &PgPool) -> Result<()> { 123 | sqlx::query(indoc! {" 124 | INSERT INTO items (embedding) 125 | VALUES 126 | ('[1,2,3]'), 127 | ('[4,5,6]') 128 | "}) 129 | .execute(pool) 130 | .await?; 131 | sqlx::query(indoc! 
{" 132 | INSERT INTO items (embedding) 133 | VALUES 134 | (ARRAY[1, 2, 3]::real[]), 135 | (ARRAY[4, 5, 6]::real[] 136 | ) 137 | "}) 138 | .execute(pool) 139 | .await?; 140 | Ok(()) 141 | } 142 | 143 | async fn execute_query(pool: &PgPool, query: &str) -> Result<()> { 144 | let row = sqlx::query(query).fetch_one(pool).await?; 145 | let value: f32 = row.try_get("value")?; 146 | info!("{}: {}", query, value); 147 | Ok(()) 148 | } 149 | 150 | #[cfg(target_os = "linux")] 151 | #[cfg(test)] 152 | mod test { 153 | use super::*; 154 | 155 | #[test] 156 | fn test_main() -> Result<()> { 157 | main() 158 | } 159 | } 160 | -------------------------------------------------------------------------------- /examples/zonky/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition.workspace = true 3 | name = "zonky" 4 | publish = false 5 | license.workspace = true 6 | version.workspace = true 7 | 8 | [dependencies] 9 | postgresql_archive = { path = "../../postgresql_archive" } 10 | postgresql_embedded = { path = "../../postgresql_embedded", default-features = false, features = ["zonky"] } 11 | tokio = { workspace = true, features = ["full"] } 12 | -------------------------------------------------------------------------------- /examples/zonky/src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | 5 | use postgresql_archive::VersionReq; 6 | use postgresql_archive::configuration::zonky; 7 | use postgresql_embedded::{PostgreSQL, Result, Settings}; 8 | 9 | #[tokio::main] 10 | async fn main() -> Result<()> { 11 | let settings = Settings { 12 | releases_url: zonky::URL.to_string(), 13 | version: VersionReq::parse("=16.3.0")?, 14 | ..Default::default() 15 | }; 16 | let mut postgresql = PostgreSQL::new(settings); 17 | postgresql.setup().await?; 18 | postgresql.start().await?; 19 | 20 | let database_name = "test"; 21 | postgresql.create_database(database_name).await?; 22 | postgresql.database_exists(database_name).await?; 23 | postgresql.drop_database(database_name).await?; 24 | 25 | postgresql.stop().await 26 | } 27 | 28 | #[cfg(test)] 29 | mod test { 30 | use super::*; 31 | 32 | #[test] 33 | fn test_main() -> Result<()> { 34 | main() 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /images/full_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/theseus-rs/postgresql-embedded/2d27e1ffdd0947ace3643962dce8a3867a919337/images/full_logo.png -------------------------------------------------------------------------------- /images/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/theseus-rs/postgresql-embedded/2d27e1ffdd0947ace3643962dce8a3867a919337/images/logo.png -------------------------------------------------------------------------------- /images/original_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/theseus-rs/postgresql-embedded/2d27e1ffdd0947ace3643962dce8a3867a919337/images/original_logo.png -------------------------------------------------------------------------------- /postgresql_archive/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors.workspace = true 3 | 
categories.workspace = true 4 | description = "A library for downloading and extracting PostgreSQL archives" 5 | edition.workspace = true 6 | keywords.workspace = true 7 | license.workspace = true 8 | name = "postgresql_archive" 9 | repository = "https://github.com/theseus-rs/postgresql-embedded" 10 | rust-version.workspace = true 11 | version.workspace = true 12 | 13 | [dependencies] 14 | async-trait = { workspace = true } 15 | flate2 = { workspace = true } 16 | futures-util = { workspace = true } 17 | hex = { workspace = true } 18 | liblzma = { workspace = true } 19 | md-5 = { workspace = true, optional = true } 20 | num-format = { workspace = true } 21 | quick-xml = { workspace = true, features = ["serialize"], optional = true } 22 | regex-lite = { workspace = true } 23 | reqwest = { workspace = true, default-features = false, features = ["json", "stream"] } 24 | reqwest-middleware = { workspace = true } 25 | reqwest-retry = { workspace = true } 26 | reqwest-tracing = { workspace = true } 27 | semver = { workspace = true } 28 | serde = { workspace = true, features = ["derive"] } 29 | serde_json = { workspace = true, optional = true } 30 | sha1 = { workspace = true, optional = true } 31 | sha2 = { workspace = true, optional = true } 32 | tar = { workspace = true } 33 | target-triple = { workspace = true, optional = true } 34 | tempfile = { workspace = true } 35 | thiserror = { workspace = true } 36 | tokio = { workspace = true, features = ["full"], optional = true } 37 | tracing = { workspace = true, features = ["log"] } 38 | tracing-indicatif = { workspace = true, optional = true } 39 | url = { workspace = true } 40 | zip = { workspace = true } 41 | 42 | [dev-dependencies] 43 | anyhow = { workspace = true } 44 | criterion = { workspace = true } 45 | hex = { workspace = true } 46 | test-log = { workspace = true } 47 | tokio = { workspace = true } 48 | 49 | [features] 50 | default = [ 51 | "native-tls", 52 | "theseus", 53 | ] 54 | blocking = ["dep:tokio"] 55 | github = [ 56 | "dep:serde_json", 57 | "dep:target-triple", 58 | ] 59 | indicatif = [ 60 | "dep:tracing-indicatif" 61 | ] 62 | maven = [ 63 | "dep:quick-xml", 64 | "md5", 65 | "sha1", 66 | "sha2", 67 | ] 68 | md5 = ["dep:md-5"] 69 | native-tls = ["reqwest/native-tls"] 70 | rustls = ["reqwest/rustls-tls-native-roots"] 71 | sha1 = ["dep:sha1"] 72 | sha2 = ["dep:sha2"] 73 | theseus = [ 74 | "dep:target-triple", 75 | "github", 76 | "sha2", 77 | ] 78 | zonky = [ 79 | "maven", 80 | ] 81 | 82 | [package.metadata.docs.rs] 83 | features = ["blocking"] 84 | targets = ["x86_64-unknown-linux-gnu"] 85 | 86 | [[bench]] 87 | harness = false 88 | name = "archive" 89 | -------------------------------------------------------------------------------- /postgresql_archive/README.md: -------------------------------------------------------------------------------- 1 | # PostgreSQL Archive 2 | 3 | [![ci](https://github.com/theseus-rs/postgresql-embedded/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/theseus-rs/postgresql-embedded/actions/workflows/ci.yml) 4 | [![Documentation](https://docs.rs/postgresql_archive/badge.svg)](https://docs.rs/postgresql_archive) 5 | [![Code Coverage](https://codecov.io/gh/theseus-rs/postgresql-embedded/branch/main/graph/badge.svg)](https://codecov.io/gh/theseus-rs/postgresql-embedded) 6 | [![Benchmarks](https://img.shields.io/badge/%F0%9F%90%B0_bencher-enabled-6ec241)](https://bencher.dev/perf/theseus-rs-postgresql-embedded) 7 | [![Latest 
version](https://img.shields.io/crates/v/postgresql_archive.svg)](https://crates.io/crates/postgresql_archive) 8 | [![License](https://img.shields.io/crates/l/postgresql_archive?)](https://github.com/theseus-rs/postgresql-embedded/tree/main/postgresql_archive#license) 9 | [![Semantic Versioning](https://img.shields.io/badge/%E2%9A%99%EF%B8%8F_SemVer-2.0.0-blue)](https://semver.org/spec/v2.0.0.html) 10 | 11 | A configurable library for downloading and extracting PostgreSQL archives. 12 | 13 | ## Examples 14 | 15 | ### Asynchronous API 16 | 17 | ```rust 18 | use postgresql_archive::{extract, get_archive, Result, VersionReq}; 19 | use postgresql_archive::configuration::theseus; 20 | 21 | #[tokio::main] 22 | async fn main() -> Result<()> { 23 | let url = theseus::URL; 24 | let (archive_version, archive) = get_archive(url, &VersionReq::STAR).await?; 25 | let out_dir = std::env::temp_dir(); 26 | extract(url, &archive, &out_dir).await 27 | } 28 | ``` 29 | 30 | ### Synchronous API 31 | 32 | ```rust 33 | use postgresql_archive::configuration::theseus; 34 | use postgresql_archive::{Result, VersionReq}; 35 | use postgresql_archive::blocking::{extract, get_archive}; 36 | 37 | fn main() -> Result<()> { 38 | let url = theseus::URL; 39 | let (archive_version, archive) = get_archive(url, &VersionReq::STAR)?; 40 | let out_dir = std::env::temp_dir(); 41 | extract(url, &archive, &out_dir) 42 | } 43 | ``` 44 | 45 | ## Feature flags 46 | 47 | postgresql_archive uses [feature flags] to address compile time and binary size 48 | uses. 49 | 50 | The following features are available: 51 | 52 | | Name | Description | Default? | 53 | |--------------|----------------------------------|----------| 54 | | `blocking` | Enables the blocking API | No | 55 | | `indicatif` | Enables tracing-indcatif support | No | 56 | | `native-tls` | Enables native-tls support | Yes | 57 | | `rustls` | Enables rustls support | No | 58 | 59 | ### Configurations 60 | 61 | | Name | Description | Default? | 62 | |-----------|-------------------------------------|----------| 63 | | `theseus` | Enables theseus PostgreSQL binaries | Yes | 64 | | `zonky` | Enables zonky PostgreSQL binaries | No | 65 | 66 | ### Hashers 67 | 68 | | Name | Description | Default? | 69 | |--------|----------------------|----------| 70 | | `md5` | Enables md5 hashers | No | 71 | | `sha1` | Enables sha1 hashers | No | 72 | | `sha2` | Enables sha2 hashers | Yes¹ | 73 | 74 | ¹ enabled by the `theseus` feature flag. 75 | 76 | ### Repositories 77 | 78 | | Name | Description | Default? | 79 | |----------|---------------------------|----------| 80 | | `github` | Enables github repository | Yes¹ | 81 | | `maven` | Enables maven repository | No | 82 | 83 | ¹ enabled by the `theseus` feature flag. 84 | 85 | ## Supported platforms 86 | 87 | `postgresql_archive` provides implementations for the following: 88 | 89 | * [theseus-rs/postgresql-binaries](https://github.com/theseus-rs/postgresql-binaries) 90 | * [zonkyio/embedded-postgres-binaries](https://github.com/zonkyio/embedded-postgres-binaries) 91 | 92 | ## Safety 93 | 94 | This crate uses `#![forbid(unsafe_code)]` to ensure everything is implemented in 100% safe Rust. 95 | 96 | ## License 97 | 98 | Licensed under either of 99 | 100 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or https://www.apache.org/licenses/LICENSE-2.0) 101 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or https://opensource.org/licenses/MIT) 102 | 103 | at your option. 
104 | 105 | ## Contribution 106 | 107 | Unless you explicitly state otherwise, any contribution intentionally submitted 108 | for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any 109 | additional terms or conditions. 110 | -------------------------------------------------------------------------------- /postgresql_archive/benches/archive.rs: -------------------------------------------------------------------------------- 1 | use criterion::{Criterion, criterion_group, criterion_main}; 2 | use postgresql_archive::blocking::{extract, get_archive}; 3 | use postgresql_archive::configuration::theseus; 4 | use postgresql_archive::{Result, VersionReq}; 5 | use std::fs::{create_dir_all, remove_dir_all}; 6 | use std::time::Duration; 7 | 8 | fn benchmarks(criterion: &mut Criterion) { 9 | bench_extract(criterion).ok(); 10 | } 11 | 12 | fn bench_extract(criterion: &mut Criterion) -> Result<()> { 13 | let version_req = VersionReq::STAR; 14 | let (_archive_version, archive) = get_archive(theseus::URL, &version_req)?; 15 | 16 | criterion.bench_function("extract", |bencher| { 17 | bencher.iter(|| { 18 | extract_archive(&archive).ok(); 19 | }); 20 | }); 21 | 22 | Ok(()) 23 | } 24 | 25 | fn extract_archive(archive: &Vec) -> Result<()> { 26 | let out_dir = tempfile::tempdir()?.path().to_path_buf(); 27 | create_dir_all(&out_dir)?; 28 | extract(theseus::URL, archive, &out_dir)?; 29 | remove_dir_all(&out_dir)?; 30 | Ok(()) 31 | } 32 | 33 | criterion_group!( 34 | name = benches; 35 | config = Criterion::default() 36 | .measurement_time(Duration::from_secs(30)) 37 | .sample_size(10); 38 | targets = benchmarks 39 | ); 40 | criterion_main!(benches); 41 | -------------------------------------------------------------------------------- /postgresql_archive/src/archive.rs: -------------------------------------------------------------------------------- 1 | //! Manage PostgreSQL archives 2 | #![allow(dead_code)] 3 | 4 | use crate::error::Result; 5 | use crate::{extractor, repository}; 6 | use regex_lite::Regex; 7 | use semver::{Version, VersionReq}; 8 | use std::path::{Path, PathBuf}; 9 | use tracing::instrument; 10 | 11 | /// Gets the version for the specified [version requirement](VersionReq). If a version for the 12 | /// [version requirement](VersionReq) is not found, then an error is returned. 13 | /// 14 | /// # Errors 15 | /// * If the version is not found. 16 | #[instrument(level = "debug")] 17 | pub async fn get_version(url: &str, version_req: &VersionReq) -> Result { 18 | let repository = repository::registry::get(url)?; 19 | let version = repository.get_version(version_req).await?; 20 | Ok(version) 21 | } 22 | 23 | /// Gets the archive for a given [version requirement](VersionReq) that passes the default 24 | /// matcher. If no archive is found for the [version requirement](VersionReq) and matcher then 25 | /// an [error](crate::error::Error) is returned. 26 | /// 27 | /// # Errors 28 | /// * If the archive is not found. 29 | /// * If the archive cannot be downloaded. 30 | #[instrument] 31 | pub async fn get_archive(url: &str, version_req: &VersionReq) -> Result<(Version, Vec)> { 32 | let repository = repository::registry::get(url)?; 33 | let archive = repository.get_archive(version_req).await?; 34 | let version = archive.version().clone(); 35 | let bytes = archive.bytes().to_vec(); 36 | Ok((version, bytes)) 37 | } 38 | 39 | /// Extracts the compressed tar `bytes` to the [out_dir](Path). 
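/// The extractor is resolved from the extractor registry using `url`, so the same URL
/// passed to `get_archive` should be supplied here; a catch-all mapping routes every
/// archive entry into `out_dir`.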
40 | /// 41 | /// # Errors 42 | /// Returns an error if the extraction fails. 43 | #[instrument(skip(bytes))] 44 | pub async fn extract(url: &str, bytes: &Vec, out_dir: &Path) -> Result> { 45 | let extractor_fn = extractor::registry::get(url)?; 46 | let mut extract_directories = extractor::ExtractDirectories::default(); 47 | extract_directories.add_mapping(Regex::new(".*")?, out_dir.to_path_buf()); 48 | extractor_fn(bytes, extract_directories) 49 | } 50 | 51 | #[cfg(test)] 52 | mod tests { 53 | use super::*; 54 | use crate::configuration::theseus::URL; 55 | 56 | #[tokio::test] 57 | async fn test_get_version() -> Result<()> { 58 | let version_req = VersionReq::parse("=16.4.0")?; 59 | let version = get_version(URL, &version_req).await?; 60 | assert_eq!(Version::new(16, 4, 0), version); 61 | Ok(()) 62 | } 63 | 64 | #[tokio::test] 65 | async fn test_get_archive() -> Result<()> { 66 | let version_req = VersionReq::parse("=16.4.0")?; 67 | let (version, bytes) = get_archive(URL, &version_req).await?; 68 | assert_eq!(Version::new(16, 4, 0), version); 69 | assert!(!bytes.is_empty()); 70 | Ok(()) 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /postgresql_archive/src/blocking/archive.rs: -------------------------------------------------------------------------------- 1 | use crate::{Version, VersionReq}; 2 | use std::path::{Path, PathBuf}; 3 | use std::sync::LazyLock; 4 | use tokio::runtime::Runtime; 5 | 6 | static RUNTIME: LazyLock = LazyLock::new(|| Runtime::new().unwrap()); 7 | 8 | /// Gets the version for the specified [version requirement](VersionReq). If a version for the 9 | /// [version requirement](VersionReq) is not found, then an error is returned. 10 | /// 11 | /// # Errors 12 | /// * If the version is not found. 13 | pub fn get_version(url: &str, version_req: &VersionReq) -> crate::Result { 14 | RUNTIME 15 | .handle() 16 | .block_on(async move { crate::get_version(url, version_req).await }) 17 | } 18 | 19 | /// Gets the archive for a given [version requirement](VersionReq) that passes the default 20 | /// matcher. 21 | /// 22 | /// If no archive is found for the [version requirement](VersionReq) and matcher then 23 | /// an [error](crate::error::Error) is returned. 24 | /// 25 | /// # Errors 26 | /// * If the archive is not found. 27 | /// * If the archive cannot be downloaded. 28 | pub fn get_archive(url: &str, version_req: &VersionReq) -> crate::Result<(Version, Vec)> { 29 | RUNTIME 30 | .handle() 31 | .block_on(async move { crate::get_archive(url, version_req).await }) 32 | } 33 | 34 | /// Extracts the compressed tar `bytes` to the [out_dir](Path). 35 | /// 36 | /// # Errors 37 | /// Returns an error if the extraction fails. 
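/// Note: the blocking API runs the asynchronous implementation on a shared Tokio runtime
/// via `block_on`, so these functions must not be called from within an async context.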
38 | pub fn extract(url: &str, bytes: &Vec, out_dir: &Path) -> crate::Result> { 39 | RUNTIME 40 | .handle() 41 | .block_on(async move { crate::extract(url, bytes, out_dir).await }) 42 | } 43 | -------------------------------------------------------------------------------- /postgresql_archive/src/blocking/mod.rs: -------------------------------------------------------------------------------- 1 | mod archive; 2 | 3 | pub use archive::{extract, get_archive, get_version}; 4 | -------------------------------------------------------------------------------- /postgresql_archive/src/configuration/custom/matcher.rs: -------------------------------------------------------------------------------- 1 | use semver::Version; 2 | 3 | /// Matcher for PostgreSQL binaries from custom GitHub release repositories following the same 4 | /// pattern as 5 | /// 6 | /// # Errors 7 | /// * If the asset matcher fails. 8 | pub fn matcher(_url: &str, name: &str, version: &Version) -> crate::Result { 9 | let target = target_triple::TARGET; 10 | // TODO: consider relaxing the version format to allow for more flexibility in where the version 11 | // and target appear in the filename. 12 | let expected_name = format!("postgresql-{version}-{target}.tar.gz"); 13 | Ok(name == expected_name) 14 | } 15 | 16 | #[cfg(test)] 17 | mod tests { 18 | use super::*; 19 | use crate::{Result, matcher}; 20 | 21 | const TEST_URL: &str = "https://github.com/owner/repo"; 22 | 23 | #[test] 24 | fn test_register_custom_repo() -> Result<()> { 25 | #[expect(clippy::unnecessary_wraps)] 26 | fn supports_fn(url: &str) -> Result { 27 | Ok(url == TEST_URL) 28 | } 29 | matcher::registry::register(supports_fn, matcher)?; 30 | 31 | let matcher = matcher::registry::get(TEST_URL)?; 32 | let version = Version::new(16, 3, 0); 33 | let expected_name = format!("postgresql-{}-{}.tar.gz", version, target_triple::TARGET); 34 | assert!(matcher("", &expected_name, &version)?); 35 | Ok(()) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /postgresql_archive/src/configuration/custom/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod matcher; 2 | 3 | pub use matcher::matcher; 4 | -------------------------------------------------------------------------------- /postgresql_archive/src/configuration/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod custom; 2 | #[cfg(feature = "theseus")] 3 | pub mod theseus; 4 | #[cfg(feature = "zonky")] 5 | pub mod zonky; 6 | -------------------------------------------------------------------------------- /postgresql_archive/src/configuration/theseus/extractor.rs: -------------------------------------------------------------------------------- 1 | use crate::Error::Unexpected; 2 | use crate::Result; 3 | use crate::extractor::{ExtractDirectories, tar_gz_extract}; 4 | use regex_lite::Regex; 5 | use std::fs::{create_dir_all, remove_dir_all, remove_file, rename}; 6 | use std::path::{Path, PathBuf}; 7 | use std::thread::sleep; 8 | use std::time::Duration; 9 | use tracing::{debug, instrument, warn}; 10 | 11 | /// Extracts the compressed tar `bytes` to the [out_dir](Path). 12 | /// 13 | /// # Errors 14 | /// Returns an error if the extraction fails. 
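/// Extraction is guarded by a lock file in the parent directory so that concurrent
/// processes do not extract the same archive twice; if the target directory already
/// exists, an empty file list is returned without extracting.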
15 | #[instrument(skip(bytes))] 16 | pub fn extract(bytes: &Vec, extract_directories: ExtractDirectories) -> Result> { 17 | let out_dir = extract_directories.get_path(".")?; 18 | 19 | let parent_dir = if let Some(parent) = out_dir.parent() { 20 | parent 21 | } else { 22 | debug!("No parent directory for {}", out_dir.to_string_lossy()); 23 | out_dir.as_path() 24 | }; 25 | 26 | create_dir_all(parent_dir)?; 27 | 28 | let lock_file = acquire_lock(parent_dir)?; 29 | // If the directory already exists, then the archive has already been 30 | // extracted by another process. 31 | if out_dir.exists() { 32 | debug!( 33 | "Directory already exists {}; skipping extraction: ", 34 | out_dir.to_string_lossy() 35 | ); 36 | remove_file(&lock_file)?; 37 | return Ok(Vec::new()); 38 | } 39 | 40 | let extract_dir = tempfile::tempdir_in(parent_dir)?.keep(); 41 | debug!("Extracting archive to {}", extract_dir.to_string_lossy()); 42 | let mut archive_extract_directories = ExtractDirectories::default(); 43 | archive_extract_directories.add_mapping(Regex::new(".*")?, extract_dir.clone()); 44 | let files = tar_gz_extract(bytes, archive_extract_directories)?; 45 | 46 | if out_dir.exists() { 47 | debug!( 48 | "Directory already exists {}; skipping rename and removing extraction directory: {}", 49 | out_dir.to_string_lossy(), 50 | extract_dir.to_string_lossy() 51 | ); 52 | remove_dir_all(&extract_dir)?; 53 | } else { 54 | debug!( 55 | "Renaming {} to {}", 56 | extract_dir.to_string_lossy(), 57 | out_dir.to_string_lossy() 58 | ); 59 | rename(extract_dir, out_dir)?; 60 | } 61 | 62 | if lock_file.is_file() { 63 | debug!("Removing lock file: {}", lock_file.to_string_lossy()); 64 | remove_file(lock_file)?; 65 | } 66 | 67 | Ok(files) 68 | } 69 | 70 | /// Acquires a lock file in the [out_dir](Path) to prevent multiple processes from extracting the 71 | /// archive at the same time. 72 | /// 73 | /// # Errors 74 | /// * If the lock file cannot be acquired. 75 | #[instrument(level = "debug")] 76 | fn acquire_lock(out_dir: &Path) -> Result { 77 | let lock_file = out_dir.join("postgresql-archive.lock"); 78 | 79 | if lock_file.is_file() { 80 | let metadata = lock_file.metadata()?; 81 | let created = metadata.created()?; 82 | 83 | if created.elapsed()?.as_secs() > 300 { 84 | warn!( 85 | "Stale lock file detected; removing file to attempt process recovery: {}", 86 | lock_file.to_string_lossy() 87 | ); 88 | remove_file(&lock_file)?; 89 | } 90 | } 91 | 92 | debug!( 93 | "Attempting to acquire lock: {}", 94 | lock_file.to_string_lossy() 95 | ); 96 | 97 | for _ in 0..30 { 98 | let lock = std::fs::OpenOptions::new() 99 | .create(true) 100 | .truncate(true) 101 | .write(true) 102 | .open(&lock_file); 103 | 104 | match lock { 105 | Ok(_) => { 106 | debug!("Lock acquired: {}", lock_file.to_string_lossy()); 107 | return Ok(lock_file); 108 | } 109 | Err(error) => { 110 | warn!("unable to acquire lock: {error}"); 111 | sleep(Duration::from_secs(1)); 112 | } 113 | } 114 | } 115 | 116 | Err(Unexpected("Failed to acquire lock".to_string())) 117 | } 118 | -------------------------------------------------------------------------------- /postgresql_archive/src/configuration/theseus/matcher.rs: -------------------------------------------------------------------------------- 1 | use semver::Version; 2 | 3 | /// Matcher for PostgreSQL binaries from 4 | /// 5 | /// # Errors 6 | /// * If the asset matcher fails. 
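/// For example, on the `x86_64-unknown-linux-gnu` target, version `16.4.0` matches the
/// asset name `postgresql-16.4.0-x86_64-unknown-linux-gnu.tar.gz`.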
7 | pub fn matcher(_url: &str, name: &str, version: &Version) -> crate::Result { 8 | let target = target_triple::TARGET; 9 | let expected_name = format!("postgresql-{version}-{target}.tar.gz"); 10 | Ok(name == expected_name) 11 | } 12 | 13 | #[cfg(test)] 14 | mod tests { 15 | use super::*; 16 | use crate::Result; 17 | 18 | #[test] 19 | fn test_asset_match_success() -> Result<()> { 20 | let url = ""; 21 | let version = Version::parse("16.4.0")?; 22 | let target = target_triple::TARGET; 23 | let name = format!("postgresql-{version}-{target}.tar.gz"); 24 | 25 | assert!(matcher(url, name.as_str(), &version)?, "{}", name); 26 | Ok(()) 27 | } 28 | 29 | #[test] 30 | fn test_asset_match_errors() -> Result<()> { 31 | let url = ""; 32 | let version = Version::parse("16.4.0")?; 33 | let target = target_triple::TARGET; 34 | let names = vec![ 35 | format!("foo-{version}-{target}.tar.gz"), 36 | format!("postgresql-{target}.tar.gz"), 37 | format!("postgresql-{version}.tar.gz"), 38 | format!("postgresql-{version}-{target}.tar"), 39 | format!("postgresql-{version}-{target}"), 40 | ]; 41 | 42 | for name in names { 43 | assert!(!matcher(url, name.as_str(), &version)?, "{}", name); 44 | } 45 | Ok(()) 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /postgresql_archive/src/configuration/theseus/mod.rs: -------------------------------------------------------------------------------- 1 | mod extractor; 2 | mod matcher; 3 | 4 | pub const URL: &str = "https://github.com/theseus-rs/postgresql-binaries"; 5 | 6 | pub use extractor::extract; 7 | pub use matcher::matcher; 8 | -------------------------------------------------------------------------------- /postgresql_archive/src/configuration/zonky/extractor.rs: -------------------------------------------------------------------------------- 1 | use crate::Error::Unexpected; 2 | use crate::Result; 3 | use crate::extractor::{ExtractDirectories, tar_xz_extract}; 4 | use regex_lite::Regex; 5 | use std::fs::{create_dir_all, remove_dir_all, remove_file, rename}; 6 | use std::io::Cursor; 7 | use std::path::{Path, PathBuf}; 8 | use std::thread::sleep; 9 | use std::time::Duration; 10 | use tracing::{debug, instrument, warn}; 11 | use zip::ZipArchive; 12 | 13 | /// Extracts the compressed tar `bytes` to the [out_dir](Path). 14 | /// 15 | /// # Errors 16 | /// Returns an error if the extraction fails. 17 | #[expect(clippy::case_sensitive_file_extension_comparisons)] 18 | #[instrument(skip(bytes))] 19 | pub fn extract(bytes: &Vec, extract_directories: ExtractDirectories) -> Result> { 20 | let out_dir = extract_directories.get_path(".")?; 21 | let parent_dir = if let Some(parent) = out_dir.parent() { 22 | parent 23 | } else { 24 | debug!("No parent directory for {}", out_dir.to_string_lossy()); 25 | out_dir.as_path() 26 | }; 27 | 28 | create_dir_all(parent_dir)?; 29 | 30 | let lock_file = acquire_lock(parent_dir)?; 31 | // If the directory already exists, then the archive has already been 32 | // extracted by another process. 
33 | if out_dir.exists() { 34 | debug!( 35 | "Directory already exists {}; skipping extraction: ", 36 | out_dir.to_string_lossy() 37 | ); 38 | remove_file(&lock_file)?; 39 | return Ok(Vec::new()); 40 | } 41 | 42 | let extract_dir = tempfile::tempdir_in(parent_dir)?.keep(); 43 | debug!("Extracting archive to {}", extract_dir.to_string_lossy()); 44 | 45 | let reader = Cursor::new(bytes); 46 | let mut archive = ZipArchive::new(reader).map_err(|error| Unexpected(error.to_string()))?; 47 | let mut archive_bytes = Vec::new(); 48 | for i in 0..archive.len() { 49 | let mut file = archive 50 | .by_index(i) 51 | .map_err(|error| Unexpected(error.to_string()))?; 52 | let file_name = file.name().to_string(); 53 | if file_name.ends_with(".txz") { 54 | debug!("Found archive file: {file_name}"); 55 | std::io::copy(&mut file, &mut archive_bytes)?; 56 | break; 57 | } 58 | } 59 | 60 | if archive_bytes.is_empty() { 61 | return Err(Unexpected("Failed to find archive file".to_string())); 62 | } 63 | 64 | let mut archive_extract_directories = ExtractDirectories::default(); 65 | archive_extract_directories.add_mapping(Regex::new(".*")?, extract_dir.clone()); 66 | let files = tar_xz_extract(&archive_bytes, archive_extract_directories)?; 67 | 68 | if out_dir.exists() { 69 | debug!( 70 | "Directory already exists {}; skipping rename and removing extraction directory: {}", 71 | out_dir.to_string_lossy(), 72 | extract_dir.to_string_lossy() 73 | ); 74 | remove_dir_all(&extract_dir)?; 75 | } else { 76 | debug!( 77 | "Renaming {} to {}", 78 | extract_dir.to_string_lossy(), 79 | out_dir.to_string_lossy() 80 | ); 81 | rename(extract_dir, out_dir)?; 82 | } 83 | 84 | if lock_file.is_file() { 85 | debug!("Removing lock file: {}", lock_file.to_string_lossy()); 86 | remove_file(lock_file)?; 87 | } 88 | 89 | Ok(files) 90 | } 91 | 92 | /// Acquires a lock file in the [out_dir](Path) to prevent multiple processes from extracting the 93 | /// archive at the same time. 94 | /// 95 | /// # Errors 96 | /// * If the lock file cannot be acquired. 
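/// A stale lock file older than 300 seconds is removed, and acquisition is retried up to
/// 30 times at one-second intervals before failing.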
97 | #[instrument(level = "debug")] 98 | fn acquire_lock(out_dir: &Path) -> crate::Result { 99 | let lock_file = out_dir.join("postgresql-archive.lock"); 100 | 101 | if lock_file.is_file() { 102 | let metadata = lock_file.metadata()?; 103 | let created = metadata.created()?; 104 | 105 | if created.elapsed()?.as_secs() > 300 { 106 | warn!( 107 | "Stale lock file detected; removing file to attempt process recovery: {}", 108 | lock_file.to_string_lossy() 109 | ); 110 | remove_file(&lock_file)?; 111 | } 112 | } 113 | 114 | debug!( 115 | "Attempting to acquire lock: {}", 116 | lock_file.to_string_lossy() 117 | ); 118 | 119 | for _ in 0..30 { 120 | let lock = std::fs::OpenOptions::new() 121 | .create(true) 122 | .truncate(true) 123 | .write(true) 124 | .open(&lock_file); 125 | 126 | match lock { 127 | Ok(_) => { 128 | debug!("Lock acquired: {}", lock_file.to_string_lossy()); 129 | return Ok(lock_file); 130 | } 131 | Err(error) => { 132 | warn!("unable to acquire lock: {error}"); 133 | sleep(Duration::from_secs(1)); 134 | } 135 | } 136 | } 137 | 138 | Err(Unexpected("Failed to acquire lock".to_string())) 139 | } 140 | -------------------------------------------------------------------------------- /postgresql_archive/src/configuration/zonky/matcher.rs: -------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use semver::Version; 3 | use std::env; 4 | 5 | /// Matcher for PostgreSQL binaries from 6 | /// 7 | /// # Errors 8 | /// * If the asset matcher fails. 9 | pub fn matcher(_url: &str, name: &str, version: &Version) -> Result { 10 | let os = get_os(); 11 | let arch = get_arch(); 12 | let expected_name = format!("embedded-postgres-binaries-{os}-{arch}-{version}.jar"); 13 | Ok(name == expected_name) 14 | } 15 | 16 | /// Returns the operating system of the current system. 17 | pub(crate) fn get_os() -> &'static str { 18 | match env::consts::OS { 19 | "macos" => "darwin", 20 | os => os, 21 | } 22 | } 23 | 24 | /// Returns the architecture of the current system. 
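/// Maps Rust's `std::env::consts::ARCH` values to the architecture names used by the
/// zonky Maven artifacts (e.g. `x86_64` becomes `amd64` and `aarch64` becomes `arm64v8`).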
25 | pub(crate) fn get_arch() -> &'static str { 26 | match env::consts::ARCH { 27 | "arm" => "arm32v7", 28 | "x86_64" => "amd64", 29 | "aarch64" => "arm64v8", 30 | "powerpc64" => "ppc64le", 31 | "x86" => "i386", 32 | arch => arch, 33 | } 34 | } 35 | 36 | #[cfg(test)] 37 | mod tests { 38 | use super::*; 39 | use crate::Result; 40 | 41 | #[test] 42 | fn test_asset_match_success() -> Result<()> { 43 | let url = ""; 44 | let os = get_os(); 45 | let arch = get_arch(); 46 | let version = Version::parse("16.4.0")?; 47 | let name = format!("embedded-postgres-binaries-{os}-{arch}-{version}.jar"); 48 | 49 | assert!(matcher(url, name.as_str(), &version)?, "{}", name); 50 | Ok(()) 51 | } 52 | 53 | #[test] 54 | fn test_asset_match_errors() -> Result<()> { 55 | let url = ""; 56 | let os = get_os(); 57 | let arch = get_arch(); 58 | let version = Version::parse("16.4.0")?; 59 | let names = vec![ 60 | format!("foo-{os}-{arch}-{version}.jar"), 61 | format!("embedded-postgres-binaries-{arch}-{version}.jar"), 62 | format!("embedded-postgres-binaries-{os}-{version}.jar"), 63 | format!("embedded-postgres-binaries-{os}-{arch}.jar"), 64 | format!("embedded-postgres-binaries-{os}-{arch}-{version}.zip"), 65 | ]; 66 | 67 | for name in names { 68 | assert!(!matcher(url, name.as_str(), &version)?, "{}", name); 69 | } 70 | Ok(()) 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /postgresql_archive/src/configuration/zonky/mod.rs: -------------------------------------------------------------------------------- 1 | mod extractor; 2 | mod matcher; 3 | mod repository; 4 | 5 | pub const URL: &str = "https://github.com/zonkyio/embedded-postgres-binaries"; 6 | 7 | pub use extractor::extract; 8 | pub use matcher::matcher; 9 | pub use repository::Zonky; 10 | -------------------------------------------------------------------------------- /postgresql_archive/src/configuration/zonky/repository.rs: -------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use crate::configuration::zonky::matcher::{get_arch, get_os}; 3 | use crate::repository::Archive; 4 | use crate::repository::maven::repository::Maven; 5 | use crate::repository::model::Repository; 6 | use async_trait::async_trait; 7 | use semver::{Version, VersionReq}; 8 | use tracing::instrument; 9 | 10 | /// Zonky repository. 11 | /// 12 | /// This repository is used to interact with Zonky Maven repositories 13 | /// (e.g. ). 14 | #[derive(Debug)] 15 | pub struct Zonky { 16 | maven: Box, 17 | } 18 | 19 | const MAVEN_URL: &str = "https://repo1.maven.org/maven2/io/zonky/test/postgres"; 20 | 21 | impl Zonky { 22 | /// Creates a new Zonky repository from the specified URL in the format 23 | /// 24 | /// 25 | /// # Errors 26 | /// * If the URL is invalid. 
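/// The supplied URL is not used to locate artifacts; the Maven repository URL is derived
/// from the current operating system and architecture.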
27 | #[expect(clippy::new_ret_no_self)] 28 | pub fn new(_url: &str) -> Result> { 29 | let os = get_os(); 30 | let arch = get_arch(); 31 | let archive = format!("embedded-postgres-binaries-{os}-{arch}"); 32 | let url = format!("{MAVEN_URL}/{archive}"); 33 | let maven = Maven::new(url.as_str())?; 34 | Ok(Box::new(Zonky { maven })) 35 | } 36 | } 37 | 38 | #[async_trait] 39 | impl Repository for Zonky { 40 | #[instrument(level = "debug")] 41 | fn name(&self) -> &str { 42 | "Zonky" 43 | } 44 | 45 | #[instrument(level = "debug")] 46 | async fn get_version(&self, version_req: &VersionReq) -> Result { 47 | self.maven.get_version(version_req).await 48 | } 49 | 50 | #[instrument] 51 | async fn get_archive(&self, version_req: &VersionReq) -> Result { 52 | self.maven.get_archive(version_req).await 53 | } 54 | } 55 | 56 | #[cfg(test)] 57 | mod tests { 58 | use super::*; 59 | use crate::configuration::zonky; 60 | 61 | #[test] 62 | fn test_name() { 63 | let zonky = Zonky::new(zonky::URL).unwrap(); 64 | assert_eq!("Zonky", zonky.name()); 65 | } 66 | 67 | // 68 | // get_version tests 69 | // 70 | 71 | #[tokio::test] 72 | async fn test_get_version() -> Result<()> { 73 | let maven = Zonky::new(zonky::URL)?; 74 | let version_req = VersionReq::STAR; 75 | let version = maven.get_version(&version_req).await?; 76 | assert!(version > Version::new(0, 0, 0)); 77 | Ok(()) 78 | } 79 | 80 | #[tokio::test] 81 | async fn test_get_specific_version() -> Result<()> { 82 | let zonky = Zonky::new(zonky::URL)?; 83 | let version_req = VersionReq::parse("=16.2.0")?; 84 | let version = zonky.get_version(&version_req).await?; 85 | assert_eq!(Version::new(16, 2, 0), version); 86 | Ok(()) 87 | } 88 | 89 | #[tokio::test] 90 | async fn test_get_specific_not_found() -> Result<()> { 91 | let zonky = Zonky::new(zonky::URL)?; 92 | let version_req = VersionReq::parse("=0.0.0")?; 93 | let error = zonky.get_version(&version_req).await.unwrap_err(); 94 | assert_eq!("version not found for '=0.0.0'", error.to_string()); 95 | Ok(()) 96 | } 97 | 98 | // 99 | // get_archive tests 100 | // 101 | 102 | #[tokio::test] 103 | async fn test_get_archive() -> Result<()> { 104 | let zonky = Zonky::new(zonky::URL)?; 105 | let os = get_os(); 106 | let arch = get_arch(); 107 | let version = Version::new(16, 2, 0); 108 | let version_req = VersionReq::parse(format!("={version}").as_str())?; 109 | let archive = zonky.get_archive(&version_req).await?; 110 | assert_eq!( 111 | format!("embedded-postgres-binaries-{os}-{arch}-{version}.jar"), 112 | archive.name() 113 | ); 114 | assert_eq!(&version, archive.version()); 115 | assert!(!archive.bytes().is_empty()); 116 | Ok(()) 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /postgresql_archive/src/extractor/mod.rs: -------------------------------------------------------------------------------- 1 | mod model; 2 | pub mod registry; 3 | mod tar_gz_extractor; 4 | mod tar_xz_extractor; 5 | mod zip_extractor; 6 | 7 | pub use model::ExtractDirectories; 8 | pub use tar_gz_extractor::extract as tar_gz_extract; 9 | pub use tar_xz_extractor::extract as tar_xz_extract; 10 | pub use zip_extractor::extract as zip_extract; 11 | -------------------------------------------------------------------------------- /postgresql_archive/src/extractor/model.rs: -------------------------------------------------------------------------------- 1 | use crate::{Error, Result}; 2 | use regex_lite::Regex; 3 | use std::fmt::Display; 4 | use std::path::PathBuf; 5 | 6 | /// Extract directories manage 
the directories to extract a file in an archive to based upon the 7 | /// associated regex matching the file path. 8 | #[derive(Debug)] 9 | pub struct ExtractDirectories { 10 | mappings: Vec<(Regex, PathBuf)>, 11 | } 12 | 13 | impl ExtractDirectories { 14 | /// Creates a new ExtractDirectories instance. 15 | #[must_use] 16 | pub fn new(mappings: Vec<(Regex, PathBuf)>) -> Self { 17 | Self { mappings } 18 | } 19 | 20 | /// Adds a new mapping to the ExtractDirectories instance. 21 | pub fn add_mapping(&mut self, regex: Regex, path: PathBuf) { 22 | self.mappings.push((regex, path)); 23 | } 24 | 25 | /// Returns the path associated with the first regex that matches the file path. 26 | /// If no regex matches, then the file path is returned. 27 | /// 28 | /// # Errors 29 | /// Returns an error if the file path cannot be converted to a string. 30 | pub fn get_path(&self, file_path: &str) -> Result { 31 | for (regex, path) in &self.mappings { 32 | if regex.is_match(file_path) { 33 | return Ok(path.clone()); 34 | } 35 | } 36 | Err(Error::Unexpected(format!( 37 | "No regex matched the file path: {file_path}" 38 | ))) 39 | } 40 | } 41 | 42 | /// Default implementation for ExtractDirectories. 43 | impl Default for ExtractDirectories { 44 | /// Creates a new ExtractDirectories instance with an empty mappings vector. 45 | fn default() -> Self { 46 | ExtractDirectories::new(Vec::new()) 47 | } 48 | } 49 | 50 | /// Display implementation for ExtractDirectories. 51 | impl Display for ExtractDirectories { 52 | /// Formats the ExtractDirectories instance. 53 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 54 | for (regex, path) in &self.mappings { 55 | writeln!(f, "{} -> {}", regex, path.display())?; 56 | } 57 | Ok(()) 58 | } 59 | } 60 | 61 | #[cfg(test)] 62 | mod tests { 63 | use super::*; 64 | 65 | #[test] 66 | fn test_new() -> Result<()> { 67 | let mappings = vec![(Regex::new(".*")?, PathBuf::from("test"))]; 68 | let extract_directories = ExtractDirectories::new(mappings); 69 | let path = extract_directories.get_path("foo")?; 70 | assert_eq!("test", path.to_string_lossy()); 71 | Ok(()) 72 | } 73 | 74 | #[test] 75 | fn test_default() { 76 | let extract_directories = ExtractDirectories::default(); 77 | let result = extract_directories.get_path("foo"); 78 | assert!(result.is_err()); 79 | } 80 | 81 | #[test] 82 | fn test_add_mapping() -> Result<()> { 83 | let mut extract_directories = ExtractDirectories::default(); 84 | extract_directories.add_mapping(Regex::new(".*")?, PathBuf::from("test")); 85 | let path = extract_directories.get_path("foo")?; 86 | assert_eq!("test", path.to_string_lossy()); 87 | Ok(()) 88 | } 89 | 90 | #[test] 91 | fn test_get_path() -> Result<()> { 92 | let mappings = vec![ 93 | (Regex::new("test")?, PathBuf::from("test")), 94 | (Regex::new("foo")?, PathBuf::from("bar")), 95 | ]; 96 | let extract_directories = ExtractDirectories::new(mappings); 97 | let path = extract_directories.get_path("foo")?; 98 | assert_eq!("bar", path.to_string_lossy()); 99 | Ok(()) 100 | } 101 | 102 | #[test] 103 | fn test_display() -> Result<()> { 104 | let mappings = vec![ 105 | (Regex::new("test")?, PathBuf::from("test")), 106 | (Regex::new("foo")?, PathBuf::from("bar")), 107 | ]; 108 | let extract_directories = ExtractDirectories::new(mappings); 109 | let display = extract_directories.to_string(); 110 | assert_eq!("test -> test\nfoo -> bar\n", display); 111 | Ok(()) 112 | } 113 | } 114 | -------------------------------------------------------------------------------- 
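A minimal usage sketch for `ExtractDirectories`, written as crate-internal code (the regexes and target paths are illustrative only, not taken from the crate): mappings are checked in the order they were added, and `get_path` returns the directory of the first matching pattern, so a catch-all `.*` mapping should be added last.

```rust
use regex_lite::Regex;
use std::path::PathBuf;

use crate::Result;
use crate::extractor::ExtractDirectories;

// Hypothetical routing: send `bin/` entries to a dedicated directory and
// everything else to a catch-all directory.
fn example_mappings() -> Result<()> {
    let mut directories = ExtractDirectories::default();
    directories.add_mapping(Regex::new("^bin")?, PathBuf::from("/tmp/postgresql/bin"));
    directories.add_mapping(Regex::new(".*")?, PathBuf::from("/tmp/postgresql"));

    // "bin/psql" matches the first mapping; "share/timezonesets" only matches
    // the catch-all added afterwards.
    assert_eq!(
        PathBuf::from("/tmp/postgresql/bin"),
        directories.get_path("bin/psql")?
    );
    assert_eq!(
        PathBuf::from("/tmp/postgresql"),
        directories.get_path("share/timezonesets")?
    );
    Ok(())
}
```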
/postgresql_archive/src/extractor/registry.rs: -------------------------------------------------------------------------------- 1 | use crate::Error::{PoisonedLock, UnsupportedExtractor}; 2 | use crate::Result; 3 | #[cfg(feature = "theseus")] 4 | use crate::configuration::theseus; 5 | #[cfg(feature = "zonky")] 6 | use crate::configuration::zonky; 7 | use crate::extractor::ExtractDirectories; 8 | use std::path::PathBuf; 9 | use std::sync::{Arc, LazyLock, Mutex, RwLock}; 10 | 11 | static REGISTRY: LazyLock>> = 12 | LazyLock::new(|| Arc::new(Mutex::new(RepositoryRegistry::default()))); 13 | 14 | type SupportsFn = fn(&str) -> Result; 15 | type ExtractFn = fn(&Vec, ExtractDirectories) -> Result>; 16 | 17 | /// Singleton struct to store extractors 18 | #[expect(clippy::type_complexity)] 19 | struct RepositoryRegistry { 20 | extractors: Vec<(Arc>, Arc>)>, 21 | } 22 | 23 | impl RepositoryRegistry { 24 | /// Creates a new extractor registry. 25 | fn new() -> Self { 26 | Self { 27 | extractors: Vec::new(), 28 | } 29 | } 30 | 31 | /// Registers an extractor. Newly registered extractors take precedence over existing ones. 32 | fn register(&mut self, supports_fn: SupportsFn, extract_fn: ExtractFn) { 33 | self.extractors.insert( 34 | 0, 35 | ( 36 | Arc::new(RwLock::new(supports_fn)), 37 | Arc::new(RwLock::new(extract_fn)), 38 | ), 39 | ); 40 | } 41 | 42 | /// Gets an extractor that supports the specified URL 43 | /// 44 | /// # Errors 45 | /// * If the URL is not supported. 46 | fn get(&self, url: &str) -> Result { 47 | for (supports_fn, extractor_fn) in &self.extractors { 48 | let supports_function = supports_fn 49 | .read() 50 | .map_err(|error| PoisonedLock(error.to_string()))?; 51 | if supports_function(url)? { 52 | let extractor_function = extractor_fn 53 | .read() 54 | .map_err(|error| PoisonedLock(error.to_string()))?; 55 | return Ok(*extractor_function); 56 | } 57 | } 58 | 59 | Err(UnsupportedExtractor(url.to_string())) 60 | } 61 | } 62 | 63 | impl Default for RepositoryRegistry { 64 | /// Creates a new repository registry with the default repositories registered. 65 | fn default() -> Self { 66 | let mut registry = Self::new(); 67 | #[cfg(feature = "theseus")] 68 | registry.register(|url| Ok(url.starts_with(theseus::URL)), theseus::extract); 69 | #[cfg(feature = "zonky")] 70 | registry.register(|url| Ok(url.starts_with(zonky::URL)), zonky::extract); 71 | registry 72 | } 73 | } 74 | 75 | /// Registers an extractor. Newly registered extractors take precedence over existing ones. 76 | /// 77 | /// # Errors 78 | /// * If the registry is poisoned. 79 | pub fn register(supports_fn: SupportsFn, extractor_fn: ExtractFn) -> Result<()> { 80 | let mut registry = REGISTRY 81 | .lock() 82 | .map_err(|error| PoisonedLock(error.to_string()))?; 83 | registry.register(supports_fn, extractor_fn); 84 | Ok(()) 85 | } 86 | 87 | /// Gets an extractor that supports the specified URL 88 | /// 89 | /// # Errors 90 | /// * If the URL is not supported. 
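/// Extractors are consulted in reverse registration order, so a custom extractor
/// registered at runtime takes precedence over the built-in `theseus` and `zonky`
/// extractors.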
91 | pub fn get(url: &str) -> Result { 92 | let registry = REGISTRY 93 | .lock() 94 | .map_err(|error| PoisonedLock(error.to_string()))?; 95 | registry.get(url) 96 | } 97 | 98 | #[cfg(test)] 99 | mod tests { 100 | use super::*; 101 | use regex_lite::Regex; 102 | 103 | #[test] 104 | fn test_register() -> Result<()> { 105 | register(|url| Ok(url == "https://foo.com"), |_, _| Ok(Vec::new()))?; 106 | let url = "https://foo.com"; 107 | let extractor = get(url)?; 108 | let mut extract_directories = ExtractDirectories::default(); 109 | extract_directories.add_mapping(Regex::new(".*")?, PathBuf::from("test")); 110 | assert!(extractor(&Vec::new(), extract_directories).is_ok()); 111 | Ok(()) 112 | } 113 | 114 | #[test] 115 | fn test_get_error() { 116 | let error = get("foo").unwrap_err(); 117 | assert_eq!("unsupported extractor for 'foo'", error.to_string()); 118 | } 119 | 120 | #[test] 121 | #[cfg(feature = "theseus")] 122 | fn test_get_theseus_postgresql_binaries() { 123 | assert!(get(theseus::URL).is_ok()); 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /postgresql_archive/src/extractor/tar_gz_extractor.rs: -------------------------------------------------------------------------------- 1 | use crate::Error::Unexpected; 2 | use crate::Result; 3 | use crate::extractor::ExtractDirectories; 4 | use flate2::bufread::GzDecoder; 5 | use num_format::{Locale, ToFormattedString}; 6 | use std::fs::{File, create_dir_all}; 7 | use std::io::{BufReader, Cursor, copy}; 8 | use std::path::PathBuf; 9 | use tar::Archive; 10 | use tracing::{debug, instrument, warn}; 11 | 12 | /// Extracts the compressed tar `bytes` to paths defined in `extract_directories`. 13 | /// 14 | /// # Errors 15 | /// Returns an error if the extraction fails. 16 | #[instrument(skip(bytes))] 17 | pub fn extract(bytes: &Vec, extract_directories: ExtractDirectories) -> Result> { 18 | let mut files = Vec::new(); 19 | let input = BufReader::new(Cursor::new(bytes)); 20 | let decoder = GzDecoder::new(input); 21 | let mut archive = Archive::new(decoder); 22 | let mut extracted_bytes = 0; 23 | 24 | for archive_entry in archive.entries()? 
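// Each entry is placed under the directory mapped to its top-level path component
// (with that component stripped); entries whose prefix has no mapping are skipped.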
{ 25 | let mut entry = archive_entry?; 26 | let entry_header = entry.header(); 27 | let entry_type = entry_header.entry_type(); 28 | let entry_size = entry_header.size()?; 29 | #[cfg(unix)] 30 | let file_mode = entry_header.mode()?; 31 | 32 | let entry_header_path = entry_header.path()?.to_path_buf(); 33 | let prefix = match entry_header_path.components().next() { 34 | Some(component) => component.as_os_str().to_str().unwrap_or_default(), 35 | None => { 36 | return Err(Unexpected( 37 | "Failed to get file header path prefix".to_string(), 38 | )); 39 | } 40 | }; 41 | let stripped_entry_header_path = entry_header_path.strip_prefix(prefix)?.to_path_buf(); 42 | let Ok(extract_dir) = extract_directories.get_path(prefix) else { 43 | continue; 44 | }; 45 | let mut entry_name = extract_dir.clone(); 46 | entry_name.push(stripped_entry_header_path); 47 | 48 | if entry_type.is_dir() || entry_name.is_dir() { 49 | create_dir_all(&entry_name)?; 50 | } else if entry_type.is_file() { 51 | let mut output_file = File::create(&entry_name)?; 52 | copy(&mut entry, &mut output_file)?; 53 | extracted_bytes += entry_size; 54 | 55 | #[cfg(unix)] 56 | { 57 | use std::os::unix::fs::PermissionsExt; 58 | output_file.set_permissions(std::fs::Permissions::from_mode(file_mode))?; 59 | } 60 | files.push(entry_name); 61 | } else if entry_type.is_symlink() { 62 | #[cfg(unix)] 63 | if let Some(symlink_target) = entry.link_name()? { 64 | let symlink_path = entry_name.clone(); 65 | std::os::unix::fs::symlink(symlink_target.as_ref(), symlink_path)?; 66 | files.push(entry_name); 67 | } 68 | } 69 | } 70 | 71 | let number_of_files = files.len(); 72 | debug!( 73 | "Extracted {} files totalling {}", 74 | number_of_files.to_formatted_string(&Locale::en), 75 | extracted_bytes, 76 | ); 77 | 78 | Ok(files) 79 | } 80 | -------------------------------------------------------------------------------- /postgresql_archive/src/extractor/tar_xz_extractor.rs: -------------------------------------------------------------------------------- 1 | use crate::Error::Unexpected; 2 | use crate::Result; 3 | use crate::extractor::ExtractDirectories; 4 | use liblzma::bufread::XzDecoder; 5 | use num_format::{Locale, ToFormattedString}; 6 | use std::fs::{File, create_dir_all}; 7 | use std::io::{BufReader, Cursor, copy}; 8 | use std::path::PathBuf; 9 | use tar::Archive; 10 | use tracing::{debug, instrument, warn}; 11 | 12 | /// Extracts the compressed tar `bytes` to paths defined in `extract_directories`. 13 | /// 14 | /// # Errors 15 | /// Returns an error if the extraction fails. 16 | #[instrument(skip(bytes))] 17 | pub fn extract(bytes: &Vec, extract_directories: ExtractDirectories) -> Result> { 18 | let mut files = Vec::new(); 19 | let input = BufReader::new(Cursor::new(bytes)); 20 | let decoder = XzDecoder::new(input); 21 | let mut archive = Archive::new(decoder); 22 | let mut extracted_bytes = 0; 23 | 24 | for archive_entry in archive.entries()? 
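// Unlike the tar.gz extractor, the full entry path (top-level component included) is
// preserved under the mapped directory, and missing parent directories are created
// before each file is written.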
{ 25 | let mut entry = archive_entry?; 26 | let entry_header = entry.header(); 27 | let entry_type = entry_header.entry_type(); 28 | let entry_size = entry_header.size()?; 29 | #[cfg(unix)] 30 | let file_mode = entry_header.mode()?; 31 | 32 | let entry_header_path = entry_header.path()?.to_path_buf(); 33 | let prefix = match entry_header_path.components().next() { 34 | Some(component) => component.as_os_str().to_str().unwrap_or_default(), 35 | None => { 36 | return Err(Unexpected( 37 | "Failed to get file header path prefix".to_string(), 38 | )); 39 | } 40 | }; 41 | let Ok(extract_dir) = extract_directories.get_path(prefix) else { 42 | continue; 43 | }; 44 | let mut entry_name = extract_dir.clone(); 45 | entry_name.push(entry_header_path); 46 | 47 | if entry_type.is_dir() || entry_name.is_dir() { 48 | create_dir_all(&entry_name)?; 49 | } else if entry_type.is_file() { 50 | if let Some(parent) = entry_name.parent() { 51 | create_dir_all(parent)?; 52 | } 53 | let mut output_file = File::create(&entry_name)?; 54 | copy(&mut entry, &mut output_file)?; 55 | extracted_bytes += entry_size; 56 | 57 | #[cfg(unix)] 58 | { 59 | use std::os::unix::fs::PermissionsExt; 60 | output_file.set_permissions(std::fs::Permissions::from_mode(file_mode))?; 61 | } 62 | files.push(entry_name); 63 | } else if entry_type.is_symlink() { 64 | #[cfg(unix)] 65 | if let Some(symlink_target) = entry.link_name()? { 66 | let symlink_path = entry_name.clone(); 67 | std::os::unix::fs::symlink(symlink_target.as_ref(), symlink_path)?; 68 | files.push(entry_name); 69 | } 70 | } 71 | } 72 | 73 | let number_of_files = files.len(); 74 | debug!( 75 | "Extracted {} files totalling {}", 76 | number_of_files.to_formatted_string(&Locale::en), 77 | extracted_bytes, 78 | ); 79 | 80 | Ok(files) 81 | } 82 | -------------------------------------------------------------------------------- /postgresql_archive/src/extractor/zip_extractor.rs: -------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use crate::extractor::ExtractDirectories; 3 | use num_format::{Locale, ToFormattedString}; 4 | use std::fs::create_dir_all; 5 | use std::io::Cursor; 6 | use std::path::PathBuf; 7 | use std::{fs, io}; 8 | use tracing::{debug, instrument, warn}; 9 | use zip::ZipArchive; 10 | 11 | /// Extracts the compressed tar `bytes` to paths defined in `extract_directories`. 12 | /// 13 | /// # Errors 14 | /// Returns an error if the extraction fails. 
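/// Note: archive entries are flattened; only each entry's file name is kept, so any
/// directory structure inside the archive is not preserved.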
15 | #[instrument(skip(bytes))] 16 | pub fn extract(bytes: &Vec, extract_directories: ExtractDirectories) -> Result> { 17 | let mut files = Vec::new(); 18 | let reader = Cursor::new(bytes); 19 | let mut archive = ZipArchive::new(reader).map_err(|_| io::Error::other("Zip error"))?; 20 | let mut extracted_bytes = 0; 21 | 22 | for i in 0..archive.len() { 23 | let mut file = archive 24 | .by_index(i) 25 | .map_err(|_| io::Error::other("Zip error"))?; 26 | let file_path = PathBuf::from(file.name()); 27 | let file_path = PathBuf::from(file_path.file_name().unwrap_or_default()); 28 | let file_name = file_path.to_string_lossy(); 29 | 30 | let Ok(extract_dir) = extract_directories.get_path(&file_name) else { 31 | continue; 32 | }; 33 | create_dir_all(&extract_dir)?; 34 | 35 | let mut out = Vec::new(); 36 | io::copy(&mut file, &mut out)?; 37 | extracted_bytes += out.len() as u64; 38 | let path = PathBuf::from(&extract_dir).join(file_path); 39 | fs::write(&path, out)?; 40 | files.push(path); 41 | } 42 | 43 | let number_of_files = files.len(); 44 | debug!( 45 | "Extracted {} files totalling {}", 46 | number_of_files.to_formatted_string(&Locale::en), 47 | extracted_bytes, 48 | ); 49 | 50 | Ok(files) 51 | } 52 | -------------------------------------------------------------------------------- /postgresql_archive/src/hasher/md5.rs: -------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use md5::{Digest, Md5}; 3 | 4 | /// Hashes the data using MD5. 5 | /// 6 | /// # Errors 7 | /// * If the data cannot be hashed. 8 | pub fn hash(data: &Vec) -> Result { 9 | let mut hasher = Md5::new(); 10 | hasher.update(data); 11 | let hash = hex::encode(hasher.finalize()); 12 | Ok(hash) 13 | } 14 | 15 | #[cfg(test)] 16 | mod tests { 17 | use super::*; 18 | 19 | #[test] 20 | fn test_hash() -> Result<()> { 21 | let data = vec![4, 2]; 22 | let hash = hash(&data)?; 23 | assert_eq!("21fb3d1d1a91a7e80dff456205f3380b", hash); 24 | Ok(()) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /postgresql_archive/src/hasher/mod.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "md5")] 2 | pub mod md5; 3 | pub mod registry; 4 | #[cfg(feature = "sha1")] 5 | pub mod sha1; 6 | #[cfg(feature = "sha2")] 7 | pub mod sha2_256; 8 | #[cfg(feature = "sha2")] 9 | pub mod sha2_512; 10 | -------------------------------------------------------------------------------- /postgresql_archive/src/hasher/sha1.rs: -------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use sha1::{Digest, Sha1}; 3 | 4 | /// Hashes the data using SHA1. 5 | /// 6 | /// # Errors 7 | /// * If the data cannot be hashed. 8 | pub fn hash(data: &Vec) -> Result { 9 | let mut hasher = Sha1::new(); 10 | hasher.update(data); 11 | let hash = hex::encode(hasher.finalize()); 12 | Ok(hash) 13 | } 14 | 15 | #[cfg(test)] 16 | mod tests { 17 | use super::*; 18 | 19 | #[test] 20 | fn test_hash() -> Result<()> { 21 | let data = vec![4, 2]; 22 | let hash = hash(&data)?; 23 | assert_eq!("1f3e1678e699640dfa5173d3a52b004f5e164d87", hash); 24 | Ok(()) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /postgresql_archive/src/hasher/sha2_256.rs: -------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use sha2::{Digest, Sha256}; 3 | 4 | /// Hashes the data using SHA2-256. 
5 | /// 6 | /// # Errors 7 | /// * If the data cannot be hashed. 8 | pub fn hash(data: &Vec) -> Result { 9 | let mut hasher = Sha256::new(); 10 | hasher.update(data); 11 | let hash = hex::encode(hasher.finalize()); 12 | Ok(hash) 13 | } 14 | 15 | #[cfg(test)] 16 | mod tests { 17 | use super::*; 18 | 19 | #[test] 20 | fn test_hash() -> Result<()> { 21 | let data = vec![4, 2]; 22 | let hash = hash(&data)?; 23 | assert_eq!( 24 | "b7586d310e5efb1b7d10a917ba5af403adbf54f4f77fe7fdcb4880a95dac7e7e", 25 | hash 26 | ); 27 | Ok(()) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /postgresql_archive/src/hasher/sha2_512.rs: -------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use sha2::{Digest, Sha512}; 3 | 4 | /// Hashes the data using SHA2-512. 5 | /// 6 | /// # Errors 7 | /// * If the data cannot be hashed. 8 | pub fn hash(data: &Vec) -> Result { 9 | let mut hasher = Sha512::new(); 10 | hasher.update(data); 11 | let hash = hex::encode(hasher.finalize()); 12 | Ok(hash) 13 | } 14 | 15 | #[cfg(test)] 16 | mod tests { 17 | use super::*; 18 | 19 | #[test] 20 | fn test_hash() -> Result<()> { 21 | let data = vec![4, 2]; 22 | let hash = hash(&data)?; 23 | assert_eq!( 24 | "7df6418d1791a6fe80e726319f16f107534a663346f99e0d155e359a54f6c74391e2f3be19c995c3c903926d348bd86c339bd982e10f09aa776e4ff85d36387a", 25 | hash 26 | ); 27 | Ok(()) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /postgresql_archive/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! # postgresql_archive 2 | //! 3 | //! [![Code Coverage](https://codecov.io/gh/theseus-rs/postgresql-embedded/branch/main/graph/badge.svg)](https://codecov.io/gh/theseus-rs/postgresql-embedded) 4 | //! [![Benchmarks](https://img.shields.io/badge/%F0%9F%90%B0_bencher-enabled-6ec241)](https://bencher.dev/perf/theseus-rs-postgresql-embedded) 5 | //! [![License](https://img.shields.io/crates/l/postgresql_archive?)](https://github.com/theseus-rs/postgresql-embedded/tree/main/postgresql_archive#license) 6 | //! [![Semantic Versioning](https://img.shields.io/badge/%E2%9A%99%EF%B8%8F_SemVer-2.0.0-blue)](https://semver.org/spec/v2.0.0.html) 7 | //! 8 | //! Retrieve and extract PostgreSQL on Linux, MacOS or Windows. 9 | //! 10 | //! ## Table of contents 11 | //! 12 | //! - [Examples](#examples) 13 | //! - [Feature flags](#feature-flags) 14 | //! - [Supported platforms](#supported-platforms) 15 | //! - [Safety](#safety) 16 | //! - [License](#license) 17 | //! - [Notes](#notes) 18 | //! 19 | //! ## Examples 20 | //! 21 | //! ### Asynchronous API 22 | //! 23 | //! ```no_run 24 | //! use postgresql_archive::{extract, get_archive, Result, VersionReq }; 25 | //! use postgresql_archive::configuration::theseus; 26 | //! 27 | //! #[tokio::main] 28 | //! async fn main() -> Result<()> { 29 | //! let url = theseus::URL; 30 | //! let (archive_version, archive) = get_archive(url, &VersionReq::STAR).await?; 31 | //! let out_dir = std::env::temp_dir(); 32 | //! let files = extract(url, &archive, &out_dir).await?; 33 | //! Ok(()) 34 | //! } 35 | //! ``` 36 | //! 37 | //! ### Synchronous API 38 | //! ```no_run 39 | //! #[cfg(feature = "blocking")] { 40 | //! use postgresql_archive::configuration::theseus; 41 | //! use postgresql_archive::VersionReq; 42 | //! use postgresql_archive::blocking::{extract, get_archive}; 43 | //! 44 | //! let url = theseus::URL; 45 | //! 
let (archive_version, archive) = get_archive(url, &VersionReq::STAR).unwrap(); 46 | //! let out_dir = std::env::temp_dir(); 47 | //! let result = extract(url, &archive, &out_dir).unwrap(); 48 | //! } 49 | //! ``` 50 | //! 51 | //! ## Feature flags 52 | //! 53 | //! postgresql_archive uses [feature flags] to address compile time and binary size 54 | //! uses. 55 | //! 56 | //! The following features are available: 57 | //! 58 | //! | Name | Description | Default? | 59 | //! |--------------|----------------------------|----------| 60 | //! | `blocking` | Enables the blocking API | No | 61 | //! | `native-tls` | Enables native-tls support | Yes | 62 | //! | `rustls` | Enables rustls support | No | 63 | //! 64 | //! ### Configurations 65 | //! 66 | //! | Name | Description | Default? | 67 | //! |-----------|-------------------------------------|----------| 68 | //! | `theseus` | Enables theseus PostgreSQL binaries | Yes | 69 | //! | `zonky` | Enables zonky PostgreSQL binaries | No | 70 | //! 71 | //! ### Hashers 72 | //! 73 | //! | Name | Description | Default? | 74 | //! |--------|----------------------|----------| 75 | //! | `md5` | Enables md5 hashers | No | 76 | //! | `sha1` | Enables sha1 hashers | No | 77 | //! | `sha2` | Enables sha2 hashers | Yes¹ | 78 | //! 79 | //! ¹ enabled by the `theseus` feature flag. 80 | //! 81 | //! ### Repositories 82 | //! 83 | //! | Name | Description | Default? | 84 | //! |----------|---------------------------|----------| 85 | //! | `github` | Enables github repository | Yes¹ | 86 | //! | `maven` | Enables maven repository | No | 87 | //! 88 | //! ¹ enabled by the `theseus` feature flag. 89 | //! 90 | //! ## Supported platforms 91 | //! 92 | //! `postgresql_archive` provides implementations for the following: 93 | //! 94 | //! * [theseus-rs/postgresql-binaries](https://github.com/theseus-rs/postgresql-binaries) 95 | //! * [zonkyio/embedded-postgres-binaries](https://github.com/zonkyio/embedded-postgres-binaries) 96 | //! 97 | //! ## Safety 98 | //! 99 | //! This crate uses `#![forbid(unsafe_code)]` to ensure everything is implemented in 100% safe Rust. 100 | //! 101 | //! ## License 102 | //! 103 | //! Licensed under either of 104 | //! 105 | //! * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or ) 106 | //! * MIT license ([LICENSE-MIT](LICENSE-MIT) or ) 107 | //! 108 | //! at your option. 109 | //! 110 | //! PostgreSQL is covered under [The PostgreSQL License](https://opensource.org/licenses/postgresql). 
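The crate documentation above resolves the newest release with `VersionReq::STAR`. Below is a minimal sketch of the pinned-version variant; it assumes the same asynchronous API and the default `theseus` configuration, and the `=16.4.0` requirement is simply the release this repository's own integration tests pin, not a recommendation.

```rust
use postgresql_archive::configuration::theseus;
use postgresql_archive::{extract, get_archive, Result, VersionReq};

#[tokio::main]
async fn main() -> Result<()> {
    // Pin an exact release instead of resolving the latest one with VersionReq::STAR.
    let version_req = VersionReq::parse("=16.4.0")?;
    let (archive_version, archive) = get_archive(theseus::URL, &version_req).await?;

    // Extract into a temporary directory and report what was unpacked.
    let out_dir = std::env::temp_dir();
    let files = extract(theseus::URL, &archive, &out_dir).await?;
    println!("extracted {} files for PostgreSQL {archive_version}", files.len());
    Ok(())
}
```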
111 | 112 | #![forbid(unsafe_code)] 113 | #![forbid(clippy::allow_attributes)] 114 | #![deny(clippy::pedantic)] 115 | #![allow(clippy::doc_markdown)] 116 | #![allow(clippy::module_name_repetitions)] 117 | 118 | mod archive; 119 | #[cfg(feature = "blocking")] 120 | pub mod blocking; 121 | pub mod configuration; 122 | mod error; 123 | pub mod extractor; 124 | pub mod hasher; 125 | pub mod matcher; 126 | pub mod repository; 127 | mod version; 128 | 129 | pub use archive::{extract, get_archive, get_version}; 130 | pub use error::{Error, Result}; 131 | pub use semver::{Version, VersionReq}; 132 | pub use version::{ExactVersion, ExactVersionReq}; 133 | -------------------------------------------------------------------------------- /postgresql_archive/src/matcher/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod registry; 2 | -------------------------------------------------------------------------------- /postgresql_archive/src/matcher/registry.rs: -------------------------------------------------------------------------------- 1 | use crate::Error::{PoisonedLock, UnsupportedMatcher}; 2 | use crate::Result; 3 | #[cfg(feature = "theseus")] 4 | use crate::configuration::theseus; 5 | #[cfg(feature = "zonky")] 6 | use crate::configuration::zonky; 7 | use semver::Version; 8 | use std::sync::{Arc, LazyLock, Mutex, RwLock}; 9 | 10 | static REGISTRY: LazyLock>> = 11 | LazyLock::new(|| Arc::new(Mutex::new(MatchersRegistry::default()))); 12 | 13 | pub type SupportsFn = fn(&str) -> Result; 14 | pub type MatcherFn = fn(&str, &str, &Version) -> Result; 15 | 16 | /// Singleton struct to store matchers 17 | #[expect(clippy::type_complexity)] 18 | struct MatchersRegistry { 19 | matchers: Vec<(Arc>, Arc>)>, 20 | } 21 | 22 | impl MatchersRegistry { 23 | /// Creates a new matcher registry. 24 | fn new() -> Self { 25 | Self { 26 | matchers: Vec::new(), 27 | } 28 | } 29 | 30 | /// Registers a matcher for a supports function. Newly registered matchers with the take 31 | /// precedence over existing ones. 32 | fn register(&mut self, supports_fn: SupportsFn, matcher_fn: MatcherFn) { 33 | self.matchers.insert( 34 | 0, 35 | ( 36 | Arc::new(RwLock::new(supports_fn)), 37 | Arc::new(RwLock::new(matcher_fn)), 38 | ), 39 | ); 40 | } 41 | 42 | /// Get a matcher for the specified URL. 43 | /// 44 | /// # Errors 45 | /// * If the registry is poisoned. 46 | fn get>(&self, url: S) -> Result { 47 | let url = url.as_ref(); 48 | for (supports_fn, matcher_fn) in &self.matchers { 49 | let supports_function = supports_fn 50 | .read() 51 | .map_err(|error| PoisonedLock(error.to_string()))?; 52 | if supports_function(url)? { 53 | let matcher_function = matcher_fn 54 | .read() 55 | .map_err(|error| PoisonedLock(error.to_string()))?; 56 | return Ok(*matcher_function); 57 | } 58 | } 59 | 60 | Err(UnsupportedMatcher(url.to_string())) 61 | } 62 | } 63 | 64 | impl Default for MatchersRegistry { 65 | /// Creates a new matcher registry with the default matchers registered. 66 | fn default() -> Self { 67 | let mut registry = Self::new(); 68 | #[cfg(feature = "theseus")] 69 | registry.register(|url| Ok(url == theseus::URL), theseus::matcher); 70 | #[cfg(feature = "zonky")] 71 | registry.register(|url| Ok(url == zonky::URL), zonky::matcher); 72 | registry 73 | } 74 | } 75 | 76 | /// Registers a matcher for a supports function. Newly registered matchers with the take 77 | /// precedence over existing ones. 78 | /// 79 | /// # Errors 80 | /// * If the registry is poisoned. 
81 | pub fn register(supports_fn: SupportsFn, matcher_fn: MatcherFn) -> Result<()> { 82 | let mut registry = REGISTRY 83 | .lock() 84 | .map_err(|error| PoisonedLock(error.to_string()))?; 85 | registry.register(supports_fn, matcher_fn); 86 | Ok(()) 87 | } 88 | 89 | /// Get a matcher for the specified URL. 90 | /// 91 | /// # Errors 92 | /// * If the registry is poisoned. 93 | pub fn get>(url: S) -> Result { 94 | let registry = REGISTRY 95 | .lock() 96 | .map_err(|error| PoisonedLock(error.to_string()))?; 97 | registry.get(url) 98 | } 99 | 100 | #[cfg(test)] 101 | mod tests { 102 | use super::*; 103 | 104 | #[test] 105 | fn test_register() -> Result<()> { 106 | register( 107 | |url| Ok(url == "https://foo.com"), 108 | |_url, name, _version| Ok(name == "foo"), 109 | )?; 110 | 111 | let matcher = get("https://foo.com")?; 112 | let version = Version::new(16, 3, 0); 113 | 114 | assert!(matcher("", "foo", &version)?); 115 | Ok(()) 116 | } 117 | 118 | #[test] 119 | fn test_get_error() { 120 | let result = get("foo").unwrap_err(); 121 | assert_eq!("unsupported matcher for 'foo'", result.to_string()); 122 | } 123 | 124 | #[test] 125 | #[cfg(feature = "theseus")] 126 | fn test_get_theseus_postgresql_binaries() { 127 | assert!(get(theseus::URL).is_ok()); 128 | } 129 | 130 | #[test] 131 | #[cfg(feature = "zonky")] 132 | fn test_get_zonyk_postgresql_binaries() { 133 | assert!(get(zonky::URL).is_ok()); 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /postgresql_archive/src/repository/github/mod.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod models; 2 | pub mod repository; 3 | -------------------------------------------------------------------------------- /postgresql_archive/src/repository/github/models.rs: -------------------------------------------------------------------------------- 1 | //! Structs for GitHub API responses 2 | use serde::{Deserialize, Serialize}; 3 | 4 | /// Represents a GitHub release 5 | #[derive(Clone, Debug, Deserialize, Serialize)] 6 | pub(crate) struct Release { 7 | pub url: String, 8 | pub assets_url: String, 9 | pub upload_url: String, 10 | pub html_url: String, 11 | pub id: i64, 12 | pub tag_name: String, 13 | pub name: String, 14 | pub draft: bool, 15 | pub prerelease: bool, 16 | pub assets: Vec, 17 | } 18 | 19 | /// Represents a GitHub asset 20 | #[derive(Clone, Debug, Deserialize, Serialize)] 21 | pub(crate) struct Asset { 22 | pub url: String, 23 | pub id: i64, 24 | pub node_id: String, 25 | pub name: String, 26 | pub label: String, 27 | pub content_type: String, 28 | pub state: String, 29 | pub size: i64, 30 | pub browser_download_url: String, 31 | } 32 | -------------------------------------------------------------------------------- /postgresql_archive/src/repository/maven/mod.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod models; 2 | pub mod repository; 3 | 4 | pub const URL: &str = "https://repo1.maven.org/maven2"; 5 | -------------------------------------------------------------------------------- /postgresql_archive/src/repository/maven/models.rs: -------------------------------------------------------------------------------- 1 | /// Maven metadata XML structure 2 | /// 3 | /// ```xml 4 | /// 5 | /// io.zonky.test.postgres 6 | /// embedded-postgres-binaries-linux-amd64 7 | /// 8 | /// 16.2.0 9 | /// 16.2.0 10 | /// 11 | /// ... 
12 | /// 15.6.0 13 | /// 16.2.0 14 | /// 15 | /// 20240210235512 16 | /// 17 | /// 18 | /// ``` 19 | use serde::{Deserialize, Serialize}; 20 | 21 | /// Represents a Maven artifact metadata 22 | #[derive(Clone, Debug, Deserialize, Serialize)] 23 | pub(crate) struct Metadata { 24 | #[serde(rename = "groupId")] 25 | pub(crate) group_id: String, 26 | #[serde(rename = "artifactId")] 27 | pub(crate) artifact_id: String, 28 | pub(crate) versioning: Versioning, 29 | } 30 | 31 | /// Represents Maven versioning information 32 | #[derive(Clone, Debug, Deserialize, Serialize)] 33 | pub(crate) struct Versioning { 34 | pub(crate) latest: String, 35 | pub(crate) release: String, 36 | pub(crate) versions: Versions, 37 | #[serde(rename = "lastUpdated")] 38 | pub(crate) last_updated: String, 39 | } 40 | 41 | /// Represents Maven versions 42 | #[derive(Clone, Debug, Deserialize, Serialize)] 43 | pub(crate) struct Versions { 44 | pub(crate) version: Vec, 45 | } 46 | -------------------------------------------------------------------------------- /postgresql_archive/src/repository/mod.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "github")] 2 | pub mod github; 3 | #[cfg(feature = "maven")] 4 | pub mod maven; 5 | pub mod model; 6 | pub mod registry; 7 | 8 | pub use model::{Archive, Repository}; 9 | -------------------------------------------------------------------------------- /postgresql_archive/src/repository/model.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use semver::{Version, VersionReq}; 3 | use std::fmt::Debug; 4 | 5 | /// A trait for archive repository implementations. 6 | #[async_trait] 7 | pub trait Repository: Debug + Send + Sync { 8 | /// Gets the name of the repository. 9 | fn name(&self) -> &str; 10 | 11 | /// Gets the version for the specified [version requirement](VersionReq). If a 12 | /// [version](Version) for the [version requirement](VersionReq) is not found, 13 | /// then an error is returned. 14 | /// 15 | /// # Errors 16 | /// * If the version is not found. 17 | async fn get_version(&self, version_req: &VersionReq) -> crate::Result; 18 | 19 | /// Gets the archive for a given [version requirement](VersionReq) that passes the default 20 | /// matcher. If no archive is found for the [version requirement](VersionReq) and matcher then 21 | /// an [error](crate::error::Error) is returned. 22 | /// 23 | /// # Errors 24 | /// * If the archive is not found. 25 | /// * If the archive cannot be downloaded. 26 | async fn get_archive(&self, version_req: &VersionReq) -> crate::Result; 27 | } 28 | 29 | /// A struct representing an archive. 30 | #[derive(Clone, Debug)] 31 | pub struct Archive { 32 | name: String, 33 | version: Version, 34 | bytes: Vec, 35 | } 36 | 37 | impl Archive { 38 | /// Creates a new archive. 39 | #[must_use] 40 | pub fn new(name: String, version: Version, bytes: Vec) -> Self { 41 | Self { 42 | name, 43 | version, 44 | bytes, 45 | } 46 | } 47 | 48 | /// Gets the name of the archive. 49 | #[must_use] 50 | pub fn name(&self) -> &str { 51 | &self.name 52 | } 53 | 54 | /// Gets the version of the archive. 55 | #[must_use] 56 | pub fn version(&self) -> &Version { 57 | &self.version 58 | } 59 | 60 | /// Gets the bytes of the archive. 
61 | #[must_use] 62 | pub fn bytes(&self) -> &[u8] { 63 | &self.bytes 64 | } 65 | } 66 | 67 | #[cfg(test)] 68 | mod tests { 69 | use super::*; 70 | use semver::Version; 71 | 72 | #[test] 73 | fn test_archive() { 74 | let name = "test".to_string(); 75 | let version = Version::parse("1.0.0").unwrap(); 76 | let bytes = vec![0, 1, 2, 3]; 77 | let archive = Archive::new(name.clone(), version.clone(), bytes.clone()); 78 | assert_eq!(archive.name(), name); 79 | assert_eq!(archive.version(), &version); 80 | assert_eq!(archive.bytes(), bytes.as_slice()); 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /postgresql_archive/src/version.rs: -------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use semver::{Version, VersionReq}; 3 | 4 | /// A trait for getting the exact version from a [version requirement](VersionReq). 5 | pub trait ExactVersion { 6 | /// Gets the exact version from a [version requirement](VersionReq) or `None`. 7 | fn exact_version(&self) -> Option; 8 | } 9 | 10 | impl ExactVersion for VersionReq { 11 | /// Gets the exact version from a [version requirement](VersionReq) or `None`. 12 | fn exact_version(&self) -> Option { 13 | if self.comparators.len() != 1 { 14 | return None; 15 | } 16 | let comparator = self.comparators.first()?; 17 | if comparator.op != semver::Op::Exact { 18 | return None; 19 | } 20 | let minor = comparator.minor?; 21 | let patch = comparator.patch?; 22 | let version = Version::new(comparator.major, minor, patch); 23 | Some(version) 24 | } 25 | } 26 | 27 | /// A trait for getting the exact version requirement from a [version](Version). 28 | pub trait ExactVersionReq { 29 | /// Gets the exact version requirement from a [version](Version). 30 | /// 31 | /// # Errors 32 | /// * If the version requirement cannot be parsed. 33 | fn exact_version_req(&self) -> Result; 34 | } 35 | 36 | impl ExactVersionReq for Version { 37 | /// Gets the exact version requirement from a [version](Version). 38 | /// 39 | /// # Errors 40 | /// * If the version requirement cannot be parsed. 
41 | fn exact_version_req(&self) -> Result { 42 | let version = format!("={self}"); 43 | let version_req = VersionReq::parse(&version)?; 44 | Ok(version_req) 45 | } 46 | } 47 | 48 | #[cfg(test)] 49 | mod tests { 50 | use super::*; 51 | use crate::Result; 52 | 53 | #[test] 54 | fn test_exact_version_star() { 55 | let version_req = VersionReq::STAR; 56 | assert_eq!(None, version_req.exact_version()); 57 | } 58 | 59 | #[test] 60 | fn test_exact_version_greater_than() -> Result<()> { 61 | let version_req = VersionReq::parse(">16")?; 62 | assert_eq!(None, version_req.exact_version()); 63 | Ok(()) 64 | } 65 | 66 | #[test] 67 | fn test_exact_version_full_no_equals() -> Result<()> { 68 | let version_req = VersionReq::parse("16.4.0")?; 69 | assert_eq!(None, version_req.exact_version()); 70 | Ok(()) 71 | } 72 | 73 | #[test] 74 | fn test_exact_version_full_equals() -> Result<()> { 75 | let version_req = VersionReq::parse("=16.4.0")?; 76 | let version = Version::new(16, 4, 0); 77 | assert_eq!(Some(version), version_req.exact_version()); 78 | Ok(()) 79 | } 80 | 81 | #[test] 82 | fn test_exact_version_major_minor() -> Result<()> { 83 | let version_req = VersionReq::parse("=16.4")?; 84 | assert_eq!(None, version_req.exact_version()); 85 | Ok(()) 86 | } 87 | 88 | #[test] 89 | fn test_exact_version_major() -> Result<()> { 90 | let version_req = VersionReq::parse("=16")?; 91 | assert_eq!(None, version_req.exact_version()); 92 | Ok(()) 93 | } 94 | 95 | #[test] 96 | fn test_exact_version_range() -> Result<()> { 97 | let version_req = VersionReq::parse(">= 16, < 17")?; 98 | assert_eq!(None, version_req.exact_version()); 99 | Ok(()) 100 | } 101 | 102 | #[test] 103 | fn test_exact_version_req_not_equal() -> Result<()> { 104 | let version = Version::new(1, 2, 3); 105 | assert_ne!(VersionReq::parse("=1.0.0")?, version.exact_version_req()?); 106 | Ok(()) 107 | } 108 | 109 | #[test] 110 | fn test_exact_version_req_major_minor_patch() -> Result<()> { 111 | let version = Version::new(16, 4, 0); 112 | assert_eq!(VersionReq::parse("=16.4.0")?, version.exact_version_req()?); 113 | Ok(()) 114 | } 115 | 116 | #[test] 117 | fn test_exact_version_prerelease() -> Result<()> { 118 | let version = Version::parse("1.2.3-alpha")?; 119 | assert_eq!( 120 | VersionReq::parse("=1.2.3-alpha")?, 121 | version.exact_version_req()? 
122 | ); 123 | Ok(()) 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /postgresql_archive/tests/archive.rs: -------------------------------------------------------------------------------- 1 | use postgresql_archive::configuration::theseus; 2 | use postgresql_archive::extract; 3 | use postgresql_archive::{get_archive, get_version}; 4 | use semver::VersionReq; 5 | use std::fs::remove_dir_all; 6 | use test_log::test; 7 | 8 | #[test(tokio::test)] 9 | async fn test_get_version_not_found() -> postgresql_archive::Result<()> { 10 | let invalid_version_req = VersionReq::parse("=1.0.0")?; 11 | let result = get_version(theseus::URL, &invalid_version_req).await; 12 | 13 | assert!(result.is_err()); 14 | Ok(()) 15 | } 16 | 17 | #[test(tokio::test)] 18 | async fn test_get_version() -> anyhow::Result<()> { 19 | let version_req = VersionReq::parse("=16.4.0")?; 20 | let latest_version = get_version(theseus::URL, &version_req).await?; 21 | 22 | assert!(version_req.matches(&latest_version)); 23 | Ok(()) 24 | } 25 | 26 | #[test(tokio::test)] 27 | async fn test_get_archive_and_extract() -> anyhow::Result<()> { 28 | let url = theseus::URL; 29 | let version_req = VersionReq::parse("=16.4.0")?; 30 | let (archive_version, archive) = get_archive(url, &version_req).await?; 31 | 32 | assert!(version_req.matches(&archive_version)); 33 | 34 | let out_dir = tempfile::tempdir()?.path().to_path_buf(); 35 | let files = extract(url, &archive, &out_dir).await?; 36 | #[cfg(all(target_os = "linux", target_arch = "x86_64"))] 37 | assert_eq!(1_312, files.len()); 38 | #[cfg(all(target_os = "macos", target_arch = "aarch64"))] 39 | assert_eq!(1_271, files.len()); 40 | #[cfg(all(target_os = "macos", target_arch = "x86_64"))] 41 | assert_eq!(1_271, files.len()); 42 | #[cfg(all(target_os = "windows", target_arch = "x86_64"))] 43 | assert_eq!(3_092, files.len()); 44 | remove_dir_all(&out_dir)?; 45 | Ok(()) 46 | } 47 | 48 | #[test(tokio::test)] 49 | async fn test_get_archive_version_not_found() -> postgresql_archive::Result<()> { 50 | let invalid_version_req = VersionReq::parse("=1.0.0")?; 51 | let result = get_archive(theseus::URL, &invalid_version_req).await; 52 | 53 | assert!(result.is_err()); 54 | Ok(()) 55 | } 56 | -------------------------------------------------------------------------------- /postgresql_archive/tests/blocking.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "blocking")] 2 | use postgresql_archive::VersionReq; 3 | #[cfg(feature = "blocking")] 4 | use postgresql_archive::blocking::{extract, get_archive, get_version}; 5 | #[cfg(feature = "blocking")] 6 | use postgresql_archive::configuration::theseus; 7 | #[cfg(feature = "blocking")] 8 | use std::fs::remove_dir_all; 9 | #[cfg(feature = "blocking")] 10 | use test_log::test; 11 | 12 | #[cfg(feature = "blocking")] 13 | #[test] 14 | fn test_get_version() -> anyhow::Result<()> { 15 | let version_req = VersionReq::STAR; 16 | let latest_version = get_version(theseus::URL, &version_req)?; 17 | 18 | assert!(version_req.matches(&latest_version)); 19 | Ok(()) 20 | } 21 | 22 | #[cfg(feature = "blocking")] 23 | #[test] 24 | fn test_get_archive_and_extract() -> anyhow::Result<()> { 25 | let url = theseus::URL; 26 | let version_req = &VersionReq::parse("=16.4.0")?; 27 | let (archive_version, archive) = get_archive(url, version_req)?; 28 | 29 | assert!(version_req.matches(&archive_version)); 30 | 31 | let out_dir = tempfile::tempdir()?.path().to_path_buf(); 32 | let files 
= extract(url, &archive, &out_dir)?; 33 | assert!(!files.is_empty()); 34 | remove_dir_all(&out_dir)?; 35 | Ok(()) 36 | } 37 | -------------------------------------------------------------------------------- /postgresql_archive/tests/zonky.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "zonky")] 2 | use postgresql_archive::configuration::zonky; 3 | #[cfg(feature = "zonky")] 4 | use postgresql_archive::extract; 5 | #[cfg(feature = "zonky")] 6 | use postgresql_archive::{get_archive, get_version}; 7 | #[cfg(feature = "zonky")] 8 | use semver::VersionReq; 9 | #[cfg(feature = "zonky")] 10 | use std::fs::remove_dir_all; 11 | #[cfg(feature = "zonky")] 12 | use test_log::test; 13 | 14 | #[test(tokio::test)] 15 | #[cfg(feature = "zonky")] 16 | async fn test_get_version_not_found() -> postgresql_archive::Result<()> { 17 | let invalid_version_req = VersionReq::parse("=1.0.0")?; 18 | let result = get_version(zonky::URL, &invalid_version_req).await; 19 | 20 | assert!(result.is_err()); 21 | Ok(()) 22 | } 23 | 24 | #[test(tokio::test)] 25 | #[cfg(feature = "zonky")] 26 | async fn test_get_version() -> anyhow::Result<()> { 27 | let version_req = VersionReq::parse("=16.2.0")?; 28 | let latest_version = get_version(zonky::URL, &version_req).await?; 29 | 30 | assert!(version_req.matches(&latest_version)); 31 | Ok(()) 32 | } 33 | 34 | #[test(tokio::test)] 35 | #[cfg(feature = "zonky")] 36 | async fn test_get_archive_and_extract() -> anyhow::Result<()> { 37 | let url = zonky::URL; 38 | let version_req = VersionReq::parse("=16.4.0")?; 39 | let (archive_version, archive) = get_archive(url, &version_req).await?; 40 | 41 | assert!(version_req.matches(&archive_version)); 42 | 43 | let out_dir = tempfile::tempdir()?.path().to_path_buf(); 44 | let files = extract(url, &archive, &out_dir).await?; 45 | assert!(files.len() > 1_000); 46 | remove_dir_all(&out_dir)?; 47 | Ok(()) 48 | } 49 | 50 | #[test(tokio::test)] 51 | #[cfg(feature = "zonky")] 52 | async fn test_get_archive_version_not_found() -> postgresql_archive::Result<()> { 53 | let invalid_version_req = VersionReq::parse("=1.0.0")?; 54 | let result = get_archive(zonky::URL, &invalid_version_req).await; 55 | 56 | assert!(result.is_err()); 57 | Ok(()) 58 | } 59 | -------------------------------------------------------------------------------- /postgresql_commands/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors.workspace = true 3 | categories.workspace = true 4 | description = "PostgreSQL commands for interacting with a PostgreSQL server." 
5 | edition.workspace = true 6 | keywords.workspace = true 7 | license.workspace = true 8 | name = "postgresql_commands" 9 | repository = "https://github.com/theseus-rs/postgresql-embedded" 10 | rust-version.workspace = true 11 | version.workspace = true 12 | 13 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 14 | 15 | [dependencies] 16 | thiserror = { workspace = true } 17 | tokio = { workspace = true, features = ["full"], optional = true } 18 | tracing = { workspace = true, features = ["log"] } 19 | 20 | [dev-dependencies] 21 | test-log = { workspace = true } 22 | tokio = { workspace = true, features = ["full"] } 23 | 24 | [features] 25 | default = [] 26 | tokio = ["dep:tokio"] 27 | -------------------------------------------------------------------------------- /postgresql_commands/README.md: -------------------------------------------------------------------------------- 1 | # PostgreSQL Commands 2 | 3 | [![ci](https://github.com/theseus-rs/postgresql-embedded/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/theseus-rs/postgresql-embedded/actions/workflows/ci.yml) 4 | [![Documentation](https://docs.rs/postgresql_commands/badge.svg)](https://docs.rs/postgresql_commands) 5 | [![Code Coverage](https://codecov.io/gh/theseus-rs/postgresql-embedded/branch/main/graph/badge.svg)](https://codecov.io/gh/theseus-rs/postgresql-embedded) 6 | [![Latest version](https://img.shields.io/crates/v/postgresql_commands.svg)](https://crates.io/crates/postgresql_commands) 7 | [![License](https://img.shields.io/crates/l/postgresql_commands?)](https://github.com/theseus-rs/postgresql-embedded/tree/main/postgresql_commands#license) 8 | [![Semantic Versioning](https://img.shields.io/badge/%E2%9A%99%EF%B8%8F_SemVer-2.0.0-blue)](https://semver.org/spec/v2.0.0.html) 9 | 10 | A library for executing PostgreSQL command line utilities. 11 | 12 | ## Examples 13 | 14 | ```rust 15 | use postgresql_commands::Result; 16 | use postgresql_commands::psql::PsqlBuilder; 17 | 18 | fn main() -> Result<()> { 19 | let psql = PsqlBuilder::new() 20 | .command("CREATE DATABASE \"test\"") 21 | .host("127.0.0.1") 22 | .port(5432) 23 | .username("postgresql") 24 | .pg_password("password") 25 | .build(); 26 | 27 | let (stdout, stderr) = psql.execute()?; 28 | Ok(()) 29 | } 30 | ``` 31 | 32 | ## Feature flags 33 | 34 | The following features are available: 35 | 36 | | Name | Description | Default? | 37 | |---------|-----------------------------------|----------| 38 | | `tokio` | Enables the use of tokio commands | No | 39 | 40 | ## Safety 41 | 42 | This crate uses `#![forbid(unsafe_code)]` to ensure everything is implemented in 100% safe Rust. 43 | 44 | ## License 45 | 46 | Licensed under either of 47 | 48 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or https://www.apache.org/licenses/LICENSE-2.0) 49 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or https://opensource.org/licenses/MIT) 50 | 51 | at your option. 52 | 53 | ## Contribution 54 | 55 | Unless you explicitly state otherwise, any contribution intentionally submitted 56 | for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any 57 | additional terms or conditions. 
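The README example above drives `psql` directly against an already running server. As a further, hedged illustration of how these builders are typically combined with the companion `postgresql_embedded` crate, the sketch below mirrors this repository's `dump_command` integration test; it assumes `postgresql_embedded`, `tokio`, and `anyhow` are available as dependencies and writes a schema-only dump of a freshly created database to a temporary file.

```rust
use postgresql_commands::pg_dump::PgDumpBuilder;
use postgresql_commands::{CommandBuilder, CommandExecutor};
use postgresql_embedded::PostgreSQL;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Start an ephemeral PostgreSQL instance; its settings carry the binary dir, port, etc.
    let mut postgresql = PostgreSQL::default();
    postgresql.setup().await?;
    postgresql.start().await?;
    let settings = postgresql.settings();

    postgresql.create_database("test").await?;

    // Build and run pg_dump using the connection settings of the embedded server.
    let dump_file = std::env::temp_dir().join("test_schema.sql");
    let mut pg_dump = PgDumpBuilder::from(settings)
        .dbname("test")
        .schema_only()
        .file(dump_file.to_string_lossy().to_string())
        .build();
    let (_stdout, _stderr) = pg_dump.execute()?;

    postgresql.stop().await?;
    Ok(())
}
```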
58 | -------------------------------------------------------------------------------- /postgresql_commands/src/error.rs: -------------------------------------------------------------------------------- 1 | /// `PostgreSQL` command result type 2 | pub type Result = core::result::Result; 3 | 4 | /// `PostgreSQL` command errors 5 | #[derive(Debug, thiserror::Error)] 6 | pub enum Error { 7 | /// Error when a command fails 8 | #[error("Command error: stdout={stdout}; stderr={stderr}")] 9 | CommandError { stdout: String, stderr: String }, 10 | /// Error when IO operations fail 11 | #[error("{0}")] 12 | IoError(String), 13 | /// Error when a command fails to execute before the timeout is reached 14 | #[error("{0}")] 15 | TimeoutError(String), 16 | } 17 | 18 | /// Convert [standard IO errors](std::io::Error) to a [embedded errors](Error::IoError) 19 | impl From for Error { 20 | fn from(error: std::io::Error) -> Self { 21 | Error::IoError(error.to_string()) 22 | } 23 | } 24 | 25 | #[cfg(feature = "tokio")] 26 | /// Convert [elapsed time errors](tokio::time::error::Elapsed) to [embedded errors](Error::TimeoutError) 27 | impl From for Error { 28 | fn from(error: tokio::time::error::Elapsed) -> Self { 29 | Error::TimeoutError(error.to_string()) 30 | } 31 | } 32 | 33 | /// These are relatively low value tests; they are here to reduce the coverage gap and 34 | /// ensure that the error conversions are working as expected. 35 | #[cfg(test)] 36 | mod test { 37 | use super::*; 38 | 39 | #[test] 40 | fn test_from_io_error() { 41 | let io_error = std::io::Error::other("test"); 42 | let error = Error::from(io_error); 43 | assert_eq!(error.to_string(), "test"); 44 | } 45 | 46 | #[cfg(feature = "tokio")] 47 | #[tokio::test] 48 | async fn test_from_elapsed_error() { 49 | let result = tokio::time::timeout(std::time::Duration::from_nanos(1), async { 50 | tokio::time::sleep(std::time::Duration::from_secs(1)).await; 51 | }) 52 | .await; 53 | assert!(result.is_err()); 54 | if let Err(elapsed_error) = result { 55 | let error = Error::from(elapsed_error); 56 | assert_eq!(error.to_string(), "deadline has elapsed"); 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /postgresql_commands/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![forbid(clippy::allow_attributes)] 3 | #![deny(clippy::pedantic)] 4 | #![deny(clippy::unwrap_used)] 5 | #![allow(async_fn_in_trait)] 6 | #![allow(clippy::module_name_repetitions)] 7 | #![allow(clippy::struct_excessive_bools)] 8 | 9 | //! Command builders for interacting with `PostgreSQL` via CLI. 10 | //! 11 | //! The commands are implemented as builders, which can be used to construct a 12 | //! [standard Command](std::process::Command) or [tokio Command](tokio::process::Command). 
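As a concrete illustration of the builder-to-`Command` flow described above, here is a minimal, hedged sketch using `PgControlDataBuilder` (whose full builder API appears later in this crate); the program and data directory paths are placeholders, not defaults shipped by the crate.

```rust
use postgresql_commands::pg_controldata::PgControlDataBuilder;
use postgresql_commands::{CommandBuilder, CommandExecutor, Result};

fn main() -> Result<()> {
    // Point the builder at a PostgreSQL installation and data directory
    // (placeholder paths; substitute the ones on your system).
    let mut pg_controldata = PgControlDataBuilder::new()
        .program_dir("/usr/lib/postgresql/16/bin")
        .pgdata("/var/lib/postgresql/16/main")
        .build();

    // Executes the underlying `pg_controldata` binary and captures its output.
    let (stdout, _stderr) = pg_controldata.execute()?;
    println!("{stdout}");
    Ok(())
}
```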
13 | 14 | pub mod clusterdb; 15 | pub mod createdb; 16 | pub mod createuser; 17 | pub mod dropdb; 18 | pub mod dropuser; 19 | pub mod ecpg; 20 | pub mod error; 21 | pub mod initdb; 22 | pub mod oid2name; 23 | pub mod pg_amcheck; 24 | pub mod pg_archivecleanup; 25 | pub mod pg_basebackup; 26 | pub mod pg_checksums; 27 | pub mod pg_config; 28 | pub mod pg_controldata; 29 | pub mod pg_ctl; 30 | pub mod pg_dump; 31 | pub mod pg_dumpall; 32 | pub mod pg_isready; 33 | pub mod pg_receivewal; 34 | pub mod pg_recvlogical; 35 | pub mod pg_resetwal; 36 | pub mod pg_restore; 37 | pub mod pg_rewind; 38 | pub mod pg_test_fsync; 39 | pub mod pg_test_timing; 40 | pub mod pg_upgrade; 41 | pub mod pg_verifybackup; 42 | pub mod pg_waldump; 43 | pub mod pgbench; 44 | pub mod postgres; 45 | pub mod psql; 46 | pub mod reindexdb; 47 | pub mod traits; 48 | pub mod vacuumdb; 49 | pub mod vacuumlo; 50 | 51 | pub use error::{Error, Result}; 52 | #[cfg(test)] 53 | pub use traits::TestSettings; 54 | pub use traits::{AsyncCommandExecutor, CommandBuilder, CommandExecutor, Settings}; 55 | -------------------------------------------------------------------------------- /postgresql_commands/src/pg_controldata.rs: -------------------------------------------------------------------------------- 1 | use crate::Settings; 2 | use crate::traits::CommandBuilder; 3 | use std::ffi::{OsStr, OsString}; 4 | use std::path::PathBuf; 5 | 6 | /// `pg_controldata` displays control information of a `PostgreSQL` database cluster. 7 | #[derive(Clone, Debug, Default)] 8 | pub struct PgControlDataBuilder { 9 | program_dir: Option, 10 | envs: Vec<(OsString, OsString)>, 11 | pgdata: Option, 12 | version: bool, 13 | help: bool, 14 | } 15 | 16 | impl PgControlDataBuilder { 17 | /// Create a new [`PgControlDataBuilder`] 18 | #[must_use] 19 | pub fn new() -> Self { 20 | Self::default() 21 | } 22 | 23 | /// Create a new [`PgControlDataBuilder`] from [Settings] 24 | pub fn from(settings: &dyn Settings) -> Self { 25 | Self::new().program_dir(settings.get_binary_dir()) 26 | } 27 | 28 | /// Location of the program binary 29 | #[must_use] 30 | pub fn program_dir>(mut self, path: P) -> Self { 31 | self.program_dir = Some(path.into()); 32 | self 33 | } 34 | 35 | /// Set the data directory 36 | #[must_use] 37 | pub fn pgdata>(mut self, pgdata: P) -> Self { 38 | self.pgdata = Some(pgdata.into()); 39 | self 40 | } 41 | 42 | /// output version information, then exit 43 | #[must_use] 44 | pub fn version(mut self) -> Self { 45 | self.version = true; 46 | self 47 | } 48 | 49 | /// show help, then exit 50 | #[must_use] 51 | pub fn help(mut self) -> Self { 52 | self.help = true; 53 | self 54 | } 55 | } 56 | 57 | impl CommandBuilder for PgControlDataBuilder { 58 | /// Get the program name 59 | fn get_program(&self) -> &'static OsStr { 60 | "pg_controldata".as_ref() 61 | } 62 | 63 | /// Location of the program binary 64 | fn get_program_dir(&self) -> &Option { 65 | &self.program_dir 66 | } 67 | 68 | /// Get the arguments for the command 69 | fn get_args(&self) -> Vec { 70 | let mut args: Vec = Vec::new(); 71 | 72 | if let Some(pgdata) = &self.pgdata { 73 | args.push("--pgdata".into()); 74 | args.push(pgdata.into()); 75 | } 76 | 77 | if self.version { 78 | args.push("--version".into()); 79 | } 80 | 81 | if self.help { 82 | args.push("--help".into()); 83 | } 84 | 85 | args 86 | } 87 | 88 | /// Get the environment variables for the command 89 | fn get_envs(&self) -> Vec<(OsString, OsString)> { 90 | self.envs.clone() 91 | } 92 | 93 | /// Set an environment variable for the 
command 94 | fn env>(mut self, key: S, value: S) -> Self { 95 | self.envs 96 | .push((key.as_ref().to_os_string(), value.as_ref().to_os_string())); 97 | self 98 | } 99 | } 100 | 101 | #[cfg(test)] 102 | mod tests { 103 | use super::*; 104 | use crate::TestSettings; 105 | use crate::traits::CommandToString; 106 | use test_log::test; 107 | 108 | #[test] 109 | fn test_builder_new() { 110 | let command = PgControlDataBuilder::new().program_dir(".").build(); 111 | assert_eq!( 112 | PathBuf::from(".").join("pg_controldata"), 113 | PathBuf::from(command.to_command_string().replace('"', "")) 114 | ); 115 | } 116 | 117 | #[test] 118 | fn test_builder_from() { 119 | let command = PgControlDataBuilder::from(&TestSettings).build(); 120 | #[cfg(not(target_os = "windows"))] 121 | let command_prefix = r#""./pg_controldata""#; 122 | #[cfg(target_os = "windows")] 123 | let command_prefix = r#"".\\pg_controldata""#; 124 | 125 | assert_eq!(format!("{command_prefix}"), command.to_command_string()); 126 | } 127 | 128 | #[test] 129 | fn test_builder() { 130 | let command = PgControlDataBuilder::new() 131 | .env("PGDATABASE", "database") 132 | .pgdata("pgdata") 133 | .version() 134 | .help() 135 | .build(); 136 | #[cfg(not(target_os = "windows"))] 137 | let command_prefix = r#"PGDATABASE="database" "#; 138 | #[cfg(target_os = "windows")] 139 | let command_prefix = String::new(); 140 | 141 | assert_eq!( 142 | format!(r#"{command_prefix}"pg_controldata" "--pgdata" "pgdata" "--version" "--help""#), 143 | command.to_command_string() 144 | ); 145 | } 146 | } 147 | -------------------------------------------------------------------------------- /postgresql_commands/src/pg_test_fsync.rs: -------------------------------------------------------------------------------- 1 | use crate::Settings; 2 | use crate::traits::CommandBuilder; 3 | use std::convert::AsRef; 4 | use std::ffi::{OsStr, OsString}; 5 | use std::path::PathBuf; 6 | 7 | /// `pg_test_fsync` command to determine fastest `wal_sync_method` for `PostgreSQL` 8 | #[derive(Clone, Debug, Default)] 9 | pub struct PgTestFsyncBuilder { 10 | program_dir: Option, 11 | envs: Vec<(OsString, OsString)>, 12 | filename: Option, 13 | secs_per_test: Option, 14 | } 15 | 16 | impl PgTestFsyncBuilder { 17 | /// Create a new [`PgTestFsyncBuilder`] 18 | #[must_use] 19 | pub fn new() -> Self { 20 | Self::default() 21 | } 22 | 23 | /// Create a new [`PgTestFsyncBuilder`] from [Settings] 24 | pub fn from(settings: &dyn Settings) -> Self { 25 | Self::new().program_dir(settings.get_binary_dir()) 26 | } 27 | 28 | /// Location of the program binary 29 | #[must_use] 30 | pub fn program_dir>(mut self, path: P) -> Self { 31 | self.program_dir = Some(path.into()); 32 | self 33 | } 34 | 35 | /// Set the filename 36 | #[must_use] 37 | pub fn filename>(mut self, filename: S) -> Self { 38 | self.filename = Some(filename.as_ref().to_os_string()); 39 | self 40 | } 41 | 42 | /// Set the seconds per test 43 | #[must_use] 44 | pub fn secs_per_test(mut self, secs: usize) -> Self { 45 | self.secs_per_test = Some(secs); 46 | self 47 | } 48 | } 49 | 50 | impl CommandBuilder for PgTestFsyncBuilder { 51 | /// Get the program name 52 | fn get_program(&self) -> &'static OsStr { 53 | "pg_test_fsync".as_ref() 54 | } 55 | 56 | /// Location of the program binary 57 | fn get_program_dir(&self) -> &Option { 58 | &self.program_dir 59 | } 60 | 61 | /// Get the arguments for the command 62 | fn get_args(&self) -> Vec { 63 | let mut args: Vec = Vec::new(); 64 | 65 | if let Some(filename) = &self.filename { 66 | 
args.push("-f".into()); 67 | args.push(filename.into()); 68 | } 69 | 70 | if let Some(secs) = &self.secs_per_test { 71 | args.push("-s".into()); 72 | args.push(secs.to_string().into()); 73 | } 74 | 75 | args 76 | } 77 | 78 | /// Get the environment variables for the command 79 | fn get_envs(&self) -> Vec<(OsString, OsString)> { 80 | self.envs.clone() 81 | } 82 | 83 | /// Set an environment variable for the command 84 | fn env>(mut self, key: S, value: S) -> Self { 85 | self.envs 86 | .push((key.as_ref().to_os_string(), value.as_ref().to_os_string())); 87 | self 88 | } 89 | } 90 | 91 | #[cfg(test)] 92 | mod tests { 93 | use super::*; 94 | use crate::TestSettings; 95 | use crate::traits::CommandToString; 96 | use test_log::test; 97 | 98 | #[test] 99 | fn test_builder_new() { 100 | let command = PgTestFsyncBuilder::new().program_dir(".").build(); 101 | assert_eq!( 102 | PathBuf::from(".").join("pg_test_fsync"), 103 | PathBuf::from(command.to_command_string().replace('"', "")) 104 | ); 105 | } 106 | 107 | #[test] 108 | fn test_builder_from() { 109 | let command = PgTestFsyncBuilder::from(&TestSettings).build(); 110 | #[cfg(not(target_os = "windows"))] 111 | let command_prefix = r#""./pg_test_fsync""#; 112 | #[cfg(target_os = "windows")] 113 | let command_prefix = r#"".\\pg_test_fsync""#; 114 | 115 | assert_eq!(format!("{command_prefix}"), command.to_command_string()); 116 | } 117 | 118 | #[test] 119 | fn test_builder() { 120 | let command = PgTestFsyncBuilder::new() 121 | .env("PGDATABASE", "database") 122 | .filename("filename") 123 | .secs_per_test(10) 124 | .build(); 125 | #[cfg(not(target_os = "windows"))] 126 | let command_prefix = r#"PGDATABASE="database" "#; 127 | #[cfg(target_os = "windows")] 128 | let command_prefix = String::new(); 129 | 130 | assert_eq!( 131 | format!(r#"{command_prefix}"pg_test_fsync" "-f" "filename" "-s" "10""#), 132 | command.to_command_string() 133 | ); 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /postgresql_commands/src/pg_test_timing.rs: -------------------------------------------------------------------------------- 1 | use crate::Settings; 2 | use crate::traits::CommandBuilder; 3 | use std::convert::AsRef; 4 | use std::ffi::{OsStr, OsString}; 5 | use std::path::PathBuf; 6 | 7 | /// `pg_test_timing` tests the timing of a `PostgreSQL` instance. 
8 | #[derive(Clone, Debug, Default)] 9 | pub struct PgTestTimingBuilder { 10 | program_dir: Option, 11 | envs: Vec<(OsString, OsString)>, 12 | duration: Option, 13 | } 14 | 15 | impl PgTestTimingBuilder { 16 | /// Create a new [`PgTestTimingBuilder`] 17 | #[must_use] 18 | pub fn new() -> Self { 19 | Self::default() 20 | } 21 | 22 | /// Create a new [`PgTestTimingBuilder`] from [Settings] 23 | pub fn from(settings: &dyn Settings) -> Self { 24 | Self::new().program_dir(settings.get_binary_dir()) 25 | } 26 | 27 | /// Location of the program binary 28 | #[must_use] 29 | pub fn program_dir>(mut self, path: P) -> Self { 30 | self.program_dir = Some(path.into()); 31 | self 32 | } 33 | 34 | /// set the duration for the test 35 | #[must_use] 36 | pub fn duration>(mut self, duration: S) -> Self { 37 | self.duration = Some(duration.as_ref().to_os_string()); 38 | self 39 | } 40 | } 41 | 42 | impl CommandBuilder for PgTestTimingBuilder { 43 | /// Get the program name 44 | fn get_program(&self) -> &'static OsStr { 45 | "pg_test_timing".as_ref() 46 | } 47 | 48 | /// Location of the program binary 49 | fn get_program_dir(&self) -> &Option { 50 | &self.program_dir 51 | } 52 | 53 | /// Get the arguments for the command 54 | fn get_args(&self) -> Vec { 55 | let mut args: Vec = Vec::new(); 56 | 57 | if let Some(duration) = &self.duration { 58 | args.push("-d".into()); 59 | args.push(duration.into()); 60 | } 61 | 62 | args 63 | } 64 | 65 | /// Get the environment variables for the command 66 | fn get_envs(&self) -> Vec<(OsString, OsString)> { 67 | self.envs.clone() 68 | } 69 | 70 | /// Set an environment variable for the command 71 | fn env>(mut self, key: S, value: S) -> Self { 72 | self.envs 73 | .push((key.as_ref().to_os_string(), value.as_ref().to_os_string())); 74 | self 75 | } 76 | } 77 | 78 | #[cfg(test)] 79 | mod tests { 80 | use super::*; 81 | use crate::TestSettings; 82 | use crate::traits::CommandToString; 83 | use test_log::test; 84 | 85 | #[test] 86 | fn test_builder_new() { 87 | let command = PgTestTimingBuilder::new().program_dir(".").build(); 88 | assert_eq!( 89 | PathBuf::from(".").join("pg_test_timing"), 90 | PathBuf::from(command.to_command_string().replace('"', "")) 91 | ); 92 | } 93 | 94 | #[test] 95 | fn test_builder_from() { 96 | let command = PgTestTimingBuilder::from(&TestSettings).build(); 97 | #[cfg(not(target_os = "windows"))] 98 | let command_prefix = r#""./pg_test_timing""#; 99 | #[cfg(target_os = "windows")] 100 | let command_prefix = r#"".\\pg_test_timing""#; 101 | 102 | assert_eq!(format!("{command_prefix}"), command.to_command_string()); 103 | } 104 | 105 | #[test] 106 | fn test_builder() { 107 | let command = PgTestTimingBuilder::new() 108 | .env("PGDATABASE", "database") 109 | .duration("10") 110 | .build(); 111 | #[cfg(not(target_os = "windows"))] 112 | let command_prefix = r#"PGDATABASE="database" "#; 113 | #[cfg(target_os = "windows")] 114 | let command_prefix = String::new(); 115 | 116 | assert_eq!( 117 | format!(r#"{command_prefix}"pg_test_timing" "-d" "10""#), 118 | command.to_command_string() 119 | ); 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /postgresql_embedded/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors.workspace = true 3 | build = "build/build.rs" 4 | categories.workspace = true 5 | description = "Install and run a PostgreSQL database locally on Linux, MacOS or Windows. 
PostgreSQL can be bundled with your application, or downloaded on demand." 6 | edition.workspace = true 7 | keywords.workspace = true 8 | license.workspace = true 9 | name = "postgresql_embedded" 10 | repository = "https://github.com/theseus-rs/postgresql-embedded" 11 | rust-version.workspace = true 12 | version.workspace = true 13 | 14 | [build-dependencies] 15 | anyhow = { workspace = true } 16 | postgresql_archive = { path = "../postgresql_archive", version = "0.18.5", default-features = false } 17 | target-triple = { workspace = true } 18 | tokio = { workspace = true, features = ["full"] } 19 | url = { workspace = true } 20 | 21 | [dependencies] 22 | postgresql_archive = { path = "../postgresql_archive", version = "0.18.5", default-features = false } 23 | postgresql_commands = { path = "../postgresql_commands", version = "0.18.5" } 24 | rand = { workspace = true } 25 | semver = { workspace = true } 26 | sqlx = { workspace = true, features = ["runtime-tokio"] } 27 | tempfile = { workspace = true } 28 | thiserror = { workspace = true } 29 | tokio = { workspace = true, features = ["full"], optional = true } 30 | tracing = { workspace = true, features = ["log"] } 31 | url = { workspace = true } 32 | 33 | [dev-dependencies] 34 | anyhow = { workspace = true } 35 | criterion = { workspace = true } 36 | test-log = { workspace = true } 37 | tokio = { workspace = true, features = ["full"] } 38 | 39 | [features] 40 | default = [ 41 | "native-tls", 42 | "theseus", 43 | ] 44 | blocking = ["tokio"] 45 | bundled = ["postgresql_archive/github"] 46 | indicatif = [ 47 | "postgresql_archive/indicatif", 48 | ] 49 | native-tls = [ 50 | "postgresql_archive/native-tls", 51 | "sqlx/tls-native-tls", 52 | ] 53 | rustls = [ 54 | "postgresql_archive/rustls", 55 | "sqlx/tls-rustls", 56 | ] 57 | theseus = [ 58 | "postgresql_archive/theseus", 59 | ] 60 | tokio = [ 61 | "dep:tokio", 62 | "postgresql_commands/tokio", 63 | "sqlx/runtime-tokio", 64 | ] 65 | zonky = [ 66 | "postgresql_archive/zonky", 67 | ] 68 | 69 | [package.metadata.release] 70 | dependent-version = "upgrade" 71 | 72 | [package.metadata.docs.rs] 73 | no-default-features = true 74 | features = ["blocking", "theseus", "tokio"] 75 | targets = ["x86_64-unknown-linux-gnu"] 76 | 77 | [[bench]] 78 | harness = false 79 | name = "embedded" 80 | -------------------------------------------------------------------------------- /postgresql_embedded/benches/embedded.rs: -------------------------------------------------------------------------------- 1 | use criterion::{Criterion, criterion_group, criterion_main}; 2 | use postgresql_embedded::Result; 3 | use postgresql_embedded::blocking::PostgreSQL; 4 | use std::time::Duration; 5 | 6 | fn benchmarks(criterion: &mut Criterion) { 7 | bench_lifecycle(criterion).ok(); 8 | } 9 | 10 | fn bench_lifecycle(criterion: &mut Criterion) -> Result<()> { 11 | criterion.bench_function("lifecycle", |bencher| { 12 | bencher.iter(|| { 13 | lifecycle().ok(); 14 | }); 15 | }); 16 | 17 | Ok(()) 18 | } 19 | 20 | fn lifecycle() -> Result<()> { 21 | let mut postgresql = PostgreSQL::default(); 22 | postgresql.setup()?; 23 | postgresql.start()?; 24 | postgresql.stop() 25 | } 26 | 27 | criterion_group!( 28 | name = benches; 29 | config = Criterion::default() 30 | .measurement_time(Duration::from_secs(30)) 31 | .sample_size(10); 32 | targets = benchmarks 33 | ); 34 | criterion_main!(benches); 35 | -------------------------------------------------------------------------------- /postgresql_embedded/build/build.rs: 
-------------------------------------------------------------------------------- 1 | #[cfg(feature = "bundled")] 2 | mod bundle; 3 | 4 | use anyhow::Result; 5 | 6 | #[tokio::main] 7 | async fn main() -> Result<()> { 8 | #[cfg(feature = "bundled")] 9 | bundle::stage_postgresql_archive().await?; 10 | Ok(()) 11 | } 12 | -------------------------------------------------------------------------------- /postgresql_embedded/build/bundle.rs: -------------------------------------------------------------------------------- 1 | #![allow(dead_code)] 2 | 3 | use anyhow::Result; 4 | use postgresql_archive::configuration::{custom, theseus}; 5 | use postgresql_archive::repository::github::repository::GitHub; 6 | use postgresql_archive::{VersionReq, matcher}; 7 | use postgresql_archive::{get_archive, repository}; 8 | use std::fs::File; 9 | use std::io::Write; 10 | use std::path::PathBuf; 11 | use std::str::FromStr; 12 | use std::{env, fs}; 13 | use url::Url; 14 | 15 | /// Stage the PostgreSQL archive when the `bundled` feature is enabled so that 16 | /// it can be included in the final binary. This is useful for creating a 17 | /// self-contained binary that does not require the PostgreSQL archive to be 18 | /// downloaded at runtime. 19 | pub(crate) async fn stage_postgresql_archive() -> Result<()> { 20 | #[cfg(feature = "theseus")] 21 | let default_releases_url = postgresql_archive::configuration::theseus::URL.to_string(); 22 | #[cfg(not(feature = "theseus"))] 23 | let default_releases_url = String::new(); 24 | 25 | let releases_url = match env::var("POSTGRESQL_RELEASES_URL") { 26 | Ok(custom_url) if !default_releases_url.is_empty() => { 27 | register_custom_repository()?; 28 | custom_url 29 | } 30 | _ => { 31 | register_theseus_repository()?; 32 | default_releases_url 33 | } 34 | }; 35 | println!("PostgreSQL releases URL: {releases_url}"); 36 | let postgres_version_req = env::var("POSTGRESQL_VERSION").unwrap_or("*".to_string()); 37 | let version_req = VersionReq::from_str(postgres_version_req.as_str())?; 38 | println!("PostgreSQL version: {postgres_version_req}"); 39 | println!("Target: {}", target_triple::TARGET); 40 | 41 | let out_dir = PathBuf::from(env::var("OUT_DIR")?); 42 | println!("OUT_DIR: {:?}", out_dir); 43 | 44 | let mut archive_version_file = out_dir.clone(); 45 | archive_version_file.push("postgresql.version"); 46 | let mut archive_file = out_dir.clone(); 47 | archive_file.push("postgresql.tar.gz"); 48 | 49 | if archive_version_file.exists() && archive_file.exists() { 50 | println!("PostgreSQL archive exists: {:?}", archive_file); 51 | return Ok(()); 52 | } 53 | 54 | let (asset_version, archive) = get_archive(&releases_url, &version_req).await?; 55 | 56 | fs::write(archive_version_file.clone(), asset_version.to_string())?; 57 | let mut file = File::create(archive_file.clone())?; 58 | file.write_all(&archive)?; 59 | file.sync_data()?; 60 | println!("PostgreSQL archive written to: {:?}", archive_file); 61 | 62 | Ok(()) 63 | } 64 | 65 | fn supports_github_url(url: &str) -> postgresql_archive::Result { 66 | let parsed_url = Url::parse(url)?; 67 | let host = parsed_url.host_str().unwrap_or_default(); 68 | Ok(host.ends_with("github.com")) 69 | } 70 | 71 | fn register_custom_repository() -> Result<()> { 72 | repository::registry::register(supports_github_url, Box::new(GitHub::new))?; 73 | matcher::registry::register(supports_github_url, custom::matcher)?; 74 | Ok(()) 75 | } 76 | 77 | fn register_theseus_repository() -> Result<()> { 78 | repository::registry::register(supports_github_url, 
Box::new(GitHub::new))?; 79 | matcher::registry::register(supports_github_url, theseus::matcher)?; 80 | Ok(()) 81 | } 82 | -------------------------------------------------------------------------------- /postgresql_embedded/src/blocking/mod.rs: -------------------------------------------------------------------------------- 1 | mod postgresql; 2 | 3 | pub use postgresql::PostgreSQL; 4 | -------------------------------------------------------------------------------- /postgresql_embedded/src/blocking/postgresql.rs: -------------------------------------------------------------------------------- 1 | use crate::{Result, Settings, Status}; 2 | use std::sync::LazyLock; 3 | use tokio::runtime::Runtime; 4 | 5 | static RUNTIME: LazyLock = LazyLock::new(|| Runtime::new().unwrap()); 6 | 7 | /// `PostgreSQL` server 8 | #[derive(Clone, Debug, Default)] 9 | pub struct PostgreSQL { 10 | inner: crate::postgresql::PostgreSQL, 11 | } 12 | 13 | /// `PostgreSQL` server methods 14 | impl PostgreSQL { 15 | /// Create a new [`crate::postgresql::PostgreSQL`] instance 16 | #[must_use] 17 | pub fn new(settings: Settings) -> Self { 18 | Self { 19 | inner: crate::postgresql::PostgreSQL::new(settings), 20 | } 21 | } 22 | 23 | /// Get the [status](Status) of the `PostgreSQL` server 24 | #[must_use] 25 | pub fn status(&self) -> Status { 26 | self.inner.status() 27 | } 28 | 29 | /// Get the [settings](Settings) of the `PostgreSQL` server 30 | #[must_use] 31 | pub fn settings(&self) -> &Settings { 32 | self.inner.settings() 33 | } 34 | 35 | /// Set up the database by extracting the archive and initializing the database. 36 | /// If the installation directory already exists, the archive will not be extracted. 37 | /// If the data directory already exists, the database will not be initialized. 38 | /// 39 | /// # Errors 40 | /// 41 | /// Returns an error if the setup fails. 42 | pub fn setup(&mut self) -> Result<()> { 43 | RUNTIME 44 | .handle() 45 | .block_on(async move { self.inner.setup().await }) 46 | } 47 | 48 | /// Start the database and wait for the startup to complete. 49 | /// If the port is set to `0`, the database will be started on a random port. 50 | /// 51 | /// # Errors 52 | /// 53 | /// Returns an error if the startup fails. 54 | pub fn start(&mut self) -> Result<()> { 55 | RUNTIME 56 | .handle() 57 | .block_on(async move { self.inner.start().await }) 58 | } 59 | 60 | /// Stop the database gracefully (smart mode) and wait for the shutdown to complete. 61 | /// 62 | /// # Errors 63 | /// 64 | /// Returns an error if the shutdown fails. 65 | pub fn stop(&self) -> Result<()> { 66 | RUNTIME 67 | .handle() 68 | .block_on(async move { self.inner.stop().await }) 69 | } 70 | 71 | /// Create a new database with the given name. 72 | /// 73 | /// # Errors 74 | /// 75 | /// Returns an error if the database creation fails. 76 | pub fn create_database(&self, database_name: S) -> Result<()> 77 | where 78 | S: AsRef + std::fmt::Debug, 79 | { 80 | RUNTIME 81 | .handle() 82 | .block_on(async move { self.inner.create_database(database_name).await }) 83 | } 84 | 85 | /// Check if a database with the given name exists. 86 | /// 87 | /// # Errors 88 | /// 89 | /// Returns an error if the database existence check fails. 90 | pub fn database_exists(&self, database_name: S) -> Result 91 | where 92 | S: AsRef + std::fmt::Debug, 93 | { 94 | RUNTIME 95 | .handle() 96 | .block_on(async move { self.inner.database_exists(database_name).await }) 97 | } 98 | 99 | /// Drop a database with the given name. 
100 | /// 101 | /// # Errors 102 | /// 103 | /// Returns an error if the database drop fails. 104 | pub fn drop_database(&self, database_name: S) -> Result<()> 105 | where 106 | S: AsRef + std::fmt::Debug, 107 | { 108 | RUNTIME 109 | .handle() 110 | .block_on(async move { self.inner.drop_database(database_name).await }) 111 | } 112 | } 113 | 114 | #[cfg(test)] 115 | mod test { 116 | use super::*; 117 | use crate::VersionReq; 118 | 119 | #[test] 120 | fn test_postgresql() -> Result<()> { 121 | let version = VersionReq::parse("=16.4.0")?; 122 | let settings = Settings { 123 | version, 124 | ..Settings::default() 125 | }; 126 | let postgresql = PostgreSQL::new(settings); 127 | let initial_statuses = [Status::NotInstalled, Status::Installed, Status::Stopped]; 128 | assert!(initial_statuses.contains(&postgresql.status())); 129 | Ok(()) 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /postgresql_embedded/src/error.rs: -------------------------------------------------------------------------------- 1 | use std::string::FromUtf8Error; 2 | 3 | /// `PostgreSQL` embedded result type 4 | pub type Result = core::result::Result; 5 | 6 | /// Errors that can occur when using `PostgreSQL` embedded 7 | #[derive(Debug, thiserror::Error)] 8 | pub enum Error { 9 | /// Error when `PostgreSQL` archive operations fail 10 | #[error(transparent)] 11 | ArchiveError(postgresql_archive::Error), 12 | /// Error when a command fails 13 | #[error("Command error: stdout={stdout}; stderr={stderr}")] 14 | CommandError { stdout: String, stderr: String }, 15 | /// Error when the database could not be created 16 | #[error("{0}")] 17 | CreateDatabaseError(String), 18 | /// Error when accessing the database 19 | #[error(transparent)] 20 | DatabaseError(#[from] sqlx::Error), 21 | /// Error when determining if the database exists 22 | #[error("{0}")] 23 | DatabaseExistsError(String), 24 | /// Error when the database could not be initialized 25 | #[error("{0}")] 26 | DatabaseInitializationError(String), 27 | /// Error when the database could not be started 28 | #[error("{0}")] 29 | DatabaseStartError(String), 30 | /// Error when the database could not be stopped 31 | #[error("{0}")] 32 | DatabaseStopError(String), 33 | /// Error when the database could not be dropped 34 | #[error("{0}")] 35 | DropDatabaseError(String), 36 | /// Error when an invalid URL is provided 37 | #[error("Invalid URL: {url}; {message}")] 38 | InvalidUrl { url: String, message: String }, 39 | /// Error when IO operations fail 40 | #[error("{0}")] 41 | IoError(String), 42 | /// Parse error 43 | #[error(transparent)] 44 | ParseError(#[from] semver::Error), 45 | } 46 | 47 | /// Convert `PostgreSQL` [archive errors](postgresql_archive::Error) to an [embedded errors](Error::ArchiveError) 48 | impl From for Error { 49 | fn from(error: postgresql_archive::Error) -> Self { 50 | Error::ArchiveError(error) 51 | } 52 | } 53 | 54 | /// Convert [standard IO errors](std::io::Error) to a [embedded errors](Error::IoError) 55 | impl From for Error { 56 | fn from(error: std::io::Error) -> Self { 57 | Error::IoError(error.to_string()) 58 | } 59 | } 60 | 61 | /// Convert [utf8 errors](FromUtf8Error) to [embedded errors](Error::IoError) 62 | impl From for Error { 63 | fn from(error: FromUtf8Error) -> Self { 64 | Error::IoError(error.to_string()) 65 | } 66 | } 67 | 68 | /// These are relatively low value tests; they are here to reduce the coverage gap and 69 | /// ensure that the error conversions are working as expected. 
70 | #[cfg(test)] 71 | mod test { 72 | use super::*; 73 | 74 | #[test] 75 | fn test_from_archive_error() { 76 | let archive_error = postgresql_archive::Error::VersionNotFound("test".to_string()); 77 | let error = Error::from(archive_error); 78 | assert_eq!(error.to_string(), "version not found for 'test'"); 79 | } 80 | 81 | #[test] 82 | fn test_from_io_error() { 83 | let io_error = std::io::Error::other("test"); 84 | let error = Error::from(io_error); 85 | assert_eq!(error.to_string(), "test"); 86 | } 87 | 88 | #[test] 89 | fn test_from_utf8_error() { 90 | let invalid_utf8: Vec = vec![0, 159, 146, 150]; 91 | let from_utf8_error = String::from_utf8(invalid_utf8).expect_err("from utf8 error"); 92 | let error = Error::from(from_utf8_error); 93 | assert_eq!( 94 | error.to_string(), 95 | "invalid utf-8 sequence of 1 bytes from index 1" 96 | ); 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /postgresql_embedded/tests/blocking.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "blocking")] 2 | use postgresql_embedded::blocking::PostgreSQL; 3 | #[cfg(feature = "blocking")] 4 | use postgresql_embedded::{Result, Status}; 5 | #[cfg(feature = "blocking")] 6 | use test_log::test; 7 | 8 | #[cfg(feature = "blocking")] 9 | #[test] 10 | fn test_lifecycle() -> Result<()> { 11 | let mut postgresql = PostgreSQL::default(); 12 | let settings = postgresql.settings(); 13 | 14 | // Verify that an ephemeral instance is created by default 15 | assert_eq!(0, settings.port); 16 | assert!(settings.temporary); 17 | 18 | let initial_statuses = [Status::NotInstalled, Status::Installed, Status::Stopped]; 19 | assert!(initial_statuses.contains(&postgresql.status())); 20 | 21 | postgresql.setup()?; 22 | assert_eq!(Status::Stopped, postgresql.status()); 23 | 24 | postgresql.start()?; 25 | assert_eq!(Status::Started, postgresql.status()); 26 | 27 | let database_name = "test"; 28 | assert!(!postgresql.database_exists(database_name)?); 29 | postgresql.create_database(database_name)?; 30 | assert!(postgresql.database_exists(database_name)?); 31 | postgresql.drop_database(database_name)?; 32 | 33 | postgresql.stop()?; 34 | assert_eq!(Status::Stopped, postgresql.status()); 35 | 36 | Ok(()) 37 | } 38 | -------------------------------------------------------------------------------- /postgresql_embedded/tests/dump_command.rs: -------------------------------------------------------------------------------- 1 | use postgresql_commands::pg_dump::PgDumpBuilder; 2 | use postgresql_commands::psql::PsqlBuilder; 3 | use postgresql_commands::{CommandBuilder, CommandExecutor}; 4 | use postgresql_embedded::PostgreSQL; 5 | use std::fs; 6 | use tempfile::NamedTempFile; 7 | use test_log::test; 8 | 9 | #[test(tokio::test)] 10 | async fn dump_command() -> anyhow::Result<()> { 11 | let mut postgresql = PostgreSQL::default(); 12 | 13 | postgresql.setup().await?; 14 | postgresql.start().await?; 15 | let settings = postgresql.settings(); 16 | 17 | let database_name = "test"; 18 | postgresql.create_database(database_name).await?; 19 | 20 | let mut psql = PsqlBuilder::from(settings) 21 | .command("CREATE TABLE person42 (id INTEGER, name VARCHAR(20))") 22 | .dbname(database_name) 23 | .no_psqlrc() 24 | .no_align() 25 | .tuples_only() 26 | .build(); 27 | let (_stdout, _stderr) = psql.execute()?; 28 | 29 | let temp_file = NamedTempFile::new()?; 30 | let file = temp_file.as_ref(); 31 | let mut pgdump = PgDumpBuilder::from(settings) 32 | 
.dbname(database_name) 33 | .schema_only() 34 | .file(file.to_string_lossy().to_string()) 35 | .build(); 36 | let (_stdout, _stderr) = pgdump.execute()?; 37 | 38 | let contents = fs::read_to_string(file)?; 39 | assert!(contents.contains("person42")); 40 | 41 | Ok(()) 42 | } 43 | -------------------------------------------------------------------------------- /postgresql_embedded/tests/environment_variables.rs: -------------------------------------------------------------------------------- 1 | use postgresql_embedded::{PostgreSQL, Status}; 2 | use std::env; 3 | use test_log::test; 4 | 5 | #[test(tokio::test)] 6 | async fn lifecycle() -> anyhow::Result<()> { 7 | // Explicitly set PGDATABASE environment variable to verify that the library behavior 8 | // is not affected by the environment 9 | unsafe { 10 | env::set_var("PGDATABASE", "foodb"); 11 | } 12 | 13 | let mut postgresql = PostgreSQL::default(); 14 | 15 | postgresql.setup().await?; 16 | postgresql.start().await?; 17 | 18 | let database_name = "test"; 19 | assert!(!postgresql.database_exists(database_name).await?); 20 | postgresql.create_database(database_name).await?; 21 | assert!(postgresql.database_exists(database_name).await?); 22 | postgresql.drop_database(database_name).await?; 23 | 24 | postgresql.stop().await?; 25 | assert_eq!(Status::Stopped, postgresql.status()); 26 | Ok(()) 27 | } 28 | -------------------------------------------------------------------------------- /postgresql_embedded/tests/start_config.rs: -------------------------------------------------------------------------------- 1 | use postgresql_embedded::{BOOTSTRAP_DATABASE, PostgreSQL, Settings}; 2 | use sqlx::{PgPool, Row}; 3 | use std::collections::HashMap; 4 | use test_log::test; 5 | 6 | #[test(tokio::test)] 7 | async fn start_config() -> anyhow::Result<()> { 8 | let configuration = HashMap::from([("max_connections".to_string(), "42".to_string())]); 9 | let settings = Settings { 10 | configuration, 11 | ..Default::default() 12 | }; 13 | let mut postgresql = PostgreSQL::new(settings); 14 | 15 | postgresql.setup().await?; 16 | postgresql.start().await?; 17 | let settings = postgresql.settings(); 18 | let database_url = settings.url(BOOTSTRAP_DATABASE); 19 | let pool = PgPool::connect(database_url.as_str()).await?; 20 | let row = sqlx::query("SELECT setting FROM pg_settings WHERE name = $1") 21 | .bind("max_connections".to_string()) 22 | .fetch_one(&pool) 23 | .await?; 24 | let max_connections: String = row.get(0); 25 | pool.close().await; 26 | 27 | assert_eq!("42".to_string(), max_connections); 28 | 29 | Ok(()) 30 | } 31 | -------------------------------------------------------------------------------- /postgresql_embedded/tests/zonky.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "zonky")] 2 | use postgresql_archive::configuration::zonky; 3 | #[cfg(feature = "zonky")] 4 | use postgresql_embedded::{PostgreSQL, Result, Settings, Status}; 5 | 6 | #[tokio::test] 7 | #[cfg(feature = "zonky")] 8 | async fn test_zonky() -> Result<()> { 9 | let settings = Settings { 10 | releases_url: zonky::URL.to_string(), 11 | ..Default::default() 12 | }; 13 | let mut postgresql = PostgreSQL::new(settings); 14 | let settings = postgresql.settings(); 15 | 16 | // Verify that an ephemeral instance is created by default 17 | assert_eq!(0, settings.port); 18 | assert!(settings.temporary); 19 | 20 | let initial_statuses = [Status::NotInstalled, Status::Installed, Status::Stopped]; 21 | 
assert!(initial_statuses.contains(&postgresql.status())); 22 | 23 | postgresql.setup().await?; 24 | assert_eq!(Status::Stopped, postgresql.status()); 25 | 26 | postgresql.start().await?; 27 | assert_eq!(Status::Started, postgresql.status()); 28 | 29 | let database_name = "test"; 30 | assert!(!postgresql.database_exists(database_name).await?); 31 | postgresql.create_database(database_name).await?; 32 | assert!(postgresql.database_exists(database_name).await?); 33 | postgresql.drop_database(database_name).await?; 34 | 35 | postgresql.stop().await?; 36 | assert_eq!(Status::Stopped, postgresql.status()); 37 | Ok(()) 38 | } 39 | -------------------------------------------------------------------------------- /postgresql_extensions/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors.workspace = true 3 | categories.workspace = true 4 | description = "A library for managing PostgreSQL extensions" 5 | edition.workspace = true 6 | keywords.workspace = true 7 | license.workspace = true 8 | name = "postgresql_extensions" 9 | repository = "https://github.com/theseus-rs/postgresql-embedded" 10 | rust-version.workspace = true 11 | version.workspace = true 12 | 13 | [dependencies] 14 | async-trait = { workspace = true } 15 | postgresql_archive = { path = "../postgresql_archive", version = "0.18.5", default-features = false } 16 | postgresql_commands = { path = "../postgresql_commands", version = "0.18.5", default-features = false } 17 | regex-lite = { workspace = true } 18 | reqwest = { workspace = true, default-features = false, features = ["json"] } 19 | semver = { workspace = true, features = ["serde"] } 20 | serde = { workspace = true, features = ["derive"] } 21 | serde_json = { workspace = true, optional = true } 22 | target-triple = { workspace = true, optional = true } 23 | tempfile = { workspace = true } 24 | thiserror = { workspace = true } 25 | tokio = { workspace = true, features = ["full"], optional = true } 26 | tracing = { workspace = true, features = ["log"] } 27 | url = { workspace = true } 28 | 29 | [dev-dependencies] 30 | anyhow = { workspace = true } 31 | postgresql_embedded = { path = "../postgresql_embedded", version = "0.18.5" } 32 | test-log = { workspace = true } 33 | tokio = { workspace = true, features = ["full"] } 34 | 35 | [features] 36 | default = [ 37 | "native-tls", 38 | "portal-corp", 39 | "steampipe", 40 | "tensor-chord", 41 | ] 42 | blocking = ["tokio"] 43 | portal-corp = [ 44 | "dep:target-triple", 45 | "postgresql_archive/github", 46 | ] 47 | steampipe = [ 48 | "dep:serde_json", 49 | "postgresql_archive/github", 50 | ] 51 | tensor-chord = [ 52 | "dep:target-triple", 53 | "postgresql_archive/github", 54 | ] 55 | tokio = [ 56 | "postgresql_commands/tokio", 57 | "dep:tokio" 58 | ] 59 | native-tls = [ 60 | "postgresql_archive/native-tls", 61 | "reqwest/native-tls", 62 | ] 63 | rustls = [ 64 | "postgresql_archive/rustls", 65 | "reqwest/rustls-tls-native-roots", 66 | ] 67 | -------------------------------------------------------------------------------- /postgresql_extensions/README.md: -------------------------------------------------------------------------------- 1 | # PostgreSQL Extensions 2 | 3 | [![ci](https://github.com/theseus-rs/postgresql-embedded/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/theseus-rs/postgresql-embedded/actions/workflows/ci.yml) 4 | [![Documentation](https://docs.rs/postgresql_extensions/badge.svg)](https://docs.rs/postgresql_extensions) 5 | [![Code 
Coverage](https://codecov.io/gh/theseus-rs/postgresql-embedded/branch/main/graph/badge.svg)](https://codecov.io/gh/theseus-rs/postgresql-embedded) 6 | [![Benchmarks](https://img.shields.io/badge/%F0%9F%90%B0_bencher-enabled-6ec241)](https://bencher.dev/perf/theseus-rs-postgresql-embedded) 7 | [![Latest version](https://img.shields.io/crates/v/postgresql_extensions.svg)](https://crates.io/crates/postgresql_extensions) 8 | [![License](https://img.shields.io/crates/l/postgresql_extensions?)](https://github.com/theseus-rs/postgresql-embedded/tree/main/postgresql_extensions#license) 9 | [![Semantic Versioning](https://img.shields.io/badge/%E2%9A%99%EF%B8%8F_SemVer-2.0.0-blue)](https://semver.org/spec/v2.0.0.html) 10 | 11 | A configurable library for managing PostgreSQL extensions. 12 | 13 | ## Examples 14 | 15 | ### Asynchronous API 16 | 17 | ```rust 18 | use postgresql_extensions::{get_available_extensions, Result}; 19 | 20 | #[tokio::main] 21 | async fn main() -> Result<()> { 22 | let extensions = get_available_extensions().await?; 23 | Ok(()) 24 | } 25 | ``` 26 | 27 | ### Synchronous API 28 | 29 | ```rust 30 | use postgresql_extensions::Result; 31 | use postgresql_extensions::blocking::get_available_extensions; 32 | 33 | fn main() -> Result<()> { 34 | let extensions = get_available_extensions()?; 35 | Ok(()) 36 | } 37 | ``` 38 | An end-to-end sketch that installs an extension into an embedded `PostgreSQL` server is shown at the end of this README. 39 | ## Feature flags 40 | 41 | postgresql_extensions uses [feature flags] to address compile time and binary size 42 | concerns. 43 | 44 | The following features are available: 45 | 46 | | Name | Description | Default? | 47 | |--------------|----------------------------|----------| 48 | | `blocking` | Enables the blocking API | No | 49 | | `native-tls` | Enables native-tls support | Yes | 50 | | `rustls` | Enables rustls support | No | 51 | 52 | ### Repositories 53 | 54 | | Name | Description | Default? | 55 | |----------------|-------------------------------------------|----------| 56 | | `portal-corp` | Enables PortalCorp PostgreSQL extensions | Yes | 57 | | `steampipe` | Enables Steampipe PostgreSQL extensions | Yes | 58 | | `tensor-chord` | Enables TensorChord PostgreSQL extensions | Yes | 59 | 60 | ## Supported platforms 61 | 62 | `postgresql_extensions` provides implementations for the following: 63 | 64 | * [steampipe/repositories](https://github.com/orgs/turbot/repositories) 65 | * [tensorchord/pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) 66 | 67 | ## Safety 68 | 69 | This crate uses `#![forbid(unsafe_code)]` to ensure everything is implemented in 100% safe Rust. 70 | 71 | ## License 72 | 73 | Licensed under either of 74 | 75 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or https://www.apache.org/licenses/LICENSE-2.0) 76 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or https://opensource.org/licenses/MIT) 77 | 78 | at your option. 79 | 80 | ## Contribution 81 | 82 | Unless you explicitly state otherwise, any contribution intentionally submitted 83 | for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any 84 | additional terms or conditions.
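## Example: installing an extension

The following sketch is adapted from this repository's integration tests; it assumes the default `tensor-chord` repository feature is enabled and uses `postgresql_embedded` (with `anyhow` for error handling) to provide the server settings. The extension name and version shown here are illustrative.

```rust
use postgresql_embedded::{PostgreSQL, VersionReq};
use postgresql_extensions::{get_installed_extensions, install, uninstall};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Set up an embedded PostgreSQL server to install the extension into.
    let mut postgresql = PostgreSQL::default();
    postgresql.setup().await?;
    let settings = postgresql.settings();

    // Install the pgvecto.rs extension from the tensor-chord repository.
    let version = VersionReq::parse("=0.3.0")?;
    install(settings, "tensor-chord", "pgvecto.rs", &version).await?;
    assert!(!get_installed_extensions(settings).await?.is_empty());

    // Uninstall it again.
    uninstall(settings, "tensor-chord", "pgvecto.rs").await?;
    Ok(())
}
```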
85 | -------------------------------------------------------------------------------- /postgresql_extensions/src/blocking/extensions.rs: -------------------------------------------------------------------------------- 1 | #![allow(dead_code)] 2 | use crate::model::AvailableExtension; 3 | use crate::{InstalledExtension, Result}; 4 | use postgresql_commands::Settings; 5 | use semver::VersionReq; 6 | use std::sync::LazyLock; 7 | use tokio::runtime::Runtime; 8 | 9 | static RUNTIME: LazyLock<Runtime> = LazyLock::new(|| Runtime::new().unwrap()); 10 | 11 | /// Gets the available extensions. 12 | /// 13 | /// # Errors 14 | /// * If an error occurs while getting the extensions. 15 | pub fn get_available_extensions() -> Result<Vec<AvailableExtension>> { 16 | RUNTIME 17 | .handle() 18 | .block_on(async move { crate::get_available_extensions().await }) 19 | } 20 | 21 | /// Gets the installed extensions. 22 | /// 23 | /// # Errors 24 | /// * If an error occurs while getting the installed extensions. 25 | pub fn get_installed_extensions(settings: &impl Settings) -> Result<Vec<InstalledExtension>> { 26 | RUNTIME 27 | .handle() 28 | .block_on(async move { crate::get_installed_extensions(settings).await }) 29 | } 30 | 31 | /// Installs the extension with the specified `namespace`, `name`, and `version`. 32 | /// 33 | /// # Errors 34 | /// * If an error occurs while installing the extension. 35 | pub fn install( 36 | settings: &impl Settings, 37 | namespace: &str, 38 | name: &str, 39 | version: &VersionReq, 40 | ) -> Result<()> { 41 | RUNTIME 42 | .handle() 43 | .block_on(async move { crate::install(settings, namespace, name, version).await }) 44 | } 45 | 46 | /// Uninstalls the extension with the specified `namespace` and `name`. 47 | /// 48 | /// # Errors 49 | /// * If an error occurs while uninstalling the extension. 50 | pub fn uninstall(settings: &impl Settings, namespace: &str, name: &str) -> Result<()> { 51 | RUNTIME 52 | .handle() 53 | .block_on(async move { crate::uninstall(settings, namespace, name).await }) 54 | } 55 | 56 | #[cfg(test)] 57 | mod tests { 58 | use super::*; 59 | use crate::TestSettings; 60 | 61 | #[test] 62 | fn test_get_installed_extensions() -> Result<()> { 63 | let extensions = get_installed_extensions(&TestSettings)?; 64 | assert!(extensions.is_empty()); 65 | Ok(()) 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /postgresql_extensions/src/blocking/mod.rs: -------------------------------------------------------------------------------- 1 | mod extensions; 2 | 3 | pub use extensions::{get_available_extensions, get_installed_extensions, install, uninstall}; 4 | -------------------------------------------------------------------------------- /postgresql_extensions/src/error.rs: -------------------------------------------------------------------------------- 1 | /// PostgreSQL extensions result type 2 | pub type Result<T, E = Error> = core::result::Result<T, E>; 3 | 4 | /// PostgreSQL extensions errors 5 | #[derive(Debug, thiserror::Error)] 6 | pub enum Error { 7 | /// Archive error 8 | #[error(transparent)] 9 | ArchiveError(#[from] postgresql_archive::Error), 10 | /// Error when a command fails 11 | #[error(transparent)] 12 | CommandError(#[from] postgresql_commands::Error), 13 | /// Extension not found 14 | #[error("extension not found '{0}'")] 15 | ExtensionNotFound(String), 16 | /// Error when an IO operation fails 17 | #[error("{0}")] 18 | IoError(String), 19 | /// Poisoned lock 20 | #[error("poisoned lock '{0}'")] 21 | PoisonedLock(String), 22 | /// Error when a regex operation fails 23 | 
#[error(transparent)] 24 | RegexError(#[from] regex_lite::Error), 25 | /// Error when a deserialization or serialization operation fails 26 | #[error(transparent)] 27 | SerdeError(#[from] serde_json::Error), 28 | /// Unsupported namespace 29 | #[error("unsupported namespace '{0}'")] 30 | UnsupportedNamespace(String), 31 | } 32 | -------------------------------------------------------------------------------- /postgresql_extensions/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! # PostgreSQL Extensions 2 | //! 3 | //! [![ci](https://github.com/theseus-rs/postgresql-embedded/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/theseus-rs/postgresql-embedded/actions/workflows/ci.yml) 4 | //! [![Documentation](https://docs.rs/postgresql_extensions/badge.svg)](https://docs.rs/postgresql_extensions) 5 | //! [![Code Coverage](https://codecov.io/gh/theseus-rs/postgresql-embedded/branch/main/graph/badge.svg)](https://codecov.io/gh/theseus-rs/postgresql-embedded) 6 | //! [![Benchmarks](https://img.shields.io/badge/%F0%9F%90%B0_bencher-enabled-6ec241)](https://bencher.dev/perf/theseus-rs-postgresql-embedded) 7 | //! [![Latest version](https://img.shields.io/crates/v/postgresql_extensions.svg)](https://crates.io/crates/postgresql_extensions) 8 | //! [![License](https://img.shields.io/crates/l/postgresql_extensions?)](https://github.com/theseus-rs/postgresql-embedded/tree/main/postgresql_extensions#license) 9 | //! [![Semantic Versioning](https://img.shields.io/badge/%E2%9A%99%EF%B8%8F_SemVer-2.0.0-blue)](https://semver.org/spec/v2.0.0.html) 10 | //! 11 | //! A configurable library for managing PostgreSQL extensions. 12 | //! 13 | //! ## Examples 14 | //! 15 | //! ### Asynchronous API 16 | //! 17 | //! ```rust 18 | //! use postgresql_extensions::{get_available_extensions, Result}; 19 | //! 20 | //! #[tokio::main] 21 | //! async fn main() -> Result<()> { 22 | //! let extensions = get_available_extensions().await?; 23 | //! Ok(()) 24 | //! } 25 | //! ``` 26 | //! 27 | //! ### Synchronous API 28 | //! 29 | //! ```rust 30 | //! #[cfg(feature = "blocking")] { 31 | //! use postgresql_extensions::Result; 32 | //! use postgresql_extensions::blocking::get_available_extensions; 33 | //! 34 | //! let extensions = get_available_extensions().unwrap(); 35 | //! } 36 | //! ``` 37 | //! 38 | //! ## Feature flags 39 | //! 40 | //! postgresql_extensions uses [feature flags] to address compile time and binary size 41 | //! uses. 42 | //! 43 | //! The following features are available: 44 | //! 45 | //! | Name | Description | Default? | 46 | //! |--------------|----------------------------|----------| 47 | //! | `blocking` | Enables the blocking API | No | 48 | //! | `native-tls` | Enables native-tls support | Yes | 49 | //! | `rustls-tls` | Enables rustls-tls support | No | 50 | //! 51 | //! ### Repositories 52 | //! 53 | //! | Name | Description | Default? | 54 | //! |----------------|-------------------------------------------|----------| 55 | //! | `portal-corp` | Enables PortalCorp PostgreSQL extensions | Yes | 56 | //! | `steampipe` | Enables Steampipe PostgreSQL extensions | Yes | 57 | //! | `tensor-chord` | Enables TensorChord PostgreSQL extensions | Yes | 58 | //! 59 | //! ## Supported platforms 60 | //! 61 | //! `postgresql_extensions` provides implementations for the following: 62 | //! 63 | //! * [steampipe/repositories](https://github.com/orgs/turbot/repositories) 64 | //! 
* [tensor-chord/pgvecto.rs](https://github.com/tensor-chord/pgvecto.rs) 65 | //! 66 | //! ## Safety 67 | //! 68 | //! This crate uses `#![forbid(unsafe_code)]` to ensure everything is implemented in 100% safe Rust. 69 | //! 70 | //! ## License 71 | //! 72 | //! Licensed under either of 73 | //! 74 | //! * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or ) 75 | //! * MIT license ([LICENSE-MIT](LICENSE-MIT) or ) 76 | //! 77 | //! at your option. 78 | //! 79 | //! ## Contribution 80 | //! 81 | //! Unless you explicitly state otherwise, any contribution intentionally submitted 82 | //! for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any 83 | //! additional terms or conditions. 84 | 85 | #![forbid(unsafe_code)] 86 | #![forbid(clippy::allow_attributes)] 87 | #![deny(clippy::pedantic)] 88 | #![allow(clippy::doc_markdown)] 89 | #![allow(clippy::module_name_repetitions)] 90 | 91 | #[cfg(feature = "blocking")] 92 | pub mod blocking; 93 | mod error; 94 | pub mod extensions; 95 | mod matcher; 96 | mod model; 97 | pub mod repository; 98 | 99 | pub use error::{Error, Result}; 100 | pub use extensions::{get_available_extensions, get_installed_extensions, install, uninstall}; 101 | pub use matcher::{matcher, tar_gz_matcher, zip_matcher}; 102 | #[cfg(test)] 103 | pub use model::TestSettings; 104 | pub use model::{AvailableExtension, InstalledConfiguration, InstalledExtension}; 105 | pub use semver::{Version, VersionReq}; 106 | -------------------------------------------------------------------------------- /postgresql_extensions/src/repository/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod model; 2 | #[cfg(feature = "portal-corp")] 3 | pub mod portal_corp; 4 | pub mod registry; 5 | #[cfg(feature = "steampipe")] 6 | pub mod steampipe; 7 | #[cfg(feature = "tensor-chord")] 8 | pub mod tensor_chord; 9 | 10 | pub use model::Repository; 11 | -------------------------------------------------------------------------------- /postgresql_extensions/src/repository/model.rs: -------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use crate::model::AvailableExtension; 3 | use async_trait::async_trait; 4 | use semver::{Version, VersionReq}; 5 | use std::fmt::Debug; 6 | use std::path::PathBuf; 7 | 8 | /// A trait for archive repository implementations. 9 | #[async_trait] 10 | pub trait Repository: Debug + Send + Sync { 11 | /// Gets the name of the repository. 12 | fn name(&self) -> &str; 13 | 14 | /// Gets the available extensions. 15 | /// 16 | /// # Errors 17 | /// * if an error occurs while getting the extensions. 18 | async fn get_available_extensions(&self) -> Result>; 19 | 20 | /// Gets the archive for the extension with the specified `name` and `version`. 21 | /// 22 | /// # Errors 23 | /// * if an error occurs while getting the archive. 24 | async fn get_archive( 25 | &self, 26 | postgresql_version: &str, 27 | name: &str, 28 | version: &VersionReq, 29 | ) -> Result<(Version, Vec)>; 30 | 31 | /// Installs the extension with the specified `name` and `version`. 32 | /// 33 | /// # Errors 34 | /// * if an error occurs while installing the extension. 
35 | async fn install( 36 | &self, 37 | name: &str, 38 | library_dir: PathBuf, 39 | extension_dir: PathBuf, 40 | archive: &[u8], 41 | ) -> Result>; 42 | } 43 | -------------------------------------------------------------------------------- /postgresql_extensions/src/repository/portal_corp/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod repository; 2 | 3 | pub const URL: &str = "https://github.com/portalcorp"; 4 | -------------------------------------------------------------------------------- /postgresql_extensions/src/repository/portal_corp/repository.rs: -------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use crate::matcher::zip_matcher; 3 | use crate::model::AvailableExtension; 4 | use crate::repository::Repository; 5 | use crate::repository::portal_corp::URL; 6 | use async_trait::async_trait; 7 | use postgresql_archive::extractor::{ExtractDirectories, zip_extract}; 8 | use postgresql_archive::get_archive; 9 | use postgresql_archive::repository::github::repository::GitHub; 10 | use regex_lite::Regex; 11 | use semver::{Version, VersionReq}; 12 | use std::fmt::Debug; 13 | use std::path::PathBuf; 14 | 15 | /// PortalCorp repository. 16 | #[derive(Debug)] 17 | pub struct PortalCorp; 18 | 19 | impl PortalCorp { 20 | /// Creates a new PortalCorp repository. 21 | /// 22 | /// # Errors 23 | /// * If the repository cannot be created 24 | #[expect(clippy::new_ret_no_self)] 25 | pub fn new() -> Result> { 26 | Ok(Box::new(Self)) 27 | } 28 | 29 | /// Initializes the repository. 30 | /// 31 | /// # Errors 32 | /// * If the repository cannot be initialized. 33 | pub fn initialize() -> Result<()> { 34 | postgresql_archive::matcher::registry::register( 35 | |url| Ok(url.starts_with(URL)), 36 | zip_matcher, 37 | )?; 38 | postgresql_archive::repository::registry::register( 39 | |url| Ok(url.starts_with(URL)), 40 | Box::new(GitHub::new), 41 | )?; 42 | Ok(()) 43 | } 44 | } 45 | 46 | #[async_trait] 47 | impl Repository for PortalCorp { 48 | fn name(&self) -> &'static str { 49 | "portal-corp" 50 | } 51 | 52 | async fn get_available_extensions(&self) -> Result> { 53 | let extensions = vec![AvailableExtension::new( 54 | self.name(), 55 | "pgvector_compiled", 56 | "Precompiled OS packages for pgvector", 57 | )]; 58 | Ok(extensions) 59 | } 60 | 61 | async fn get_archive( 62 | &self, 63 | postgresql_version: &str, 64 | name: &str, 65 | version: &VersionReq, 66 | ) -> Result<(Version, Vec)> { 67 | let url = format!("{URL}/{name}?postgresql_version={postgresql_version}"); 68 | let archive = get_archive(url.as_str(), version).await?; 69 | Ok(archive) 70 | } 71 | 72 | async fn install( 73 | &self, 74 | _name: &str, 75 | library_dir: PathBuf, 76 | extension_dir: PathBuf, 77 | archive: &[u8], 78 | ) -> Result> { 79 | let mut extract_directories = ExtractDirectories::default(); 80 | extract_directories.add_mapping(Regex::new(r"\.(dll|dylib|so)$")?, library_dir); 81 | extract_directories.add_mapping(Regex::new(r"\.(control|sql)$")?, extension_dir); 82 | let bytes = &archive.to_vec(); 83 | let files = zip_extract(bytes, extract_directories)?; 84 | Ok(files) 85 | } 86 | } 87 | 88 | #[cfg(test)] 89 | mod tests { 90 | use super::*; 91 | use crate::repository::Repository; 92 | 93 | #[test] 94 | fn test_name() { 95 | let repository = PortalCorp; 96 | assert_eq!("portal-corp", repository.name()); 97 | } 98 | 99 | #[tokio::test] 100 | async fn test_get_available_extensions() -> Result<()> { 101 | let repository = 
PortalCorp; 102 | let extensions = repository.get_available_extensions().await?; 103 | let extension = &extensions[0]; 104 | 105 | assert_eq!("pgvector_compiled", extension.name()); 106 | assert_eq!( 107 | "Precompiled OS packages for pgvector", 108 | extension.description() 109 | ); 110 | Ok(()) 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /postgresql_extensions/src/repository/steampipe/mod.rs: -------------------------------------------------------------------------------- 1 | mod extensions; 2 | pub mod repository; 3 | 4 | pub const URL: &str = "https://github.com/turbot"; 5 | -------------------------------------------------------------------------------- /postgresql_extensions/src/repository/steampipe/repository.rs: -------------------------------------------------------------------------------- 1 | use crate::Error::ExtensionNotFound; 2 | use crate::Result; 3 | use crate::matcher::tar_gz_matcher; 4 | use crate::model::AvailableExtension; 5 | use crate::repository::steampipe::URL; 6 | use crate::repository::{Repository, steampipe}; 7 | use async_trait::async_trait; 8 | use postgresql_archive::extractor::{ExtractDirectories, tar_gz_extract}; 9 | use postgresql_archive::get_archive; 10 | use postgresql_archive::repository::github::repository::GitHub; 11 | use regex_lite::Regex; 12 | use semver::{Version, VersionReq}; 13 | use std::fmt::Debug; 14 | use std::path::PathBuf; 15 | 16 | /// Steampipe repository. 17 | #[derive(Debug)] 18 | pub struct Steampipe; 19 | 20 | impl Steampipe { 21 | /// Creates a new Steampipe repository. 22 | /// 23 | /// # Errors 24 | /// * If the repository cannot be created 25 | #[expect(clippy::new_ret_no_self)] 26 | pub fn new() -> Result> { 27 | Ok(Box::new(Self)) 28 | } 29 | 30 | /// Initializes the repository. 31 | /// 32 | /// # Errors 33 | /// * If the repository cannot be initialized. 
34 | pub fn initialize() -> Result<()> { 35 | postgresql_archive::matcher::registry::register( 36 | |url| Ok(url.starts_with(URL)), 37 | tar_gz_matcher, 38 | )?; 39 | postgresql_archive::repository::registry::register( 40 | |url| Ok(url.starts_with(URL)), 41 | Box::new(GitHub::new), 42 | )?; 43 | Ok(()) 44 | } 45 | } 46 | 47 | #[async_trait] 48 | impl Repository for Steampipe { 49 | fn name(&self) -> &'static str { 50 | "steampipe" 51 | } 52 | 53 | async fn get_available_extensions(&self) -> Result> { 54 | let mut extensions = Vec::new(); 55 | for steampipe_extension in steampipe::extensions::get() { 56 | let extension = AvailableExtension::new( 57 | self.name(), 58 | steampipe_extension.name.as_str(), 59 | steampipe_extension.description.as_str(), 60 | ); 61 | 62 | extensions.push(extension); 63 | } 64 | Ok(extensions) 65 | } 66 | 67 | async fn get_archive( 68 | &self, 69 | postgresql_version: &str, 70 | name: &str, 71 | version: &VersionReq, 72 | ) -> Result<(Version, Vec)> { 73 | let Some(extension) = steampipe::extensions::get() 74 | .iter() 75 | .find(|extension| extension.name == name) 76 | else { 77 | let extension = format!("{}:{}:{}", self.name(), name, version); 78 | return Err(ExtensionNotFound(extension)); 79 | }; 80 | let url = format!("{}?postgresql_version={postgresql_version}", extension.url); 81 | let archive = get_archive(url.as_str(), version).await?; 82 | Ok(archive) 83 | } 84 | 85 | async fn install( 86 | &self, 87 | _name: &str, 88 | library_dir: PathBuf, 89 | extension_dir: PathBuf, 90 | archive: &[u8], 91 | ) -> Result> { 92 | let mut extract_directories = ExtractDirectories::default(); 93 | extract_directories.add_mapping(Regex::new(r"\.(dll|dylib|so)$")?, library_dir); 94 | extract_directories.add_mapping(Regex::new(r"\.(control|sql)$")?, extension_dir); 95 | let bytes = &archive.to_vec(); 96 | let files = tar_gz_extract(bytes, extract_directories)?; 97 | Ok(files) 98 | } 99 | } 100 | 101 | #[cfg(test)] 102 | mod tests { 103 | use super::*; 104 | use crate::repository::Repository; 105 | 106 | #[test] 107 | fn test_name() { 108 | let repository = Steampipe; 109 | assert_eq!("steampipe", repository.name()); 110 | } 111 | 112 | #[tokio::test] 113 | async fn test_get_available_extensions() -> Result<()> { 114 | let repository = Steampipe; 115 | let extensions = repository.get_available_extensions().await?; 116 | let extension = &extensions[0]; 117 | 118 | assert_eq!("abuseipdb", extension.name()); 119 | assert_eq!( 120 | "Steampipe plugin to query IP address abuse data and more from AbuseIPDB.", 121 | extension.description() 122 | ); 123 | assert_eq!(143, extensions.len()); 124 | Ok(()) 125 | } 126 | 127 | #[tokio::test] 128 | async fn test_get_archive_error() -> anyhow::Result<()> { 129 | let repository = Steampipe; 130 | let postgresql_version = "15.7"; 131 | let name = "does-not-exist"; 132 | let version = VersionReq::parse("=0.12.0")?; 133 | let result = repository 134 | .get_archive(postgresql_version, name, &version) 135 | .await; 136 | assert!(result.is_err()); 137 | Ok(()) 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /postgresql_extensions/src/repository/tensor_chord/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod repository; 2 | 3 | pub const URL: &str = "https://github.com/tensorchord"; 4 | -------------------------------------------------------------------------------- /postgresql_extensions/src/repository/tensor_chord/repository.rs: 
-------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use crate::matcher::zip_matcher; 3 | use crate::model::AvailableExtension; 4 | use crate::repository::Repository; 5 | use crate::repository::tensor_chord::URL; 6 | use async_trait::async_trait; 7 | use postgresql_archive::extractor::{ExtractDirectories, zip_extract}; 8 | use postgresql_archive::get_archive; 9 | use postgresql_archive::repository::github::repository::GitHub; 10 | use regex_lite::Regex; 11 | use semver::{Version, VersionReq}; 12 | use std::fmt::Debug; 13 | use std::path::PathBuf; 14 | 15 | /// TensorChord repository. 16 | #[derive(Debug)] 17 | pub struct TensorChord; 18 | 19 | impl TensorChord { 20 | /// Creates a new TensorChord repository. 21 | /// 22 | /// # Errors 23 | /// * If the repository cannot be created 24 | #[expect(clippy::new_ret_no_self)] 25 | pub fn new() -> Result> { 26 | Ok(Box::new(Self)) 27 | } 28 | 29 | /// Initializes the repository. 30 | /// 31 | /// # Errors 32 | /// * If the repository cannot be initialized. 33 | pub fn initialize() -> Result<()> { 34 | postgresql_archive::matcher::registry::register( 35 | |url| Ok(url.starts_with(URL)), 36 | zip_matcher, 37 | )?; 38 | postgresql_archive::repository::registry::register( 39 | |url| Ok(url.starts_with(URL)), 40 | Box::new(GitHub::new), 41 | )?; 42 | Ok(()) 43 | } 44 | } 45 | 46 | #[async_trait] 47 | impl Repository for TensorChord { 48 | fn name(&self) -> &'static str { 49 | "tensor-chord" 50 | } 51 | 52 | async fn get_available_extensions(&self) -> Result> { 53 | let extensions = vec![AvailableExtension::new( 54 | self.name(), 55 | "pgvecto.rs", 56 | "Scalable, Low-latency and Hybrid-enabled Vector Search", 57 | )]; 58 | Ok(extensions) 59 | } 60 | 61 | async fn get_archive( 62 | &self, 63 | postgresql_version: &str, 64 | name: &str, 65 | version: &VersionReq, 66 | ) -> Result<(Version, Vec)> { 67 | let url = format!("{URL}/{name}?postgresql_version={postgresql_version}"); 68 | let archive = get_archive(url.as_str(), version).await?; 69 | Ok(archive) 70 | } 71 | 72 | async fn install( 73 | &self, 74 | _name: &str, 75 | library_dir: PathBuf, 76 | extension_dir: PathBuf, 77 | archive: &[u8], 78 | ) -> Result> { 79 | let mut extract_directories = ExtractDirectories::default(); 80 | extract_directories.add_mapping(Regex::new(r"\.(dll|dylib|so)$")?, library_dir); 81 | extract_directories.add_mapping(Regex::new(r"\.(control|sql)$")?, extension_dir); 82 | let bytes = &archive.to_vec(); 83 | let files = zip_extract(bytes, extract_directories)?; 84 | Ok(files) 85 | } 86 | } 87 | 88 | #[cfg(test)] 89 | mod tests { 90 | use super::*; 91 | use crate::repository::Repository; 92 | 93 | #[test] 94 | fn test_name() { 95 | let repository = TensorChord; 96 | assert_eq!("tensor-chord", repository.name()); 97 | } 98 | 99 | #[tokio::test] 100 | async fn test_get_available_extensions() -> Result<()> { 101 | let repository = TensorChord; 102 | let extensions = repository.get_available_extensions().await?; 103 | let extension = &extensions[0]; 104 | 105 | assert_eq!("pgvecto.rs", extension.name()); 106 | assert_eq!( 107 | "Scalable, Low-latency and Hybrid-enabled Vector Search", 108 | extension.description() 109 | ); 110 | Ok(()) 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /postgresql_extensions/tests/blocking.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "blocking")] 2 | use test_log::test; 3 | 4 | 
#[cfg(feature = "blocking")] 5 | #[test] 6 | fn test_get_available_extensions() -> anyhow::Result<()> { 7 | let extensions = postgresql_extensions::blocking::get_available_extensions()?; 8 | #[cfg(feature = "steampipe")] 9 | assert!( 10 | extensions 11 | .iter() 12 | .any(|extension| extension.namespace() == "steampipe") 13 | ); 14 | #[cfg(feature = "tensor-chord")] 15 | assert!( 16 | extensions 17 | .iter() 18 | .any(|extension| extension.namespace() == "tensor-chord") 19 | ); 20 | Ok(()) 21 | } 22 | 23 | #[cfg(all(target_os = "linux", feature = "blocking", feature = "tensor-chord"))] 24 | #[test] 25 | fn test_lifecycle() -> anyhow::Result<()> { 26 | let installation_dir = tempfile::tempdir()?.path().to_path_buf(); 27 | let settings = postgresql_embedded::Settings { 28 | version: postgresql_embedded::VersionReq::parse("=16.4.0")?, 29 | installation_dir: installation_dir.clone(), 30 | ..Default::default() 31 | }; 32 | let mut postgresql = postgresql_embedded::blocking::PostgreSQL::new(settings); 33 | 34 | postgresql.setup()?; 35 | 36 | let settings = postgresql.settings(); 37 | let namespace = "tensor-chord"; 38 | let name = "pgvecto.rs"; 39 | let version = semver::VersionReq::parse("=0.3.0")?; 40 | 41 | let installed_extensions = postgresql_extensions::blocking::get_installed_extensions(settings)?; 42 | assert!(installed_extensions.is_empty()); 43 | 44 | postgresql_extensions::blocking::install(settings, namespace, name, &version)?; 45 | 46 | let installed_extensions = postgresql_extensions::blocking::get_installed_extensions(settings)?; 47 | assert!(!installed_extensions.is_empty()); 48 | 49 | postgresql_extensions::blocking::uninstall(settings, namespace, name)?; 50 | 51 | let installed_extensions = postgresql_extensions::blocking::get_installed_extensions(settings)?; 52 | assert!(installed_extensions.is_empty()); 53 | 54 | std::fs::remove_dir_all(&installation_dir)?; 55 | Ok(()) 56 | } 57 | -------------------------------------------------------------------------------- /postgresql_extensions/tests/extensions.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use postgresql_extensions::get_available_extensions; 3 | 4 | #[tokio::test] 5 | async fn test_get_available_extensions() -> Result<()> { 6 | let extensions = get_available_extensions().await?; 7 | #[cfg(feature = "steampipe")] 8 | assert!( 9 | extensions 10 | .iter() 11 | .any(|extension| extension.namespace() == "steampipe") 12 | ); 13 | #[cfg(feature = "tensor-chord")] 14 | assert!( 15 | extensions 16 | .iter() 17 | .any(|extension| extension.namespace() == "tensor-chord") 18 | ); 19 | Ok(()) 20 | } 21 | 22 | #[cfg(all(target_os = "linux", feature = "tensor-chord"))] 23 | #[tokio::test] 24 | async fn test_lifecycle() -> Result<()> { 25 | let installation_dir = tempfile::tempdir()?.path().to_path_buf(); 26 | let settings = postgresql_embedded::Settings { 27 | version: postgresql_embedded::VersionReq::parse("=16.4.0")?, 28 | installation_dir: installation_dir.clone(), 29 | ..Default::default() 30 | }; 31 | let mut postgresql = postgresql_embedded::PostgreSQL::new(settings); 32 | 33 | postgresql.setup().await?; 34 | 35 | let settings = postgresql.settings(); 36 | let namespace = "tensor-chord"; 37 | let name = "pgvecto.rs"; 38 | let version = semver::VersionReq::parse("=0.3.0")?; 39 | 40 | let installed_extensions = postgresql_extensions::get_installed_extensions(settings).await?; 41 | assert!(installed_extensions.is_empty()); 42 | 43 | 
postgresql_extensions::install(settings, namespace, name, &version).await?; 44 | 45 | let installed_extensions = postgresql_extensions::get_installed_extensions(settings).await?; 46 | assert!(!installed_extensions.is_empty()); 47 | 48 | postgresql_extensions::uninstall(settings, namespace, name).await?; 49 | 50 | let installed_extensions = postgresql_extensions::get_installed_extensions(settings).await?; 51 | assert!(installed_extensions.is_empty()); 52 | 53 | tokio::fs::remove_dir_all(&installation_dir).await?; 54 | Ok(()) 55 | } 56 | -------------------------------------------------------------------------------- /postgresql_extensions/tests/portal_corp.rs: -------------------------------------------------------------------------------- 1 | #[cfg(not(any( 2 | all(target_os = "linux", target_arch = "aarch64"), 3 | all(target_os = "macos", target_arch = "x86_64") 4 | )))] 5 | #[cfg(feature = "portal-corp")] 6 | #[tokio::test] 7 | async fn test_lifecycle() -> anyhow::Result<()> { 8 | let installation_dir = tempfile::tempdir()?.path().to_path_buf(); 9 | let postgresql_version = semver::VersionReq::parse("=16.4.0")?; 10 | let settings = postgresql_embedded::Settings { 11 | version: postgresql_version, 12 | installation_dir: installation_dir.clone(), 13 | ..Default::default() 14 | }; 15 | let mut postgresql = postgresql_embedded::PostgreSQL::new(settings); 16 | 17 | postgresql.setup().await?; 18 | 19 | let settings = postgresql.settings(); 20 | let namespace = "portal-corp"; 21 | let name = "pgvector_compiled"; 22 | let version = semver::VersionReq::parse("=0.16.12")?; 23 | 24 | let installed_extensions = postgresql_extensions::get_installed_extensions(settings).await?; 25 | assert!(installed_extensions.is_empty()); 26 | 27 | postgresql_extensions::install(settings, namespace, name, &version).await?; 28 | 29 | let installed_extensions = postgresql_extensions::get_installed_extensions(settings).await?; 30 | assert!(!installed_extensions.is_empty()); 31 | 32 | postgresql_extensions::uninstall(settings, namespace, name).await?; 33 | 34 | let installed_extensions = postgresql_extensions::get_installed_extensions(settings).await?; 35 | assert!(installed_extensions.is_empty()); 36 | 37 | tokio::fs::remove_dir_all(&installation_dir).await?; 38 | Ok(()) 39 | } 40 | -------------------------------------------------------------------------------- /postgresql_extensions/tests/steampipe.rs: -------------------------------------------------------------------------------- 1 | #[cfg(any(target_os = "linux", target_os = "macos"))] 2 | #[cfg(feature = "steampipe")] 3 | #[tokio::test] 4 | async fn test_lifecycle() -> anyhow::Result<()> { 5 | let installation_dir = tempfile::tempdir()?.path().to_path_buf(); 6 | let postgresql_version = semver::VersionReq::parse("=15.7.0")?; 7 | let settings = postgresql_embedded::Settings { 8 | version: postgresql_version, 9 | installation_dir: installation_dir.clone(), 10 | ..Default::default() 11 | }; 12 | let mut postgresql = postgresql_embedded::PostgreSQL::new(settings); 13 | 14 | postgresql.setup().await?; 15 | 16 | let settings = postgresql.settings(); 17 | let namespace = "steampipe"; 18 | let name = "csv"; 19 | let version = semver::VersionReq::parse("=0.12.0")?; 20 | 21 | let installed_extensions = postgresql_extensions::get_installed_extensions(settings).await?; 22 | assert!(installed_extensions.is_empty()); 23 | 24 | postgresql_extensions::install(settings, namespace, name, &version).await?; 25 | 26 | let installed_extensions = 
postgresql_extensions::get_installed_extensions(settings).await?; 27 | assert!(!installed_extensions.is_empty()); 28 | 29 | postgresql_extensions::uninstall(settings, namespace, name).await?; 30 | 31 | let installed_extensions = postgresql_extensions::get_installed_extensions(settings).await?; 32 | assert!(installed_extensions.is_empty()); 33 | 34 | tokio::fs::remove_dir_all(&installation_dir).await?; 35 | Ok(()) 36 | } 37 | -------------------------------------------------------------------------------- /release-plz.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | changelog_path = "./CHANGELOG.md" 3 | git_release_enable = false 4 | git_tag_enable = false 5 | pr_name = "postgresql-embedded-v{{ version }}" 6 | release_always = false 7 | 8 | [[package]] 9 | name = "postgresql_embedded" 10 | changelog_update = true 11 | changelog_include = [ 12 | "postgresql_archive", 13 | "postgresql_commands", 14 | "postgresql_extensions", 15 | ] 16 | git_release_enable = true 17 | git_release_name = "v{{ version }}" 18 | git_tag_enable = true 19 | git_tag_name = "v{{ version }}" 20 | 21 | [changelog] 22 | body = """ 23 | 24 | ## `{{ package }}` - [{{ version | trim_start_matches(pat="v") }}]{%- if release_link -%}({{ release_link }}){% endif %} - {{ timestamp | date(format="%Y-%m-%d") }} 25 | {% for group, commits in commits | group_by(attribute="group") %} 26 | ### {{ group | upper_first }} 27 | {% for commit in commits %} 28 | {%- if commit.scope -%} 29 | - *({{commit.scope}})* {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message }}{%- if commit.links %} ({% for link in commit.links %}[{{link.text}}]({{link.href}}) {% endfor -%}){% endif %} 30 | {% else -%} 31 | - {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message }} 32 | {% endif -%} 33 | {% endfor -%} 34 | {% endfor -%} 35 | """ 36 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "1.87.0" 3 | profile = "default" 4 | --------------------------------------------------------------------------------