├── .cargo └── config.toml ├── .clippy.toml ├── .editorconfig ├── .github ├── renovate.json5 ├── settings.yml └── workflows │ ├── audit.yml │ ├── ci.yml │ ├── committed.yml │ ├── pre-commit.yml │ ├── rust-next.yml │ └── spelling.yml ├── .gitignore ├── .gitlint ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── Cargo.lock ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── _typos.toml ├── committed.toml ├── deny.toml ├── examples ├── async_source │ └── main.rs ├── custom_file_format │ ├── files │ │ ├── private.pem │ │ └── public.pem │ └── main.rs ├── custom_str_format │ └── main.rs ├── env-list │ └── main.rs ├── glob │ ├── conf │ │ ├── 00-default.toml │ │ ├── 05-some.yml │ │ └── 99-extra.json │ └── main.rs ├── hierarchical-env │ ├── config │ │ ├── default.toml │ │ ├── development.toml │ │ └── production.toml │ ├── main.rs │ └── settings.rs ├── simple │ ├── Settings.toml │ └── main.rs ├── static_env.rs └── watch │ ├── Settings.toml │ └── main.rs ├── release.toml ├── src ├── builder.rs ├── config.rs ├── de.rs ├── env.rs ├── error.rs ├── file │ ├── format │ │ ├── ini.rs │ │ ├── json.rs │ │ ├── json5.rs │ │ ├── mod.rs │ │ ├── ron.rs │ │ ├── toml.rs │ │ └── yaml.rs │ ├── mod.rs │ └── source │ │ ├── file.rs │ │ ├── mod.rs │ │ └── string.rs ├── format.rs ├── lib.rs ├── map.rs ├── path │ ├── mod.rs │ └── parser.rs ├── ser.rs ├── source.rs └── value.rs └── tests └── testsuite ├── async_builder.rs ├── case.rs ├── defaults.rs ├── deserialize-invalid-type.json ├── deserialize-missing-field.json ├── empty.rs ├── env.rs ├── errors.rs ├── file-auto.json ├── file-ext.json ├── file-second-ext.default.json ├── file.rs ├── file_ini.rs ├── file_json.rs ├── file_json5.rs ├── file_ron.rs ├── file_toml.rs ├── file_yaml.rs ├── get-invalid-type.json ├── get-missing-field.json ├── get.rs ├── integer_range.rs ├── log.rs ├── main.rs ├── merge.rs ├── ron_enum.rs ├── set.rs ├── unsigned_int.rs ├── unsigned_int_hm.rs └── weird_keys.rs /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [resolver] 2 | incompatible-rust-versions = "fallback" 3 | -------------------------------------------------------------------------------- /.clippy.toml: -------------------------------------------------------------------------------- 1 | allow-print-in-tests = true 2 | allow-expect-in-tests = true 3 | allow-unwrap-in-tests = true 4 | allow-dbg-in-tests = true 5 | # disallowed-methods = [ 6 | # { path = "std::option::Option::map_or", reason = "prefer `map(..).unwrap_or(..)` for legibility" }, 7 | # { path = "std::option::Option::map_or_else", reason = "prefer `map(..).unwrap_or_else(..)` for legibility" }, 8 | # { path = "std::result::Result::map_or", reason = "prefer `map(..).unwrap_or(..)` for legibility" }, 9 | # { path = "std::result::Result::map_or_else", reason = "prefer `map(..).unwrap_or_else(..)` for legibility" }, 10 | # { path = "std::iter::Iterator::for_each", reason = "prefer `for` for side-effects" }, 11 | # { path = "std::iter::Iterator::try_for_each", reason = "prefer `for` for side-effects" }, 12 | # ] 13 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 4 6 | insert_final_newline = true 7 | trim_trailing_whitespace = true 8 | -------------------------------------------------------------------------------- /.github/renovate.json5: 
--------------------------------------------------------------------------------
1 | {
2 |   schedule: [
3 |     'before 5am on the first day of the month',
4 |   ],
5 |   semanticCommits: 'enabled',
6 |   commitMessageLowerCase: 'never',
7 |   configMigration: true,
8 |   dependencyDashboard: true,
9 |   customManagers: [
10 |     {
11 |       customType: 'regex',
12 |       managerFilePatterns: [
13 |         '/^rust-toolchain\\.toml$/',
14 |         '/Cargo.toml$/',
15 |         '/clippy.toml$/',
16 |         '/\\.clippy.toml$/',
17 |         '/^\\.github/workflows/ci.yml$/',
18 |         '/^\\.github/workflows/rust-next.yml$/',
19 |       ],
20 |       matchStrings: [
21 |         'STABLE.*?(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)',
22 |         '(?<currentValue>\\d+\\.\\d+(\\.\\d+)?).*?STABLE',
23 |       ],
24 |       depNameTemplate: 'STABLE',
25 |       packageNameTemplate: 'rust-lang/rust',
26 |       datasourceTemplate: 'github-releases',
27 |     },
28 |   ],
29 |   packageRules: [
30 |     {
31 |       commitMessageTopic: 'Rust Stable',
32 |       matchManagers: [
33 |         'custom.regex',
34 |       ],
35 |       matchDepNames: [
36 |         'STABLE',
37 |       ],
38 |       extractVersion: '^(?<version>\\d+\\.\\d+)', // Drop the patch version
39 |       schedule: [
40 |         '* * * * *',
41 |       ],
42 |       automerge: true,
43 |     },
44 |     // Goals:
45 |     // - Keep version reqs low, ignoring compatible normal/build dependencies
46 |     // - Take advantage of latest dev-dependencies
47 |     // - Rollup safe upgrades to reduce CI runner load
48 |     // - Help keep number of versions down by always using latest breaking change
49 |     // - Have lockfile and manifest in-sync
50 |     {
51 |       matchManagers: [
52 |         'cargo',
53 |       ],
54 |       matchDepTypes: [
55 |         'build-dependencies',
56 |         'dependencies',
57 |       ],
58 |       matchCurrentVersion: '>=0.1.0',
59 |       matchUpdateTypes: [
60 |         'patch',
61 |       ],
62 |       enabled: false,
63 |     },
64 |     {
65 |       matchManagers: [
66 |         'cargo',
67 |       ],
68 |       matchDepTypes: [
69 |         'build-dependencies',
70 |         'dependencies',
71 |       ],
72 |       matchCurrentVersion: '>=1.0.0',
73 |       matchUpdateTypes: [
74 |         'minor',
75 |         'patch',
76 |       ],
77 |       enabled: false,
78 |     },
79 |     {
80 |       matchManagers: [
81 |         'cargo',
82 |       ],
83 |       matchDepTypes: [
84 |         'dev-dependencies',
85 |       ],
86 |       matchCurrentVersion: '>=0.1.0',
87 |       matchUpdateTypes: [
88 |         'patch',
89 |       ],
90 |       automerge: true,
91 |       groupName: 'compatible (dev)',
92 |     },
93 |     {
94 |       matchManagers: [
95 |         'cargo',
96 |       ],
97 |       matchDepTypes: [
98 |         'dev-dependencies',
99 |       ],
100 |       matchCurrentVersion: '>=1.0.0',
101 |       matchUpdateTypes: [
102 |         'minor',
103 |         'patch',
104 |       ],
105 |       automerge: true,
106 |       groupName: 'compatible (dev)',
107 |     },
108 |   ],
109 | }
110 | 
--------------------------------------------------------------------------------
/.github/settings.yml:
--------------------------------------------------------------------------------
1 | # These settings are synced to GitHub by https://probot.github.io/apps/settings/
2 | 
3 | repository:
4 |   description: "⚙️ Layered configuration system for Rust applications (with strong support for 12-factor applications). 
" 5 | homepage: "https://docs.rs/config/latest/config/" 6 | topics: "" 7 | has_issues: true 8 | has_projects: false 9 | has_wiki: false 10 | has_downloads: true 11 | default_branch: main 12 | 13 | # Preference: people do clean commits 14 | allow_merge_commit: true 15 | # Backup in case we need to clean up commits 16 | allow_squash_merge: true 17 | # Not really needed 18 | allow_rebase_merge: false 19 | 20 | allow_auto_merge: true 21 | delete_branch_on_merge: true 22 | 23 | squash_merge_commit_title: "PR_TITLE" 24 | squash_merge_commit_message: "PR_BODY" 25 | merge_commit_message: "PR_BODY" 26 | 27 | # labels: 28 | # # Type 29 | # - name: bug 30 | # color: '#b60205' 31 | # description: "Not as expected" 32 | # - name: enhancement 33 | # color: '#1d76db' 34 | # description: "Improve the expected" 35 | # # Flavor 36 | # - name: question 37 | # color: "#cc317c" 38 | # description: "Uncertainty is involved" 39 | # - name: breaking-change 40 | # color: "#e99695" 41 | # - name: good first issue 42 | # color: '#c2e0c6' 43 | # description: "Help wanted!" 44 | 45 | # This serves more as documentation. 46 | # Branch protection API was replaced by rulesets but settings isn't updated. 47 | # See https://github.com/repository-settings/app/issues/825 48 | # 49 | # branches: 50 | # - name: main 51 | # protection: 52 | # required_pull_request_reviews: null 53 | # required_conversation_resolution: true 54 | # required_status_checks: 55 | # # Required. Require branches to be up to date before merging. 56 | # strict: false 57 | # contexts: ["CI", "Spell Check with Typos"] 58 | # enforce_admins: false 59 | # restrictions: null 60 | -------------------------------------------------------------------------------- /.github/workflows/audit.yml: -------------------------------------------------------------------------------- 1 | name: Security audit 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | pull_request: 8 | paths: 9 | - '**/Cargo.toml' 10 | - '**/Cargo.lock' 11 | push: 12 | branches: 13 | - main 14 | 15 | env: 16 | RUST_BACKTRACE: 1 17 | CARGO_TERM_COLOR: always 18 | CLICOLOR: 1 19 | 20 | concurrency: 21 | group: "${{ github.workflow }}-${{ github.ref }}" 22 | cancel-in-progress: true 23 | 24 | jobs: 25 | security_audit: 26 | permissions: 27 | issues: write # to create issues (actions-rs/audit-check) 28 | checks: write # to create check (actions-rs/audit-check) 29 | runs-on: ubuntu-latest 30 | # Prevent sudden announcement of a new advisory from failing ci: 31 | continue-on-error: true 32 | steps: 33 | - name: Checkout repository 34 | uses: actions/checkout@v4 35 | - uses: actions-rs/audit-check@v1 36 | with: 37 | token: ${{ secrets.GITHUB_TOKEN }} 38 | 39 | cargo_deny: 40 | permissions: 41 | issues: write # to create issues (actions-rs/audit-check) 42 | checks: write # to create check (actions-rs/audit-check) 43 | runs-on: ubuntu-latest 44 | strategy: 45 | matrix: 46 | checks: 47 | - bans licenses sources 48 | steps: 49 | - uses: actions/checkout@v4 50 | - uses: EmbarkStudios/cargo-deny-action@v2 51 | with: 52 | command: check ${{ matrix.checks }} 53 | rust-version: stable 54 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | pull_request: 8 | push: 9 | branches: 10 | - main 11 | 12 | env: 13 | RUST_BACKTRACE: 1 14 | CARGO_TERM_COLOR: always 15 | CLICOLOR: 1 16 | 17 | concurrency: 18 | group: 
"${{ github.workflow }}-${{ github.ref }}" 19 | cancel-in-progress: true 20 | 21 | jobs: 22 | ci: 23 | permissions: 24 | contents: none 25 | name: CI 26 | needs: [test, msrv, lockfile, docs, rustfmt, clippy, minimal-versions] 27 | runs-on: ubuntu-latest 28 | if: "always()" 29 | steps: 30 | - name: Failed 31 | run: exit 1 32 | if: "contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped')" 33 | test: 34 | name: Test 35 | strategy: 36 | matrix: 37 | os: ["ubuntu-latest", "windows-latest", "macos-latest"] 38 | rust: ["stable"] 39 | continue-on-error: ${{ matrix.rust != 'stable' }} 40 | runs-on: ${{ matrix.os }} 41 | steps: 42 | - name: Checkout repository 43 | uses: actions/checkout@v4 44 | - name: Install Rust 45 | uses: dtolnay/rust-toolchain@stable 46 | with: 47 | toolchain: ${{ matrix.rust }} 48 | - uses: Swatinem/rust-cache@v2 49 | - uses: taiki-e/install-action@cargo-hack 50 | - name: Build 51 | run: cargo test --workspace --no-run 52 | - name: Test 53 | run: cargo hack test --each-feature --workspace 54 | msrv: 55 | name: "Check MSRV" 56 | runs-on: ubuntu-latest 57 | steps: 58 | - name: Checkout repository 59 | uses: actions/checkout@v4 60 | - name: Install Rust 61 | uses: dtolnay/rust-toolchain@stable 62 | with: 63 | toolchain: stable 64 | - uses: Swatinem/rust-cache@v2 65 | - uses: taiki-e/install-action@cargo-hack 66 | - name: Default features 67 | run: cargo hack check --each-feature --locked --rust-version --ignore-private --workspace --all-targets --keep-going 68 | minimal-versions: 69 | name: Minimal versions 70 | runs-on: ubuntu-latest 71 | steps: 72 | - name: Checkout repository 73 | uses: actions/checkout@v4 74 | - name: Install stable Rust 75 | uses: dtolnay/rust-toolchain@stable 76 | with: 77 | toolchain: stable 78 | - name: Install nightly Rust 79 | uses: dtolnay/rust-toolchain@stable 80 | with: 81 | toolchain: nightly 82 | - name: Downgrade dependencies to minimal versions 83 | run: cargo +nightly generate-lockfile -Z minimal-versions 84 | - name: Hack around bad deps 85 | run: cargo update --recursive json5 86 | - name: Compile with minimal versions 87 | run: cargo +stable check --workspace --all-features --locked --keep-going 88 | lockfile: 89 | runs-on: ubuntu-latest 90 | steps: 91 | - name: Checkout repository 92 | uses: actions/checkout@v4 93 | - name: Install Rust 94 | uses: dtolnay/rust-toolchain@stable 95 | with: 96 | toolchain: stable 97 | - uses: Swatinem/rust-cache@v2 98 | - name: "Is lockfile updated?" 
99 | run: cargo update --workspace --locked 100 | docs: 101 | name: Docs 102 | runs-on: ubuntu-latest 103 | steps: 104 | - name: Checkout repository 105 | uses: actions/checkout@v4 106 | - name: Install Rust 107 | uses: dtolnay/rust-toolchain@stable 108 | with: 109 | toolchain: "1.87" # STABLE 110 | - uses: Swatinem/rust-cache@v2 111 | - name: Check documentation 112 | env: 113 | RUSTDOCFLAGS: -D warnings 114 | run: cargo doc --workspace --all-features --no-deps --document-private-items --keep-going 115 | rustfmt: 116 | name: rustfmt 117 | runs-on: ubuntu-latest 118 | steps: 119 | - name: Checkout repository 120 | uses: actions/checkout@v4 121 | - name: Install Rust 122 | uses: dtolnay/rust-toolchain@stable 123 | with: 124 | toolchain: "1.87" # STABLE 125 | components: rustfmt 126 | - uses: Swatinem/rust-cache@v2 127 | - name: Check formatting 128 | run: cargo fmt --all -- --check 129 | clippy: 130 | name: clippy 131 | runs-on: ubuntu-latest 132 | permissions: 133 | security-events: write # to upload sarif results 134 | steps: 135 | - name: Checkout repository 136 | uses: actions/checkout@v4 137 | - name: Install Rust 138 | uses: dtolnay/rust-toolchain@stable 139 | with: 140 | toolchain: "1.87" # STABLE 141 | components: clippy 142 | - uses: Swatinem/rust-cache@v2 143 | - name: Install SARIF tools 144 | run: cargo install clippy-sarif --locked 145 | - name: Install SARIF tools 146 | run: cargo install sarif-fmt --locked 147 | - name: Check 148 | run: > 149 | cargo clippy --workspace --all-features --all-targets --message-format=json 150 | | clippy-sarif 151 | | tee clippy-results.sarif 152 | | sarif-fmt 153 | continue-on-error: true 154 | - name: Upload 155 | uses: github/codeql-action/upload-sarif@v3 156 | with: 157 | sarif_file: clippy-results.sarif 158 | wait-for-processing: true 159 | - name: Report status 160 | run: cargo clippy --workspace --all-features --all-targets --keep-going -- -D warnings --allow deprecated 161 | coverage: 162 | name: Coverage 163 | runs-on: ubuntu-latest 164 | steps: 165 | - name: Checkout repository 166 | uses: actions/checkout@v4 167 | - name: Install Rust 168 | uses: dtolnay/rust-toolchain@stable 169 | with: 170 | toolchain: stable 171 | - uses: Swatinem/rust-cache@v2 172 | - name: Install cargo-tarpaulin 173 | run: cargo install cargo-tarpaulin 174 | - name: Gather coverage 175 | run: cargo tarpaulin --output-dir coverage --out lcov 176 | - name: Publish to Coveralls 177 | uses: coverallsapp/github-action@master 178 | with: 179 | github-token: ${{ secrets.GITHUB_TOKEN }} 180 | -------------------------------------------------------------------------------- /.github/workflows/committed.yml: -------------------------------------------------------------------------------- 1 | # Not run as part of pre-commit checks because they don't handle sending the correct commit 2 | # range to `committed` 3 | name: Lint Commits 4 | on: [pull_request] 5 | 6 | permissions: 7 | contents: read 8 | 9 | env: 10 | RUST_BACKTRACE: 1 11 | CARGO_TERM_COLOR: always 12 | CLICOLOR: 1 13 | 14 | concurrency: 15 | group: "${{ github.workflow }}-${{ github.ref }}" 16 | cancel-in-progress: true 17 | 18 | jobs: 19 | committed: 20 | name: Lint Commits 21 | runs-on: ubuntu-latest 22 | steps: 23 | - name: Checkout Actions Repository 24 | uses: actions/checkout@v4 25 | with: 26 | fetch-depth: 0 27 | - name: Lint Commits 28 | uses: crate-ci/committed@master 29 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yml: 
-------------------------------------------------------------------------------- 1 | name: pre-commit 2 | 3 | permissions: {} # none 4 | 5 | on: 6 | pull_request: 7 | push: 8 | branches: [main] 9 | 10 | env: 11 | RUST_BACKTRACE: 1 12 | CARGO_TERM_COLOR: always 13 | CLICOLOR: 1 14 | 15 | concurrency: 16 | group: "${{ github.workflow }}-${{ github.ref }}" 17 | cancel-in-progress: true 18 | 19 | jobs: 20 | pre-commit: 21 | permissions: 22 | contents: read 23 | runs-on: ubuntu-latest 24 | steps: 25 | - uses: actions/checkout@v4 26 | - uses: actions/setup-python@v5 27 | with: 28 | python-version: '3.x' 29 | - uses: pre-commit/action@v3.0.1 30 | -------------------------------------------------------------------------------- /.github/workflows/rust-next.yml: -------------------------------------------------------------------------------- 1 | name: rust-next 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | schedule: 8 | - cron: '1 1 1 * *' 9 | 10 | env: 11 | RUST_BACKTRACE: 1 12 | CARGO_TERM_COLOR: always 13 | CLICOLOR: 1 14 | 15 | concurrency: 16 | group: "${{ github.workflow }}-${{ github.ref }}" 17 | cancel-in-progress: true 18 | 19 | jobs: 20 | test: 21 | name: Test 22 | strategy: 23 | matrix: 24 | os: ["ubuntu-latest", "windows-latest", "macos-latest"] 25 | rust: ["stable", "beta"] 26 | include: 27 | - os: ubuntu-latest 28 | rust: "nightly" 29 | continue-on-error: ${{ matrix.rust != 'stable' }} 30 | runs-on: ${{ matrix.os }} 31 | steps: 32 | - name: Checkout repository 33 | uses: actions/checkout@v4 34 | - name: Install Rust 35 | uses: dtolnay/rust-toolchain@stable 36 | with: 37 | toolchain: ${{ matrix.rust }} 38 | - uses: Swatinem/rust-cache@v2 39 | - uses: taiki-e/install-action@cargo-hack 40 | - name: Build 41 | run: cargo test --workspace --no-run 42 | - name: Test 43 | run: cargo hack test --each-feature --workspace 44 | latest: 45 | name: "Check latest dependencies" 46 | runs-on: ubuntu-latest 47 | steps: 48 | - name: Checkout repository 49 | uses: actions/checkout@v4 50 | - name: Install Rust 51 | uses: dtolnay/rust-toolchain@stable 52 | with: 53 | toolchain: stable 54 | - uses: Swatinem/rust-cache@v2 55 | - uses: taiki-e/install-action@cargo-hack 56 | - name: Update dependencies 57 | run: cargo update 58 | - name: Build 59 | run: cargo test --workspace --no-run 60 | - name: Test 61 | run: cargo hack test --each-feature --workspace 62 | -------------------------------------------------------------------------------- /.github/workflows/spelling.yml: -------------------------------------------------------------------------------- 1 | name: Spelling 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: [pull_request] 7 | 8 | env: 9 | RUST_BACKTRACE: 1 10 | CARGO_TERM_COLOR: always 11 | CLICOLOR: 1 12 | 13 | concurrency: 14 | group: "${{ github.workflow }}-${{ github.ref }}" 15 | cancel-in-progress: true 16 | 17 | jobs: 18 | spelling: 19 | name: Spell Check with Typos 20 | runs-on: ubuntu-latest 21 | steps: 22 | - name: Checkout Actions Repository 23 | uses: actions/checkout@v4 24 | - name: Spell Check Repo 25 | uses: crate-ci/typos@master 26 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | -------------------------------------------------------------------------------- /.gitlint: -------------------------------------------------------------------------------- 1 | # Edit this file as you like. 2 | # 3 | # All these sections are optional. 
Each section with the exception of [general] represents 4 | # one rule and each key in it is an option for that specific rule. 5 | # 6 | # Rules and sections can be referenced by their full name or by id. For example 7 | # section "[body-max-line-length]" could also be written as "[B1]". Full section names are 8 | # used in here for clarity. 9 | # 10 | [general] 11 | # Ignore certain rules, this example uses both full name and id 12 | ignore=body-is-missing 13 | 14 | # verbosity should be a value between 1 and 3, the commandline -v flags take precedence over this 15 | # verbosity = 2 16 | 17 | # By default gitlint will ignore merge, revert, fixup and squash commits. 18 | ignore-merge-commits=true 19 | # ignore-revert-commits=true 20 | # ignore-fixup-commits=true 21 | # ignore-squash-commits=true 22 | 23 | # Ignore any data send to gitlint via stdin 24 | # ignore-stdin=true 25 | 26 | # Fetch additional meta-data from the local repository when manually passing a 27 | # commit message to gitlint via stdin or --commit-msg. Disabled by default. 28 | # staged=true 29 | 30 | # Hard fail when the target commit range is empty. Note that gitlint will 31 | # already fail by default on invalid commit ranges. This option is specifically 32 | # to tell gitlint to fail on *valid but empty* commit ranges. 33 | # Disabled by default. 34 | # fail-without-commits=true 35 | 36 | # Enable debug mode (prints more output). Disabled by default. 37 | # debug=true 38 | 39 | # Enable community contributed rules 40 | # See http://jorisroovers.github.io/gitlint/contrib_rules for details 41 | contrib=CC1, CC2 42 | 43 | # Set the extra-path where gitlint will search for user defined rules 44 | # See http://jorisroovers.github.io/gitlint/user_defined_rules for details 45 | # extra-path=examples/ 46 | 47 | # This is an example of how to configure the "title-max-length" rule and 48 | # set the line-length it enforces to 50 49 | # [title-max-length] 50 | # line-length=50 51 | 52 | # Conversely, you can also enforce minimal length of a title with the 53 | # "title-min-length" rule: 54 | # [title-min-length] 55 | # min-length=5 56 | 57 | # [title-must-not-contain-word] 58 | # Comma-separated list of words that should not occur in the title. Matching is case 59 | # insensitive. It's fine if the keyword occurs as part of a larger word (so "WIPING" 60 | # will not cause a violation, but "WIP: my title" will. 61 | # words=wip 62 | 63 | # [title-match-regex] 64 | # python-style regex that the commit-msg title must match 65 | # Note that the regex can contradict with other rules if not used correctly 66 | # (e.g. title-must-not-contain-word). 67 | # regex=^US[0-9]* 68 | 69 | # [body-max-line-length] 70 | # line-length=72 71 | 72 | # [body-min-length] 73 | # min-length=5 74 | 75 | # [body-is-missing] 76 | # Whether to ignore this rule on merge commits (which typically only have a title) 77 | # default = True 78 | # ignore-merge-commits=false 79 | 80 | # [body-changed-file-mention] 81 | # List of files that need to be explicitly mentioned in the body when they are changed 82 | # This is useful for when developers often erroneously edit certain files or git submodules. 83 | # By specifying this rule, developers can only change the file when they explicitly reference 84 | # it in the commit message. 85 | # files=gitlint-core/gitlint/rules.py,README.md 86 | 87 | # [body-match-regex] 88 | # python-style regex that the commit-msg body must match. 89 | # E.g. 
body must end in My-Commit-Tag: foo 90 | # regex=My-Commit-Tag: foo$ 91 | 92 | # [author-valid-email] 93 | # python-style regex that the commit author email address must match. 94 | # For example, use the following regex if you only want to allow email addresses from foo.com 95 | # regex=[^@]+@foo.com 96 | 97 | # [ignore-by-title] 98 | # Ignore certain rules for commits of which the title matches a regex 99 | # E.g. Match commit titles that start with "Release" 100 | # regex=^Release(.*) 101 | 102 | # Ignore certain rules, you can reference them by their id or by their full name 103 | # Use 'all' to ignore all rules 104 | # ignore=T1,body-min-length 105 | 106 | # [ignore-by-body] 107 | # Ignore certain rules for commits of which the body has a line that matches a regex 108 | # E.g. Match bodies that have a line that that contain "release" 109 | # regex=(.*)release(.*) 110 | # 111 | # Ignore certain rules, you can reference them by their id or by their full name 112 | # Use 'all' to ignore all rules 113 | # ignore=T1,body-min-length 114 | 115 | # [ignore-body-lines] 116 | # Ignore certain lines in a commit body that match a regex. 117 | # E.g. Ignore all lines that start with 'Co-Authored-By' 118 | # regex=^Co-Authored-By 119 | 120 | [ignore-by-author-name] 121 | # Ignore certain rules for commits of which the author name matches a regex 122 | # E.g. Match commits made by dependabot 123 | regex=(.*)dependabot(.*) 124 | # 125 | # Ignore certain rules, you can reference them by their id or by their full name 126 | # Use 'all' to ignore all rules 127 | # ignore=T1,body-min-length 128 | 129 | # This is a contrib rule - a community contributed rule. These are disabled by default. 130 | # You need to explicitly enable them one-by-one by adding them to the "contrib" option 131 | # under [general] section above. 132 | # [contrib-title-conventional-commits] 133 | # Specify allowed commit types. For details see: https://www.conventionalcommits.org/ 134 | # types = bugfix,user-story,epic 135 | [contrib-body-requires-signed-off-by] 136 | 137 | [contrib-disallow-cleanup-commits] 138 | 139 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: | 2 | (?x)^( 3 | tests/.*| 4 | examples/.* 5 | )$ 6 | default_install_hook_types: ["pre-commit", "commit-msg"] 7 | repos: 8 | - repo: https://github.com/pre-commit/pre-commit-hooks 9 | rev: v5.0.0 10 | hooks: 11 | - id: check-yaml 12 | - id: check-json 13 | - id: check-toml 14 | - id: check-merge-conflict 15 | - id: check-case-conflict 16 | - id: detect-private-key 17 | - repo: https://github.com/crate-ci/typos 18 | rev: v1.32.0 19 | hooks: 20 | - id: typos 21 | - repo: https://github.com/crate-ci/committed 22 | rev: v1.1.7 23 | hooks: 24 | - id: committed 25 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to config-rs 2 | 3 | Thanks for wanting to contribute! There are many ways to contribute and we 4 | appreciate any level you're willing to do. 5 | 6 | ## Feature Requests 7 | 8 | Need some new functionality to help? You can let us know by opening an 9 | [issue][new issue]. It's helpful to look through [all issues][all issues] in 10 | case it's already being talked about. 
11 | 12 | ## Bug Reports 13 | 14 | Please let us know about what problems you run into, whether in behavior or 15 | ergonomics of API. You can do this by opening an [issue][new issue]. It's 16 | helpful to look through [all issues][all issues] in case it's already being 17 | talked about. 18 | 19 | ## Pull Requests 20 | 21 | Looking for an idea? Check our [issues][issues]. If the issue looks open ended, 22 | it is probably best to post on the issue how you are thinking of resolving the 23 | issue so you can get feedback early in the process. We want you to be 24 | successful and it can be discouraging to find out a lot of re-work is needed. 25 | 26 | Already have an idea? It might be good to first [create an issue][new issue] 27 | to propose it so we can make sure we are aligned and lower the risk of having 28 | to re-work some of it and the discouragement that goes along with that. 29 | 30 | ### Process 31 | 32 | As a heads up, we'll be running your PR through the following gauntlet: 33 | - warnings turned to compile errors 34 | - `cargo test` 35 | - `rustfmt` 36 | - `clippy` 37 | - `rustdoc` 38 | - [`committed`](https://github.com/crate-ci/committed) as we use [Conventional](https://www.conventionalcommits.org) commit style 39 | - [`typos`](https://github.com/crate-ci/typos) to check spelling 40 | 41 | Not everything can be checked automatically though. 42 | 43 | We request that the commit history gets cleaned up. 44 | 45 | We ask that commits are atomic, meaning they are complete and have a single responsibility. 46 | A complete commit should build, pass tests, update documentation and tests, and not have dead code. 47 | 48 | PRs should tell a cohesive story, with refactor and test commits that keep the 49 | fix or feature commits simple and clear. 50 | 51 | Specifically, we would encourage 52 | - File renames be isolated into their own commit 53 | - Add tests in a commit before their feature or fix, showing the current behavior (i.e. they should pass). 54 | The diff for the feature/fix commit will then show how the behavior changed, 55 | making the commit's intent clearer to reviewers and the community, and showing people that the 56 | test is verifying the expected state. 57 | - e.g. [clap#5520](https://github.com/clap-rs/clap/pull/5520) 58 | 59 | Note that we are talking about ideals. 60 | We understand having a clean history requires more advanced git skills; 61 | feel free to ask us for help! 62 | We might even suggest where it would work to be lax. 63 | We also understand that editing some early commits may cause a lot of churn 64 | with merge conflicts which can make it not worth editing all of the history. 65 | 66 | For code organization, we recommend 67 | - Grouping `impl` blocks next to their type (or trait) 68 | - Grouping private items after the `pub` item that uses them. 69 | - The intent is to help people quickly find the "relevant" details, allowing them to "dig deeper" as needed. Or put another way, the `pub` items serve as a table-of-contents. 70 | - The exact order is fuzzy; do what makes sense 71 | 72 | ## Releasing 73 | 74 | Pre-requisites 75 | - Running `cargo login` 76 | - A member of `rust-cli:Maintainers` 77 | - Push permission to the repo 78 | - [`cargo-release`](https://github.com/crate-ci/cargo-release/) 79 | 80 | When we're ready to release, a project owner should do the following 81 | 1. Update the changelog (see `cargo release changes` for ideas) 82 | 2. Determine what the next version is, according to semver 83 | 3. 
Run [`cargo release -x `](https://github.com/crate-ci/cargo-release) 84 | 85 | [issues]: https://github.com/rust-cli/config-rs/issues 86 | [new issue]: https://github.com/rust-cli/config-rs/issues/new 87 | [all issues]: https://github.com/rust-cli/config-rs/issues?utf8=%E2%9C%93&q=is%3Aissue 88 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | resolver = "2" 3 | 4 | [workspace.package] 5 | repository = "https://github.com/rust-cli/config-rs" 6 | license = "MIT OR Apache-2.0" 7 | edition = "2018" 8 | rust-version = "1.75.0" # MSRV 9 | include = [ 10 | "build.rs", 11 | "src/**/*", 12 | "Cargo.toml", 13 | "Cargo.lock", 14 | "LICENSE*", 15 | "README.md", 16 | "examples/**/*" 17 | ] 18 | 19 | [workspace.lints.rust] 20 | rust_2018_idioms = { level = "warn", priority = -1 } 21 | unnameable_types = "warn" 22 | unreachable_pub = "warn" 23 | unsafe_op_in_unsafe_fn = "warn" 24 | unused_lifetimes = "warn" 25 | unused_macro_rules = "warn" 26 | unused_qualifications = "warn" 27 | 28 | [workspace.lints.clippy] 29 | bool_assert_comparison = "allow" 30 | branches_sharing_code = "allow" 31 | checked_conversions = "warn" 32 | collapsible_else_if = "allow" 33 | create_dir = "warn" 34 | dbg_macro = "warn" 35 | debug_assert_with_mut_call = "warn" 36 | doc_markdown = "warn" 37 | empty_enum = "warn" 38 | enum_glob_use = "warn" 39 | expl_impl_clone_on_copy = "warn" 40 | explicit_deref_methods = "warn" 41 | explicit_into_iter_loop = "warn" 42 | fallible_impl_from = "warn" 43 | filter_map_next = "warn" 44 | flat_map_option = "warn" 45 | float_cmp_const = "warn" 46 | fn_params_excessive_bools = "warn" 47 | from_iter_instead_of_collect = "warn" 48 | if_same_then_else = "allow" 49 | implicit_clone = "warn" 50 | imprecise_flops = "warn" 51 | inconsistent_struct_constructor = "warn" 52 | inefficient_to_string = "warn" 53 | infinite_loop = "warn" 54 | invalid_upcast_comparisons = "warn" 55 | large_digit_groups = "warn" 56 | large_stack_arrays = "warn" 57 | large_types_passed_by_value = "warn" 58 | let_and_return = "allow" # sometimes good to name what you are returning 59 | linkedlist = "warn" 60 | lossy_float_literal = "warn" 61 | macro_use_imports = "warn" 62 | mem_forget = "warn" 63 | mutex_integer = "warn" 64 | needless_continue = "allow" 65 | needless_for_each = "warn" 66 | negative_feature_names = "warn" 67 | path_buf_push_overwrite = "warn" 68 | ptr_as_ptr = "warn" 69 | rc_mutex = "warn" 70 | redundant_feature_names = "warn" 71 | ref_option_ref = "warn" 72 | rest_pat_in_fully_bound_structs = "warn" 73 | result_large_err = "allow" 74 | same_functions_in_if_condition = "warn" 75 | self_named_module_files = "warn" 76 | semicolon_if_nothing_returned = "warn" 77 | str_to_string = "warn" 78 | string_add = "warn" 79 | string_add_assign = "warn" 80 | string_lit_as_bytes = "warn" 81 | string_to_string = "warn" 82 | todo = "warn" 83 | trait_duplication_in_bounds = "warn" 84 | uninlined_format_args = "warn" 85 | verbose_file_reads = "warn" 86 | wildcard_imports = "warn" 87 | zero_sized_map_values = "warn" 88 | 89 | [profile.dev] 90 | panic = "abort" 91 | 92 | [profile.release] 93 | panic = "abort" 94 | codegen-units = 1 95 | lto = true 96 | # debug = "line-tables-only" # requires Cargo 1.71 97 | 98 | [package] 99 | name = "config" 100 | version = "0.15.11" 101 | description = "Layered configuration system for Rust applications." 
102 | categories = ["config"]
103 | keywords = ["config", "configuration", "settings", "env", "environment"]
104 | repository.workspace = true
105 | license.workspace = true
106 | edition.workspace = true
107 | rust-version.workspace = true
108 | include.workspace = true
109 | 
110 | [package.metadata.docs.rs]
111 | all-features = true
112 | rustdoc-args = ["--cfg", "docsrs", "--generate-link-to-definition"]
113 | 
114 | [package.metadata.release]
115 | pre-release-replacements = [
116 |   {file="CHANGELOG.md", search="Unreleased", replace="{{version}}", min=1},
117 |   {file="CHANGELOG.md", search="\\.\\.\\.HEAD", replace="...{{tag_name}}", exactly=1},
118 |   {file="CHANGELOG.md", search="ReleaseDate", replace="{{date}}", min=1},
119 |   {file="CHANGELOG.md", search="<!-- next-header -->", replace="<!-- next-header -->\n## [Unreleased] - ReleaseDate\n", exactly=1},
120 |   {file="CHANGELOG.md", search="<!-- next-url -->", replace="<!-- next-url -->\n[Unreleased]: https://github.com/rust-cli/config-rs/compare/{{tag_name}}...HEAD", exactly=1},
121 | ]
122 | 
123 | [features]
124 | default = ["toml", "json", "yaml", "ini", "ron", "json5", "convert-case", "async"]
125 | json = ["serde_json"]
126 | yaml = ["yaml-rust2"]
127 | ini = ["rust-ini"]
128 | json5 = ["json5_rs", "serde/derive"]
129 | convert-case = ["convert_case"]
130 | preserve_order = ["indexmap", "toml?/preserve_order", "serde_json?/preserve_order", "ron?/indexmap"]
131 | async = ["async-trait"]
132 | toml = ["dep:toml"]
133 | 
134 | [dependencies]
135 | serde = "1.0"
136 | 
137 | async-trait = { version = "0.1", optional = true }
138 | toml = { version = "0.8", optional = true, default-features = false, features = ["parse"] }
139 | serde_json = { version = "1.0", optional = true }
140 | yaml-rust2 = { version = "0.10", optional = true }
141 | rust-ini = { version = "0.21", optional = true }
142 | ron = { version = "0.8", optional = true }
143 | json5_rs = { version = "0.4", optional = true, package = "json5" }
144 | indexmap = { version = "2.2", features = ["serde"], optional = true }
145 | convert_case = { version = "0.6", optional = true }
146 | pathdiff = "0.2"
147 | winnow = "0.7.0"
148 | 
149 | [dev-dependencies]
150 | serde_derive = "1.0"
151 | float-cmp = "0.10"
152 | chrono = { version = "0.4", features = ["serde"] }
153 | tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "time"]}
154 | warp = "0.3"
155 | futures = "0.3"
156 | reqwest = "0.12"
157 | 
158 | glob = "0.3"
159 | notify = "7.0"
160 | temp-env = "0.3"
161 | log = { version = "0.4", features = ["serde"] }
162 | snapbox = "0.6.21"
163 | 
164 | [[example]]
165 | name = "async_source"
166 | required-features = ["json", "async"]
167 | 
168 | [lints]
169 | workspace = true
170 | 
--------------------------------------------------------------------------------
/LICENSE-MIT:
--------------------------------------------------------------------------------
1 | Copyright (c) Individual contributors
2 | 
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 | 
10 | The above copyright notice and this permission notice shall be included in all
11 | copies or substantial portions of the Software. 
12 | 
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19 | SOFTWARE.
20 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # config-rs
2 | 
3 | ![Rust](https://img.shields.io/badge/rust-stable-brightgreen.svg)
4 | [![Build Status](https://travis-ci.org/mehcode/config-rs.svg?branch=master)](https://travis-ci.org/mehcode/config-rs)
5 | [![Crates.io](https://img.shields.io/crates/d/config.svg)](https://crates.io/crates/config)
6 | [![Docs.rs](https://docs.rs/config/badge.svg)](https://docs.rs/config)
7 | 
8 | > Layered configuration system for Rust applications (with strong support for [12-factor] applications).
9 | 
10 | [12-factor]: https://12factor.net/config
11 | 
12 | - Set defaults
13 | - Set explicit values (to programmatically override)
14 | - Read from [JSON], [TOML], [YAML], [INI], [RON], [JSON5] files
15 | - Read from environment
16 | - Loosely typed — Configuration values may be read in any supported type, as long as there exists a reasonable conversion
17 | - Access nested fields using a formatted path — Uses a subset of JSONPath; currently supports the child ( `redis.port` ) and subscript operators ( `databases[0].name` )
18 | 
19 | [JSON]: https://github.com/serde-rs/json
20 | [TOML]: https://github.com/toml-lang/toml
21 | [YAML]: https://github.com/Ethiraric/yaml-rust2
22 | [INI]: https://github.com/zonyitoo/rust-ini
23 | [RON]: https://github.com/ron-rs/ron
24 | [JSON5]: https://github.com/callum-oakley/json5-rs
25 | 
26 | Please note that this library cannot be used to write changed configuration
27 | values back to the configuration file(s)!
28 | 
29 | ## Usage
30 | 
31 | ### Feature flags
32 | 
33 | - `ini` - Adds support for reading INI files
34 | - `json` - Adds support for reading JSON files
35 | - `yaml` - Adds support for reading YAML files
36 | - `toml` - Adds support for reading TOML files
37 | - `ron` - Adds support for reading RON files
38 | - `json5` - Adds support for reading JSON5 files
39 | 
40 | ### Support for custom formats
41 | 
42 | The library provides out-of-the-box support for the most common data formats, such as JSON or YAML. In addition, it contains an extensibility point: a `Format` trait that, once implemented, allows seamless integration with the library's APIs for custom, less popular, or proprietary data formats.
43 | 
44 | See the [custom_file_format](https://github.com/mehcode/config-rs/tree/master/examples/custom_file_format) example for more information.
45 | 
46 | ### More
47 | 
48 | See the [documentation](https://docs.rs/config) or [examples](https://github.com/mehcode/config-rs/tree/master/examples) for
49 | more usage information.
50 | 
51 | ## License
52 | 
53 | Licensed under either of
54 | 
55 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or <http://www.apache.org/licenses/LICENSE-2.0>)
56 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or <http://opensource.org/licenses/MIT>)
57 | 
58 | at your option. 
59 | -------------------------------------------------------------------------------- /_typos.toml: -------------------------------------------------------------------------------- 1 | [files] 2 | extend-exclude = [ 3 | "/tests/", 4 | ] 5 | -------------------------------------------------------------------------------- /committed.toml: -------------------------------------------------------------------------------- 1 | style="conventional" 2 | ignore_author_re="(dependabot|renovate)" 3 | merge_commit = false 4 | -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | # Note that all fields that take a lint level have these possible values: 2 | # * deny - An error will be produced and the check will fail 3 | # * warn - A warning will be produced, but the check will not fail 4 | # * allow - No warning or error will be produced, though in some cases a note 5 | # will be 6 | 7 | # Root options 8 | 9 | # The graph table configures how the dependency graph is constructed and thus 10 | # which crates the checks are performed against 11 | [graph] 12 | # If 1 or more target triples (and optionally, target_features) are specified, 13 | # only the specified targets will be checked when running `cargo deny check`. 14 | # This means, if a particular package is only ever used as a target specific 15 | # dependency, such as, for example, the `nix` crate only being used via the 16 | # `target_family = "unix"` configuration, that only having windows targets in 17 | # this list would mean the nix crate, as well as any of its exclusive 18 | # dependencies not shared by any other crates, would be ignored, as the target 19 | # list here is effectively saying which targets you are building for. 20 | targets = [ 21 | # The triple can be any string, but only the target triples built in to 22 | # rustc (as of 1.40) can be checked against actual config expressions 23 | #"x86_64-unknown-linux-musl", 24 | # You can also specify which target_features you promise are enabled for a 25 | # particular target. target_features are currently not validated against 26 | # the actual valid features supported by the target architecture. 27 | #{ triple = "wasm32-unknown-unknown", features = ["atomics"] }, 28 | ] 29 | # When creating the dependency graph used as the source of truth when checks are 30 | # executed, this field can be used to prune crates from the graph, removing them 31 | # from the view of cargo-deny. This is an extremely heavy hammer, as if a crate 32 | # is pruned from the graph, all of its dependencies will also be pruned unless 33 | # they are connected to another crate in the graph that hasn't been pruned, 34 | # so it should be used with care. The identifiers are [Package ID Specifications] 35 | # (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html) 36 | #exclude = [] 37 | # If true, metadata will be collected with `--all-features`. Note that this can't 38 | # be toggled off if true, if you want to conditionally enable `--all-features` it 39 | # is recommended to pass `--all-features` on the cmd line instead 40 | all-features = false 41 | # If true, metadata will be collected with `--no-default-features`. The same 42 | # caveat with `all-features` applies 43 | no-default-features = false 44 | # If set, these feature will be enabled when collecting metadata. If `--features` 45 | # is specified on the cmd line they will take precedence over this option. 
46 | #features = [] 47 | 48 | # The output table provides options for how/if diagnostics are outputted 49 | [output] 50 | # When outputting inclusion graphs in diagnostics that include features, this 51 | # option can be used to specify the depth at which feature edges will be added. 52 | # This option is included since the graphs can be quite large and the addition 53 | # of features from the crate(s) to all of the graph roots can be far too verbose. 54 | # This option can be overridden via `--feature-depth` on the cmd line 55 | feature-depth = 1 56 | 57 | # This section is considered when running `cargo deny check advisories` 58 | # More documentation for the advisories section can be found here: 59 | # https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html 60 | [advisories] 61 | # The path where the advisory databases are cloned/fetched into 62 | #db-path = "$CARGO_HOME/advisory-dbs" 63 | # The url(s) of the advisory databases to use 64 | #db-urls = ["https://github.com/rustsec/advisory-db"] 65 | # A list of advisory IDs to ignore. Note that ignored advisories will still 66 | # output a note when they are encountered. 67 | ignore = [ 68 | #"RUSTSEC-0000-0000", 69 | #{ id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" }, 70 | #"a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish 71 | #{ crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" }, 72 | ] 73 | # If this is true, then cargo deny will use the git executable to fetch advisory database. 74 | # If this is false, then it uses a built-in git library. 75 | # Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support. 76 | # See Git Authentication for more information about setting up git authentication. 77 | #git-fetch-with-cli = true 78 | 79 | # This section is considered when running `cargo deny check licenses` 80 | # More documentation for the licenses section can be found here: 81 | # https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html 82 | [licenses] 83 | # List of explicitly allowed licenses 84 | # See https://spdx.org/licenses/ for list of possible licenses 85 | # [possible values: any SPDX 3.11 short identifier (+ optional exception)]. 86 | allow = [ 87 | "MIT", 88 | "MIT-0", 89 | "Apache-2.0", 90 | "BSD-2-Clause", 91 | "BSD-3-Clause", 92 | "MPL-2.0", 93 | "Unicode-DFS-2016", 94 | "CC0-1.0", 95 | "ISC", 96 | "OpenSSL", 97 | "Zlib", 98 | ] 99 | # The confidence threshold for detecting a license from license text. 100 | # The higher the value, the more closely the license text must be to the 101 | # canonical license text of a valid SPDX license file. 102 | # [possible values: any between 0.0 and 1.0]. 
103 | confidence-threshold = 0.8 104 | # Allow 1 or more licenses on a per-crate basis, so that particular licenses 105 | # aren't accepted for every possible crate as with the normal allow list 106 | exceptions = [ 107 | # Each entry is the crate and version constraint, and its specific allow 108 | # list 109 | #{ allow = ["Zlib"], crate = "adler32" }, 110 | ] 111 | 112 | # Some crates don't have (easily) machine readable licensing information, 113 | # adding a clarification entry for it allows you to manually specify the 114 | # licensing information 115 | [[licenses.clarify]] 116 | # The package spec the clarification applies to 117 | crate = "ring" 118 | # The SPDX expression for the license requirements of the crate 119 | expression = "MIT AND ISC AND OpenSSL" 120 | # One or more files in the crate's source used as the "source of truth" for 121 | # the license expression. If the contents match, the clarification will be used 122 | # when running the license check, otherwise the clarification will be ignored 123 | # and the crate will be checked normally, which may produce warnings or errors 124 | # depending on the rest of your configuration 125 | license-files = [ 126 | # Each entry is a crate relative path, and the (opaque) hash of its contents 127 | { path = "LICENSE", hash = 0xbd0eed23 } 128 | ] 129 | 130 | [licenses.private] 131 | # If true, ignores workspace crates that aren't published, or are only 132 | # published to private registries. 133 | # To see how to mark a crate as unpublished (to the official registry), 134 | # visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field. 135 | ignore = true 136 | # One or more private registries that you might publish crates to, if a crate 137 | # is only published to private registries, and ignore is true, the crate will 138 | # not have its license(s) checked 139 | registries = [ 140 | #"https://sekretz.com/registry 141 | ] 142 | 143 | # This section is considered when running `cargo deny check bans`. 144 | # More documentation about the 'bans' section can be found here: 145 | # https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html 146 | [bans] 147 | # Lint level for when multiple versions of the same crate are detected 148 | multiple-versions = "warn" 149 | # Lint level for when a crate version requirement is `*` 150 | wildcards = "allow" 151 | # The graph highlighting used when creating dotgraphs for crates 152 | # with multiple versions 153 | # * lowest-version - The path to the lowest versioned duplicate is highlighted 154 | # * simplest-path - The path to the version with the fewest edges is highlighted 155 | # * all - Both lowest-version and simplest-path are used 156 | highlight = "all" 157 | # The default lint level for `default` features for crates that are members of 158 | # the workspace that is being checked. This can be overridden by allowing/denying 159 | # `default` on a crate-by-crate basis if desired. 160 | workspace-default-features = "allow" 161 | # The default lint level for `default` features for external crates that are not 162 | # members of the workspace. This can be overridden by allowing/denying `default` 163 | # on a crate-by-crate basis if desired. 164 | external-default-features = "allow" 165 | # List of crates that are allowed. Use with care! 
166 | allow = [ 167 | #"ansi_term@0.11.0", 168 | #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" }, 169 | ] 170 | # List of crates to deny 171 | deny = [ 172 | #"ansi_term@0.11.0", 173 | #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" }, 174 | # Wrapper crates can optionally be specified to allow the crate when it 175 | # is a direct dependency of the otherwise banned crate 176 | #{ crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] }, 177 | ] 178 | 179 | # List of features to allow/deny 180 | # Each entry the name of a crate and a version range. If version is 181 | # not specified, all versions will be matched. 182 | #[[bans.features]] 183 | #crate = "reqwest" 184 | # Features to not allow 185 | #deny = ["json"] 186 | # Features to allow 187 | #allow = [ 188 | # "rustls", 189 | # "__rustls", 190 | # "__tls", 191 | # "hyper-rustls", 192 | # "rustls", 193 | # "rustls-pemfile", 194 | # "rustls-tls-webpki-roots", 195 | # "tokio-rustls", 196 | # "webpki-roots", 197 | #] 198 | # If true, the allowed features must exactly match the enabled feature set. If 199 | # this is set there is no point setting `deny` 200 | #exact = true 201 | 202 | # Certain crates/versions that will be skipped when doing duplicate detection. 203 | skip = [ 204 | #"ansi_term@0.11.0", 205 | #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" }, 206 | ] 207 | # Similarly to `skip` allows you to skip certain crates during duplicate 208 | # detection. Unlike skip, it also includes the entire tree of transitive 209 | # dependencies starting at the specified crate, up to a certain depth, which is 210 | # by default infinite. 211 | skip-tree = [ 212 | #"ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies 213 | #{ crate = "ansi_term@0.11.0", depth = 20 }, 214 | ] 215 | 216 | # This section is considered when running `cargo deny check sources`. 217 | # More documentation about the 'sources' section can be found here: 218 | # https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html 219 | [sources] 220 | # Lint level for what to happen when a crate from a crate registry that is not 221 | # in the allow list is encountered 222 | unknown-registry = "deny" 223 | # Lint level for what to happen when a crate from a git repository that is not 224 | # in the allow list is encountered 225 | unknown-git = "deny" 226 | # List of URLs for allowed crate registries. Defaults to the crates.io index 227 | # if not specified. If it is specified but empty, no registries are allowed. 
228 | allow-registry = ["https://github.com/rust-lang/crates.io-index"]
229 | # List of URLs for allowed Git repositories
230 | allow-git = []
231 | 
232 | [sources.allow-org]
233 | # 1 or more github.com organizations to allow git sources for
234 | github = []
235 | # 1 or more gitlab.com organizations to allow git sources for
236 | gitlab = []
237 | # 1 or more bitbucket.org organizations to allow git sources for
238 | bitbucket = []
239 | 
--------------------------------------------------------------------------------
/examples/async_source/main.rs:
--------------------------------------------------------------------------------
1 | use std::{error::Error, fmt::Debug};
2 | 
3 | use async_trait::async_trait;
4 | use config::{
5 |     builder::AsyncState, AsyncSource, ConfigBuilder, ConfigError, FileFormat, Format, Map,
6 | };
7 | use futures::{select, FutureExt};
8 | use warp::Filter;
9 | 
10 | // The example below presents a sample configuration server and client.
11 | //
12 | // The server serves a simple configuration on an HTTP endpoint.
13 | // The client consumes it using a custom HTTP AsyncSource built on top of reqwest.
14 | 
15 | #[tokio::main]
16 | async fn main() -> Result<(), Box<dyn Error>> {
17 |     select! {
18 |         r = run_server().fuse() => r,
19 |         r = run_client().fuse() => r
20 |     }
21 | }
22 | 
23 | async fn run_server() -> Result<(), Box<dyn Error>> {
24 |     let service = warp::path("configuration").map(|| r#"{ "value" : 123 }"#);
25 | 
26 |     println!("Running server on localhost:5001");
27 | 
28 |     warp::serve(service).bind(([127, 0, 0, 1], 5001)).await;
29 | 
30 |     Ok(())
31 | }
32 | 
33 | async fn run_client() -> Result<(), Box<dyn Error>> {
34 |     // Good enough for an example to allow the server to start
35 |     tokio::time::sleep(tokio::time::Duration::from_secs(3)).await;
36 | 
37 |     let config = ConfigBuilder::<AsyncState>::default()
38 |         .add_async_source(HttpSource {
39 |             uri: "http://localhost:5001/configuration".into(),
40 |             format: FileFormat::Json,
41 |         })
42 |         .build()
43 |         .await?;
44 | 
45 |     println!("Config value is {}", config.get::<String>("value")?);
46 | 
47 |     Ok(())
48 | }
49 | 
50 | // The actual implementation of AsyncSource can be found below
51 | 
52 | #[derive(Debug)]
53 | struct HttpSource<F: Format> {
54 |     uri: String,
55 |     format: F,
56 | }
57 | 
58 | #[async_trait]
59 | impl<F: Format + Send + Sync + Debug> AsyncSource for HttpSource<F> {
60 |     async fn collect(&self) -> Result<Map<String, config::Value>, ConfigError> {
61 |         reqwest::get(&self.uri)
62 |             .await
63 |             .map_err(|e| ConfigError::Foreign(Box::new(e)))? 
// error conversion is possible from custom AsyncSource impls 64 | .text() 65 | .await 66 | .map_err(|e| ConfigError::Foreign(Box::new(e))) 67 | .and_then(|text| { 68 | self.format 69 | .parse(Some(&self.uri), &text) 70 | .map_err(ConfigError::Foreign) 71 | }) 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /examples/custom_file_format/files/private.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpQIBAAKCAQEA7P+5Ow3YfQJJ0W4DhwdJRUWi1cYOen7qQ+XPAtFOdbJcvIZe 3 | T+D+fEENDpkDM+lOE1KtpehW4JOZ13ePLM0phktEf9hT1aB0Zc5LXB3M4YuW+lAW 4 | iXF9moHWxa2DlpyGck7cSlVVKbdljP9AQOzMdTXi3JJUWlnqjeUSINnBqCW21nOh 5 | frUZZbqBKOx7/iEgjdAsR7K5WAyVIXgbIipCAgWWjP3ejUjrl20QpXu06dGQF8O8 6 | S7ztwLwtmdUJ5SBrhiub1Ocjr+DSUvIg8kOzUp9gQiMtV8RGTr4W0Bfr75ZwgAfC 7 | oDNZ+D9S/rlZX0eJJd5rMobiQRE8qBk2oxWcxwIDAQABAoIBAQCtP3MEvGZZW+bi 8 | de2WM7lYLkOOyi2jVkuiPshJYwBcAXrRRdiDxBHEezk0Rp6UwCQW9AWEloeLu9pm 9 | LDw5n/CO/06ftl/ydk0gbuGgARjYd9ZyPUF8T75lyCxcbS8YVmvh+8wFesO6rxpJ 10 | K/6od3Iu7KleXInVUo2oFKBf608pvp/80oSvCeNCK9vh64UUZnm2PfhzD47jYE+u 11 | QEM/Ceb4LZ/6jf3SqXi/PpZu/IfDqc7JaBkuGIh4Zv+EQuSh6MYvkkn/51PmqOvf 12 | KM+bCo8U6sGzvQkMJKDUXFPfTeKCaRFdYgYDm94CCGgaMtMUBt+lm2vG2tNuKH0b 13 | a/J+x1ZBAoGBAPelu0V2T0Wg8WKjI6nbIWRbuPImAp35WyPikchqmi3hpnP68VxG 14 | D9z0TNmfr5TAajKQ11SSReIEiwJPOwq1/5v0xmqYdhyWX2alAQq5xUAfJzMDN2Rg 15 | ftO4qMcNoVeH4wAMwXc1gdRHjqWNZrz381y3Z4K/VWOm+BbG7JrejeE/AoGBAPT+ 16 | DLc///zfBEA94m6/I/78jL/+SsLM7LflPByO7JNrsQTm6mo1DvluGYmE34TP8aTb 17 | dvt5KXb8gpsKS3Z9vD6FJTB0dNrSpTWEPKFTTp/VWTvwHuh8mF/r1KyngDW3IU3q 18 | 7mKkVHMrfnU23qYHODJDnS6WmL3X3tJJAUXDqlp5AoGAR/VlRBrLj/zjBvlGbJ2a 19 | x1GLnPkEe6iwHe5A1A59vGU7+6loJprJEzf9eKLY3w1GDmld2FokajdNuR8Sldsq 20 | acOnP+QLNeVP1UCO2/H86dPjjQQbPVR4pcabbDN+tTNr92C9eokWr3sXbO14c+JM 21 | WZ2FO02jXzBuGBg3Ogz/BvsCgYEA69lCfotTMam0mu+4c2r5CTkxeocgi6Xh4SsC 22 | km+ZGlabJJ/0XWhU0RUH6paK432YIF/SjEbY/x4Z0Y24lgp3VSyyX5JNCHeu6fUy 23 | tQ/Q6hfmfsgryR5hRj5vEAN0bsGsgyk+cqHGVtUxOUAoWWcr11+2CqqZwnD1pjT3 24 | z6SM8+kCgYEA3GPFdb/ILXwPSEFfHE5RGWa2jlns+xVvQTaymR6ZAtLPv2RkBKvw 25 | Hwy8maCmWgw0+U/f8nMUDPVYYa/5Tyk5UzEVhtbAXGYzyY+Nk4IBFZZ+8P95RJBL 26 | 8jqfXxr2ZpYf9mEgZI8v8Pr013R3Vqkpy+B8jlfpvxFdOwSzkY42ur4= 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /examples/custom_file_format/files/public.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PUBLIC KEY----- 2 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7P+5Ow3YfQJJ0W4DhwdJ 3 | RUWi1cYOen7qQ+XPAtFOdbJcvIZeT+D+fEENDpkDM+lOE1KtpehW4JOZ13ePLM0p 4 | hktEf9hT1aB0Zc5LXB3M4YuW+lAWiXF9moHWxa2DlpyGck7cSlVVKbdljP9AQOzM 5 | dTXi3JJUWlnqjeUSINnBqCW21nOhfrUZZbqBKOx7/iEgjdAsR7K5WAyVIXgbIipC 6 | AgWWjP3ejUjrl20QpXu06dGQF8O8S7ztwLwtmdUJ5SBrhiub1Ocjr+DSUvIg8kOz 7 | Up9gQiMtV8RGTr4W0Bfr75ZwgAfCoDNZ+D9S/rlZX0eJJd5rMobiQRE8qBk2oxWc 8 | xwIDAQAB 9 | -----END PUBLIC KEY----- 10 | -------------------------------------------------------------------------------- /examples/custom_file_format/main.rs: -------------------------------------------------------------------------------- 1 | use std::io::{Error, ErrorKind}; 2 | 3 | use config::{Config, File, FileStoredFormat, Format, Map, Value, ValueKind}; 4 | 5 | /// The private and public key sources will be read into their associated variable: 6 | #[derive(serde_derive::Deserialize, Clone, Debug)] 7 | pub struct Settings { 8 | pub private_key: Option, 9 | pub public_key: Option, 
10 | } 11 | 12 | fn main() { 13 | // Sourcing from two separate files for the `Settings` struct,: 14 | let file_public_key = File::new("examples/custom_file_format/files/public.pem", PemFile); 15 | let file_private_key = File::new("examples/custom_file_format/files/private.pem", PemFile); 16 | 17 | // Provide the sources and build the config object: 18 | // Both are marked as optional to avoid failure if the file doesn't exist. 19 | let settings = Config::builder() 20 | .add_source(file_public_key.required(false)) 21 | .add_source(file_private_key.required(false)) 22 | .build() 23 | .unwrap(); 24 | 25 | // Deserialize the config object into your Settings struct: 26 | let settings: Settings = settings.try_deserialize().unwrap(); 27 | println!("{settings:#?}"); 28 | } 29 | 30 | #[derive(Debug, Clone)] 31 | pub struct PemFile; 32 | 33 | impl Format for PemFile { 34 | fn parse( 35 | &self, 36 | uri: Option<&String>, 37 | text: &str, 38 | ) -> Result, Box> { 39 | // Store any valid keys into this map, they'll be merged with other sources into the final config map: 40 | let mut result = Map::new(); 41 | 42 | // Identify the PEM encoded data type by the first occurrence found: 43 | // NOTE: This example is kept simple, multiple or other encoded types are not handled. 44 | let key_type = vec!["PUBLIC", "PRIVATE"] 45 | .into_iter() 46 | .find(|s| text.contains(s)); 47 | let key = match key_type { 48 | Some("PRIVATE") => "private_key", 49 | Some("PUBLIC") => "public_key", 50 | // Otherwise fail with an error message (the filename is implicitly appended): 51 | _ => { 52 | return Err(Box::new(Error::new( 53 | ErrorKind::InvalidData, 54 | "PEM file did not contain a Private or Public key", 55 | ))) 56 | } 57 | }; 58 | 59 | result.insert( 60 | key.to_owned(), 61 | Value::new(uri, ValueKind::String(text.into())), 62 | ); 63 | 64 | Ok(result) 65 | } 66 | } 67 | 68 | // A slice of extensions associated to this format, when an extension 69 | // is omitted from a file source, these will be tried implicitly: 70 | impl FileStoredFormat for PemFile { 71 | fn file_extensions(&self) -> &'static [&'static str] { 72 | &["pem"] 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /examples/custom_str_format/main.rs: -------------------------------------------------------------------------------- 1 | use config::{Config, File, FileStoredFormat, Format, Map, Value, ValueKind}; 2 | 3 | fn main() { 4 | let config = Config::builder() 5 | .add_source(File::from_str("bad", MyFormat)) 6 | .add_source(File::from_str("good", MyFormat)) 7 | .build(); 8 | 9 | match config { 10 | Ok(cfg) => println!("A config: {cfg:#?}"), 11 | Err(e) => println!("An error: {e}"), 12 | } 13 | } 14 | 15 | #[derive(Debug, Clone)] 16 | pub struct MyFormat; 17 | 18 | impl Format for MyFormat { 19 | fn parse( 20 | &self, 21 | uri: Option<&String>, 22 | text: &str, 23 | ) -> Result, Box> { 24 | // Let's assume our format is somewhat malformed, but this is fine 25 | // In real life anything can be used here - nom, serde or other. 
26 | // 27 | // For some more real-life examples refer to format implementation within the library code 28 | let mut result = Map::new(); 29 | 30 | if text == "good" { 31 | result.insert( 32 | "key".to_owned(), 33 | Value::new(uri, ValueKind::String(text.into())), 34 | ); 35 | } else { 36 | println!("Something went wrong in {uri:?}"); 37 | } 38 | 39 | Ok(result) 40 | } 41 | } 42 | 43 | // As strange as it seems for config sourced from a string, legacy demands its sacrifice 44 | // It is only required for File source, custom sources can use Format without caring for extensions 45 | static MY_FORMAT_EXT: Vec<&'static str> = vec![]; 46 | impl FileStoredFormat for MyFormat { 47 | fn file_extensions(&self) -> &'static [&'static str] { 48 | &MY_FORMAT_EXT 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /examples/env-list/main.rs: -------------------------------------------------------------------------------- 1 | use config::Config; 2 | #[derive(Debug, Default, serde_derive::Deserialize, PartialEq, Eq)] 3 | struct AppConfig { 4 | list: Vec, 5 | } 6 | 7 | fn main() { 8 | std::env::set_var("APP_LIST", "Hello World"); 9 | 10 | let config = Config::builder() 11 | .add_source( 12 | config::Environment::with_prefix("APP") 13 | .try_parsing(true) 14 | .separator("_") 15 | .list_separator(" "), 16 | ) 17 | .build() 18 | .unwrap(); 19 | 20 | let app: AppConfig = config.try_deserialize().unwrap(); 21 | 22 | assert_eq!(app.list, vec![String::from("Hello"), String::from("World")]); 23 | 24 | std::env::remove_var("APP_LIST"); 25 | } 26 | -------------------------------------------------------------------------------- /examples/glob/conf/00-default.toml: -------------------------------------------------------------------------------- 1 | debug = false 2 | -------------------------------------------------------------------------------- /examples/glob/conf/05-some.yml: -------------------------------------------------------------------------------- 1 | secret: THIS IS SECRET 2 | debug: true 3 | -------------------------------------------------------------------------------- /examples/glob/conf/99-extra.json: -------------------------------------------------------------------------------- 1 | { 2 | "that": 3, 3 | "this": 1230, 4 | "key": "sdgnjklsdjklgds" 5 | } 6 | -------------------------------------------------------------------------------- /examples/glob/main.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::path::Path; 3 | 4 | use config::{Config, File}; 5 | use glob::glob; 6 | 7 | fn main() { 8 | // Option 1 9 | // -------- 10 | // Gather all conf files from conf/ manually 11 | let settings = Config::builder() 12 | // File::with_name(..) is shorthand for File::from(Path::new(..)) 13 | .add_source(File::with_name("examples/glob/conf/00-default.toml")) 14 | .add_source(File::from(Path::new("examples/glob/conf/05-some.yml"))) 15 | .add_source(File::from(Path::new("examples/glob/conf/99-extra.json"))) 16 | .build() 17 | .unwrap(); 18 | 19 | // Print out our settings (as a HashMap) 20 | println!( 21 | "\n{:?} \n\n-----------", 22 | settings 23 | .try_deserialize::>() 24 | .unwrap() 25 | ); 26 | 27 | // Option 2 28 | // -------- 29 | // Gather all conf files from conf/ manually, but put in 1 merge call. 
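// Note: the whole `Vec` goes into a single `add_source` call below; the listed sources are
// merged in the order given, so the result should match Option 1 above.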
30 | let settings = Config::builder() 31 | .add_source(vec![ 32 | File::with_name("examples/glob/conf/00-default.toml"), 33 | File::from(Path::new("examples/glob/conf/05-some.yml")), 34 | File::from(Path::new("examples/glob/conf/99-extra.json")), 35 | ]) 36 | .build() 37 | .unwrap(); 38 | 39 | // Print out our settings (as a HashMap) 40 | println!( 41 | "\n{:?} \n\n-----------", 42 | settings 43 | .try_deserialize::>() 44 | .unwrap() 45 | ); 46 | 47 | // Option 3 48 | // -------- 49 | // Gather all conf files from conf/ using glob and put in 1 merge call. 50 | let settings = Config::builder() 51 | .add_source( 52 | glob("examples/glob/conf/*") 53 | .unwrap() 54 | .map(|path| File::from(path.unwrap())) 55 | .collect::>(), 56 | ) 57 | .build() 58 | .unwrap(); 59 | 60 | // Print out our settings (as a HashMap) 61 | println!( 62 | "\n{:?} \n\n-----------", 63 | settings 64 | .try_deserialize::>() 65 | .unwrap() 66 | ); 67 | } 68 | -------------------------------------------------------------------------------- /examples/hierarchical-env/config/default.toml: -------------------------------------------------------------------------------- 1 | [database] 2 | url = "postgres://postgres@localhost" 3 | 4 | [sparkpost] 5 | key = "sparkpost-dev-key" 6 | token = "sparkpost-dev-token" 7 | url = "https://api.sparkpost.com" 8 | version = 1 9 | 10 | [twitter] 11 | consumer_token = "twitter-dev-consumer-key" 12 | consumer_secret = "twitter-dev-consumer-secret" 13 | 14 | [braintree] 15 | merchant_id = "braintree-merchant-id" 16 | public_key = "braintree-dev-public-key" 17 | private_key = "braintree-dev-private-key" 18 | -------------------------------------------------------------------------------- /examples/hierarchical-env/config/development.toml: -------------------------------------------------------------------------------- 1 | debug = true 2 | 3 | [database] 4 | echo = true 5 | -------------------------------------------------------------------------------- /examples/hierarchical-env/config/production.toml: -------------------------------------------------------------------------------- 1 | debug = false 2 | 3 | [sparkpost] 4 | key = "sparkpost-prod-key" 5 | token = "sparkpost-prod-token" 6 | 7 | [twitter] 8 | consumer_token = "twitter-prod-consumer-key" 9 | consumer_secret = "twitter-prod-consumer-secret" 10 | 11 | [braintree] 12 | public_key = "braintree-prod-public-key" 13 | private_key = "braintree-prod-private-key" 14 | -------------------------------------------------------------------------------- /examples/hierarchical-env/main.rs: -------------------------------------------------------------------------------- 1 | mod settings; 2 | 3 | use settings::Settings; 4 | 5 | fn main() { 6 | let settings = Settings::new(); 7 | 8 | // Print out our settings 9 | println!("{settings:?}"); 10 | } 11 | -------------------------------------------------------------------------------- /examples/hierarchical-env/settings.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | 3 | use config::{Config, ConfigError, Environment, File}; 4 | use serde_derive::Deserialize; 5 | 6 | #[derive(Debug, Deserialize)] 7 | #[allow(unused)] 8 | struct Database { 9 | url: String, 10 | } 11 | 12 | #[derive(Debug, Deserialize)] 13 | #[allow(unused)] 14 | struct Sparkpost { 15 | key: String, 16 | token: String, 17 | url: String, 18 | version: u8, 19 | } 20 | 21 | #[derive(Debug, Deserialize)] 22 | #[allow(unused)] 23 | struct Twitter { 24 | consumer_token: String, 25 | 
consumer_secret: String, 26 | } 27 | 28 | #[derive(Debug, Deserialize)] 29 | #[allow(unused)] 30 | struct Braintree { 31 | merchant_id: String, 32 | public_key: String, 33 | private_key: String, 34 | } 35 | 36 | #[derive(Debug, Deserialize)] 37 | #[allow(unused)] 38 | pub(crate) struct Settings { 39 | debug: bool, 40 | database: Database, 41 | sparkpost: Sparkpost, 42 | twitter: Twitter, 43 | braintree: Braintree, 44 | } 45 | 46 | impl Settings { 47 | pub(crate) fn new() -> Result { 48 | let run_mode = env::var("RUN_MODE").unwrap_or_else(|_| "development".into()); 49 | 50 | let s = Config::builder() 51 | // Start off by merging in the "default" configuration file 52 | .add_source(File::with_name("examples/hierarchical-env/config/default")) 53 | // Add in the current environment file 54 | // Default to 'development' env 55 | // Note that this file is _optional_ 56 | .add_source( 57 | File::with_name(&format!("examples/hierarchical-env/config/{run_mode}")) 58 | .required(false), 59 | ) 60 | // Add in a local configuration file 61 | // This file shouldn't be checked in to git 62 | .add_source(File::with_name("examples/hierarchical-env/config/local").required(false)) 63 | // Add in settings from the environment (with a prefix of APP) 64 | // Eg.. `APP_DEBUG=1 ./target/app` would set the `debug` key 65 | .add_source(Environment::with_prefix("app")) 66 | // You may also programmatically change settings 67 | .set_override("database.url", "postgres://")? 68 | .build()?; 69 | 70 | // Now that we're done, let's access our configuration 71 | println!("debug: {:?}", s.get_bool("debug")); 72 | println!("database: {:?}", s.get::("database.url")); 73 | 74 | // You can deserialize (and thus freeze) the entire configuration as 75 | s.try_deserialize() 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /examples/simple/Settings.toml: -------------------------------------------------------------------------------- 1 | debug = false 2 | priority = 32 3 | key = "189rjfadoisfj8923fjio" 4 | -------------------------------------------------------------------------------- /examples/simple/main.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use config::Config; 4 | 5 | fn main() { 6 | let settings = Config::builder() 7 | // Add in `./Settings.toml` 8 | .add_source(config::File::with_name("examples/simple/Settings")) 9 | // Add in settings from the environment (with a prefix of APP) 10 | // Eg.. 
`APP_DEBUG=1 ./target/app` would set the `debug` key 11 | .add_source(config::Environment::with_prefix("APP")) 12 | .build() 13 | .unwrap(); 14 | 15 | // Print out our settings (as a HashMap) 16 | println!( 17 | "{:?}", 18 | settings 19 | .try_deserialize::>() 20 | .unwrap() 21 | ); 22 | } 23 | -------------------------------------------------------------------------------- /examples/static_env.rs: -------------------------------------------------------------------------------- 1 | use std::sync::OnceLock; 2 | 3 | use config::Config; 4 | 5 | fn config() -> &'static Config { 6 | static CONFIG: OnceLock = OnceLock::new(); 7 | CONFIG.get_or_init(|| { 8 | Config::builder() 9 | .add_source(config::Environment::with_prefix("APP_NAME").separator("_")) 10 | .build() 11 | .unwrap() 12 | }) 13 | } 14 | 15 | /// Get a configuration value from the static configuration object 16 | pub fn get<'a, T: serde::Deserialize<'a>>(key: &str) -> T { 17 | // You shouldn't probably do it like that and actually handle that error that might happen 18 | // here, but for the sake of simplicity, we do it like this here 19 | config().get::(key).unwrap() 20 | } 21 | 22 | fn main() { 23 | println!("{:?}", get::("foo")); 24 | } 25 | -------------------------------------------------------------------------------- /examples/watch/Settings.toml: -------------------------------------------------------------------------------- 1 | debug = false 2 | port = 3223 3 | host = "0.0.0.0" 4 | -------------------------------------------------------------------------------- /examples/watch/main.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::path::Path; 3 | use std::sync::mpsc::channel; 4 | use std::sync::OnceLock; 5 | use std::sync::RwLock; 6 | use std::time::Duration; 7 | 8 | use config::{Config, File}; 9 | use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher}; 10 | 11 | fn settings() -> &'static RwLock { 12 | static CONFIG: OnceLock> = OnceLock::new(); 13 | CONFIG.get_or_init(|| { 14 | let settings = load(); 15 | 16 | RwLock::new(settings) 17 | }) 18 | } 19 | 20 | fn refresh() { 21 | *settings().write().unwrap() = load(); 22 | } 23 | 24 | fn load() -> Config { 25 | Config::builder() 26 | .add_source(File::with_name("examples/watch/Settings.toml")) 27 | .build() 28 | .unwrap() 29 | } 30 | 31 | fn show() { 32 | println!( 33 | " * Settings :: \n\x1b[31m{:?}\x1b[0m", 34 | settings() 35 | .read() 36 | .unwrap() 37 | .clone() 38 | .try_deserialize::>() 39 | .unwrap() 40 | ); 41 | } 42 | 43 | fn watch() -> ! { 44 | // Create a channel to receive the events. 45 | let (tx, rx) = channel(); 46 | 47 | // Automatically select the best implementation for your platform. 48 | // You can also access each implementation directly e.g. INotifyWatcher. 49 | let mut watcher: RecommendedWatcher = Watcher::new( 50 | tx, 51 | notify::Config::default().with_poll_interval(Duration::from_secs(2)), 52 | ) 53 | .unwrap(); 54 | 55 | // Add a path to be watched. All files and directories at that path and 56 | // below will be monitored for changes. 57 | watcher 58 | .watch( 59 | Path::new("examples/watch/Settings.toml"), 60 | RecursiveMode::NonRecursive, 61 | ) 62 | .unwrap(); 63 | 64 | // This is a simple loop, but you may want to use more complex logic here, 65 | // for example to handle I/O. 66 | loop { 67 | match rx.recv() { 68 | Ok(Ok(Event { 69 | kind: notify::event::EventKind::Modify(_), 70 | .. 
71 | })) => { 72 | println!(" * Settings.toml written; refreshing configuration ..."); 73 | refresh(); 74 | show(); 75 | } 76 | 77 | Err(e) => println!("watch error: {e:?}"), 78 | 79 | _ => { 80 | // Ignore event 81 | } 82 | } 83 | } 84 | } 85 | 86 | fn main() { 87 | // This is just an example of what could be done, today 88 | // We do want this to be built-in to config-rs at some point 89 | // Feel free to take a crack at a PR 90 | 91 | show(); 92 | watch(); 93 | } 94 | -------------------------------------------------------------------------------- /release.toml: -------------------------------------------------------------------------------- 1 | owners = ["github:rust-cli:Maintainers"] 2 | dependent-version = "fix" 3 | allow-branch = ["main"] 4 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Debug; 2 | 3 | use serde::de::Deserialize; 4 | use serde::ser::Serialize; 5 | 6 | use crate::builder::{ConfigBuilder, DefaultState}; 7 | use crate::error::{ConfigError, Result}; 8 | use crate::map::Map; 9 | use crate::path; 10 | use crate::ser::ConfigSerializer; 11 | use crate::source::Source; 12 | use crate::value::{Table, Value}; 13 | 14 | /// A prioritized configuration repository. 15 | /// 16 | /// It maintains a set of configuration sources, fetches values to populate those, and provides 17 | /// them according to the source's priority. 18 | #[derive(Clone, Debug)] 19 | pub struct Config { 20 | defaults: Map, 21 | overrides: Map, 22 | sources: Vec>, 23 | 24 | /// Root of the cached configuration. 25 | pub cache: Value, 26 | } 27 | 28 | impl Default for Config { 29 | fn default() -> Self { 30 | Self { 31 | defaults: Default::default(), 32 | overrides: Default::default(), 33 | sources: Default::default(), 34 | cache: Value::new(None, Table::new()), 35 | } 36 | } 37 | } 38 | 39 | impl Config { 40 | pub(crate) fn new(value: Value) -> Self { 41 | Self { 42 | cache: value, 43 | ..Self::default() 44 | } 45 | } 46 | 47 | /// Creates new [`ConfigBuilder`] instance 48 | pub fn builder() -> ConfigBuilder { 49 | ConfigBuilder::::default() 50 | } 51 | 52 | /// Refresh the configuration cache with fresh 53 | /// data from added sources. 54 | /// 55 | /// Configuration is automatically refreshed after a mutation 56 | /// operation (`set`, `merge`, `set_default`, etc.). 57 | fn refresh(&mut self) -> Result<&mut Self> { 58 | self.cache = { 59 | let mut cache: Value = Map::::new().into(); 60 | 61 | // Add defaults 62 | for (key, val) in &self.defaults { 63 | key.set(&mut cache, val.clone()); 64 | } 65 | 66 | // Add sources 67 | self.sources.collect_to(&mut cache)?; 68 | 69 | // Add overrides 70 | for (key, val) in &self.overrides { 71 | key.set(&mut cache, val.clone()); 72 | } 73 | 74 | cache 75 | }; 76 | 77 | Ok(self) 78 | } 79 | 80 | /// Set an overwrite 81 | /// 82 | /// This function sets an overwrite value. 
83 | /// The overwrite `value` is written to the `key` location on every `refresh()` 84 | /// 85 | /// # Warning 86 | /// 87 | /// Errors if config is frozen 88 | pub(crate) fn set(&mut self, key: &str, value: T) -> Result<&mut Self> 89 | where 90 | T: Into, 91 | { 92 | self.overrides.insert(key.parse()?, value.into()); 93 | 94 | self.refresh() 95 | } 96 | 97 | fn get_value(&self, key: &str) -> Result { 98 | // Parse the key into a path expression 99 | let expr: path::Expression = key.parse()?; 100 | 101 | // Traverse the cache using the path to (possibly) retrieve a value 102 | let value = expr.get(&self.cache).cloned(); 103 | 104 | value.ok_or_else(|| ConfigError::NotFound(key.into())) 105 | } 106 | 107 | pub fn get<'de, T: Deserialize<'de>>(&self, key: &str) -> Result { 108 | self.get_value(key).and_then(|value| { 109 | // Deserialize the received value into the requested type 110 | T::deserialize(value).map_err(|e| e.extend_with_key(key)) 111 | }) 112 | } 113 | 114 | pub fn get_string(&self, key: &str) -> Result { 115 | self.get_value(key) 116 | .and_then(|value| value.into_string().map_err(|e| e.extend_with_key(key))) 117 | } 118 | 119 | pub fn get_int(&self, key: &str) -> Result { 120 | self.get_value(key) 121 | .and_then(|value| value.into_int().map_err(|e| e.extend_with_key(key))) 122 | } 123 | 124 | pub fn get_float(&self, key: &str) -> Result { 125 | self.get_value(key) 126 | .and_then(|value| value.into_float().map_err(|e| e.extend_with_key(key))) 127 | } 128 | 129 | pub fn get_bool(&self, key: &str) -> Result { 130 | self.get_value(key) 131 | .and_then(|value| value.into_bool().map_err(|e| e.extend_with_key(key))) 132 | } 133 | 134 | pub fn get_table(&self, key: &str) -> Result> { 135 | self.get_value(key) 136 | .and_then(|value| value.into_table().map_err(|e| e.extend_with_key(key))) 137 | } 138 | 139 | pub fn get_array(&self, key: &str) -> Result> { 140 | self.get_value(key) 141 | .and_then(|value| value.into_array().map_err(|e| e.extend_with_key(key))) 142 | } 143 | 144 | /// Attempt to deserialize the entire configuration into the requested type. 145 | pub fn try_deserialize<'de, T: Deserialize<'de>>(self) -> Result { 146 | T::deserialize(self) 147 | } 148 | 149 | /// Attempt to serialize the entire configuration from the given type. 
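///
/// A minimal sketch of how this can be used (the `AppSettings` struct is illustrative,
/// not part of the library):
///
/// ```rust,ignore
/// #[derive(serde::Serialize)]
/// struct AppSettings {
///     debug: bool,
/// }
///
/// let config = config::Config::try_from(&AppSettings { debug: true })
///     .expect("AppSettings serializes into a configuration");
/// ```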
150 | pub fn try_from(from: &T) -> Result { 151 | let mut serializer = ConfigSerializer::default(); 152 | from.serialize(&mut serializer)?; 153 | Ok(serializer.output) 154 | } 155 | } 156 | 157 | impl Source for Config { 158 | fn clone_into_box(&self) -> Box { 159 | Box::new((*self).clone()) 160 | } 161 | 162 | fn collect(&self) -> Result> { 163 | self.cache.clone().into_table() 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | use std::fmt; 3 | use std::result; 4 | 5 | use serde::de; 6 | use serde::ser; 7 | 8 | #[allow(unnameable_types)] // Unsure if/how to expose this 9 | #[derive(Debug)] 10 | pub enum Unexpected { 11 | Bool(bool), 12 | I64(i64), 13 | I128(i128), 14 | U64(u64), 15 | U128(u128), 16 | Float(f64), 17 | Str(String), 18 | Unit, 19 | Seq, 20 | Map, 21 | } 22 | 23 | impl fmt::Display for Unexpected { 24 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> result::Result<(), fmt::Error> { 25 | match *self { 26 | Unexpected::Bool(b) => write!(f, "boolean `{b}`"), 27 | Unexpected::I64(i) => write!(f, "64-bit integer `{i}`"), 28 | Unexpected::I128(i) => write!(f, "128-bit integer `{i}`"), 29 | Unexpected::U64(i) => write!(f, "64-bit unsigned integer `{i}`"), 30 | Unexpected::U128(i) => write!(f, "128-bit unsigned integer `{i}`"), 31 | Unexpected::Float(v) => write!(f, "floating point `{v}`"), 32 | Unexpected::Str(ref s) => write!(f, "string {s:?}"), 33 | Unexpected::Unit => write!(f, "unit value"), 34 | Unexpected::Seq => write!(f, "sequence"), 35 | Unexpected::Map => write!(f, "map"), 36 | } 37 | } 38 | } 39 | 40 | /// Represents all possible errors that can occur when working with 41 | /// configuration. 42 | #[non_exhaustive] 43 | pub enum ConfigError { 44 | /// Configuration is frozen and no further mutations can be made. 45 | Frozen, 46 | 47 | /// Configuration property was not found 48 | NotFound(String), 49 | 50 | /// Configuration path could not be parsed. 51 | PathParse { cause: Box }, 52 | 53 | /// Configuration could not be parsed from file. 54 | FileParse { 55 | /// The URI used to access the file (if not loaded from a string). 56 | /// Example: `/path/to/config.json` 57 | uri: Option, 58 | 59 | /// The captured error from attempting to parse the file in its desired format. 60 | /// This is the actual error object from the library used for the parsing. 61 | cause: Box, 62 | }, 63 | 64 | /// Value could not be converted into the requested type. 65 | Type { 66 | /// The URI that references the source that the value came from. 67 | /// Example: `/path/to/config.json` or `Environment` or `etcd://localhost` 68 | // TODO: Why is this called Origin but FileParse has a uri field? 69 | origin: Option, 70 | 71 | /// What we found when parsing the value 72 | unexpected: Unexpected, 73 | 74 | /// What was expected when parsing the value 75 | expected: &'static str, 76 | 77 | /// The key in the configuration hash of this value (if available where the 78 | /// error is generated). 79 | key: Option, 80 | }, 81 | 82 | /// Custom message 83 | At { 84 | /// Error being extended with a path 85 | error: Box, 86 | 87 | /// The URI that references the source that the value came from. 88 | /// Example: `/path/to/config.json` or `Environment` or `etcd://localhost` 89 | // TODO: Why is this called Origin but FileParse has a uri field? 
90 | origin: Option, 91 | 92 | /// The key in the configuration hash of this value (if available where the 93 | /// error is generated). 94 | key: Option, 95 | }, 96 | 97 | /// Custom message 98 | Message(String), 99 | 100 | /// Unadorned error from a foreign origin. 101 | Foreign(Box), 102 | } 103 | 104 | impl ConfigError { 105 | // FIXME: pub(crate) 106 | #[doc(hidden)] 107 | pub fn invalid_type( 108 | origin: Option, 109 | unexpected: Unexpected, 110 | expected: &'static str, 111 | ) -> Self { 112 | Self::Type { 113 | origin, 114 | unexpected, 115 | expected, 116 | key: None, 117 | } 118 | } 119 | 120 | // Have a proper error fire if the root of a file is ever not a Table 121 | // TODO: for now only json5 checked, need to finish others 122 | #[doc(hidden)] 123 | pub fn invalid_root(origin: Option<&String>, unexpected: Unexpected) -> Box { 124 | Box::new(Self::Type { 125 | origin: origin.cloned(), 126 | unexpected, 127 | expected: "a map", 128 | key: None, 129 | }) 130 | } 131 | 132 | // FIXME: pub(crate) 133 | #[doc(hidden)] 134 | #[must_use] 135 | pub fn extend_with_key(self, key: &str) -> Self { 136 | match self { 137 | Self::Type { 138 | origin, 139 | unexpected, 140 | expected, 141 | .. 142 | } => Self::Type { 143 | origin, 144 | unexpected, 145 | expected, 146 | key: Some(key.into()), 147 | }, 148 | 149 | Self::At { origin, error, .. } => Self::At { 150 | error, 151 | origin, 152 | key: Some(key.into()), 153 | }, 154 | 155 | other => Self::At { 156 | error: Box::new(other), 157 | origin: None, 158 | key: Some(key.into()), 159 | }, 160 | } 161 | } 162 | 163 | #[must_use] 164 | fn prepend(self, segment: &str, add_dot: bool) -> Self { 165 | let concat = |key: Option| { 166 | let key = key.unwrap_or_default(); 167 | let dot = if add_dot && key.as_bytes().first().unwrap_or(&b'[') != &b'[' { 168 | "." 169 | } else { 170 | "" 171 | }; 172 | format!("{segment}{dot}{key}") 173 | }; 174 | match self { 175 | Self::Type { 176 | origin, 177 | unexpected, 178 | expected, 179 | key, 180 | } => Self::Type { 181 | origin, 182 | unexpected, 183 | expected, 184 | key: Some(concat(key)), 185 | }, 186 | Self::At { error, origin, key } => Self::At { 187 | error, 188 | origin, 189 | key: Some(concat(key)), 190 | }, 191 | Self::NotFound(key) => Self::NotFound(concat(Some(key))), 192 | other => Self::At { 193 | error: Box::new(other), 194 | origin: None, 195 | key: Some(concat(None)), 196 | }, 197 | } 198 | } 199 | 200 | #[must_use] 201 | pub(crate) fn prepend_key(self, key: &str) -> Self { 202 | self.prepend(key, true) 203 | } 204 | 205 | #[must_use] 206 | pub(crate) fn prepend_index(self, idx: usize) -> Self { 207 | self.prepend(&format!("[{idx}]"), false) 208 | } 209 | } 210 | 211 | /// Alias for a `Result` with the error type set to `ConfigError`. 212 | pub(crate) type Result = result::Result; 213 | 214 | // Forward Debug to Display for readable panic! 
messages 215 | impl fmt::Debug for ConfigError { 216 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 217 | write!(f, "{}", *self) 218 | } 219 | } 220 | 221 | impl fmt::Display for ConfigError { 222 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 223 | match *self { 224 | ConfigError::Frozen => write!(f, "configuration is frozen"), 225 | 226 | ConfigError::PathParse { ref cause } => write!(f, "{cause}"), 227 | 228 | ConfigError::Message(ref s) => write!(f, "{s}"), 229 | 230 | ConfigError::Foreign(ref cause) => write!(f, "{cause}"), 231 | 232 | ConfigError::NotFound(ref key) => { 233 | write!(f, "configuration property {key:?} not found") 234 | } 235 | 236 | ConfigError::Type { 237 | ref origin, 238 | ref unexpected, 239 | expected, 240 | ref key, 241 | } => { 242 | write!(f, "invalid type: {unexpected}, expected {expected}")?; 243 | 244 | if let Some(ref key) = *key { 245 | write!(f, " for key `{key}`")?; 246 | } 247 | 248 | if let Some(ref origin) = *origin { 249 | write!(f, " in {origin}")?; 250 | } 251 | 252 | Ok(()) 253 | } 254 | 255 | ConfigError::At { 256 | ref error, 257 | ref origin, 258 | ref key, 259 | } => { 260 | write!(f, "{error}")?; 261 | 262 | if let Some(ref key) = *key { 263 | write!(f, " for key `{key}`")?; 264 | } 265 | 266 | if let Some(ref origin) = *origin { 267 | write!(f, " in {origin}")?; 268 | } 269 | 270 | Ok(()) 271 | } 272 | 273 | ConfigError::FileParse { ref cause, ref uri } => { 274 | write!(f, "{cause}")?; 275 | 276 | if let Some(ref uri) = *uri { 277 | write!(f, " in {uri}")?; 278 | } 279 | 280 | Ok(()) 281 | } 282 | } 283 | } 284 | } 285 | 286 | impl Error for ConfigError {} 287 | 288 | impl de::Error for ConfigError { 289 | fn custom(msg: T) -> Self { 290 | Self::Message(msg.to_string()) 291 | } 292 | } 293 | 294 | impl ser::Error for ConfigError { 295 | fn custom(msg: T) -> Self { 296 | Self::Message(msg.to_string()) 297 | } 298 | } 299 | -------------------------------------------------------------------------------- /src/file/format/ini.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | 3 | use ini::Ini; 4 | 5 | use crate::map::Map; 6 | use crate::value::{Value, ValueKind}; 7 | 8 | pub(crate) fn parse( 9 | uri: Option<&String>, 10 | text: &str, 11 | ) -> Result, Box> { 12 | let mut map: Map = Map::new(); 13 | let i = Ini::load_from_str(text)?; 14 | for (sec, prop) in i.iter() { 15 | match sec { 16 | Some(sec) => { 17 | let mut sec_map: Map = Map::new(); 18 | for (k, v) in prop.iter() { 19 | sec_map.insert( 20 | k.to_owned(), 21 | Value::new(uri, ValueKind::String(v.to_owned())), 22 | ); 23 | } 24 | map.insert(sec.to_owned(), Value::new(uri, ValueKind::Table(sec_map))); 25 | } 26 | None => { 27 | for (k, v) in prop.iter() { 28 | map.insert( 29 | k.to_owned(), 30 | Value::new(uri, ValueKind::String(v.to_owned())), 31 | ); 32 | } 33 | } 34 | } 35 | } 36 | Ok(map) 37 | } 38 | -------------------------------------------------------------------------------- /src/file/format/json.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | 3 | use crate::format; 4 | use crate::map::Map; 5 | use crate::value::{Value, ValueKind}; 6 | 7 | pub(crate) fn parse( 8 | uri: Option<&String>, 9 | text: &str, 10 | ) -> Result, Box> { 11 | // Parse a JSON object value from the text 12 | let value = from_json_value(uri, &serde_json::from_str(text)?); 13 | format::extract_root_table(uri, value) 14 | } 15 | 16 | fn 
from_json_value(uri: Option<&String>, value: &serde_json::Value) -> Value { 17 | match *value { 18 | serde_json::Value::String(ref value) => Value::new(uri, ValueKind::String(value.clone())), 19 | 20 | serde_json::Value::Number(ref value) => { 21 | if let Some(value) = value.as_i64() { 22 | Value::new(uri, ValueKind::I64(value)) 23 | } else if let Some(value) = value.as_f64() { 24 | Value::new(uri, ValueKind::Float(value)) 25 | } else { 26 | unreachable!(); 27 | } 28 | } 29 | 30 | serde_json::Value::Bool(value) => Value::new(uri, ValueKind::Boolean(value)), 31 | 32 | serde_json::Value::Object(ref table) => { 33 | let mut m = Map::new(); 34 | 35 | for (key, value) in table { 36 | m.insert(key.clone(), from_json_value(uri, value)); 37 | } 38 | 39 | Value::new(uri, ValueKind::Table(m)) 40 | } 41 | 42 | serde_json::Value::Array(ref array) => { 43 | let mut l = Vec::new(); 44 | 45 | for value in array { 46 | l.push(from_json_value(uri, value)); 47 | } 48 | 49 | Value::new(uri, ValueKind::Array(l)) 50 | } 51 | 52 | serde_json::Value::Null => Value::new(uri, ValueKind::Nil), 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/file/format/json5.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | 3 | use crate::format; 4 | use crate::map::Map; 5 | use crate::value::{Value, ValueKind}; 6 | 7 | #[derive(serde::Deserialize, Debug)] 8 | #[serde(untagged)] 9 | pub(crate) enum Val { 10 | Null, 11 | Boolean(bool), 12 | Integer(i64), 13 | Float(f64), 14 | String(String), 15 | Array(Vec), 16 | Object(Map), 17 | } 18 | 19 | pub(crate) fn parse( 20 | uri: Option<&String>, 21 | text: &str, 22 | ) -> Result, Box> { 23 | let value = from_json5_value(uri, json5_rs::from_str::(text)?); 24 | format::extract_root_table(uri, value) 25 | } 26 | 27 | fn from_json5_value(uri: Option<&String>, value: Val) -> Value { 28 | let vk = match value { 29 | Val::Null => ValueKind::Nil, 30 | Val::String(v) => ValueKind::String(v), 31 | Val::Integer(v) => ValueKind::I64(v), 32 | Val::Float(v) => ValueKind::Float(v), 33 | Val::Boolean(v) => ValueKind::Boolean(v), 34 | Val::Object(table) => { 35 | let m = table 36 | .into_iter() 37 | .map(|(k, v)| (k, from_json5_value(uri, v))) 38 | .collect(); 39 | 40 | ValueKind::Table(m) 41 | } 42 | 43 | Val::Array(array) => { 44 | let l = array 45 | .into_iter() 46 | .map(|v| from_json5_value(uri, v)) 47 | .collect(); 48 | 49 | ValueKind::Array(l) 50 | } 51 | }; 52 | 53 | Value::new(uri, vk) 54 | } 55 | -------------------------------------------------------------------------------- /src/file/format/mod.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::error::Error; 3 | use std::sync::OnceLock; 4 | 5 | use crate::map::Map; 6 | use crate::{file::FileStoredFormat, value::Value, Format}; 7 | 8 | #[cfg(feature = "toml")] 9 | mod toml; 10 | 11 | #[cfg(feature = "json")] 12 | mod json; 13 | 14 | #[cfg(feature = "yaml")] 15 | mod yaml; 16 | 17 | #[cfg(feature = "ini")] 18 | mod ini; 19 | 20 | #[cfg(feature = "ron")] 21 | mod ron; 22 | 23 | #[cfg(feature = "json5")] 24 | mod json5; 25 | 26 | /// File formats provided by the library. 27 | /// 28 | /// Although it is possible to define custom formats using [`Format`] trait it is recommended to use `FileFormat` if possible. 
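///
/// A minimal sketch (assumes the `toml` feature is enabled):
///
/// ```rust,ignore
/// use config::{Config, File, FileFormat};
///
/// let config = Config::builder()
///     .add_source(File::from_str("debug = true", FileFormat::Toml))
///     .build()
///     .expect("inline TOML parses");
/// ```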
29 | #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] 30 | #[non_exhaustive] 31 | pub enum FileFormat { 32 | /// TOML (parsed with toml) 33 | #[cfg(feature = "toml")] 34 | Toml, 35 | 36 | /// JSON (parsed with `serde_json`) 37 | #[cfg(feature = "json")] 38 | Json, 39 | 40 | /// YAML (parsed with `yaml_rust2`) 41 | #[cfg(feature = "yaml")] 42 | Yaml, 43 | 44 | /// INI (parsed with `rust_ini`) 45 | #[cfg(feature = "ini")] 46 | Ini, 47 | 48 | /// RON (parsed with ron) 49 | #[cfg(feature = "ron")] 50 | Ron, 51 | 52 | /// JSON5 (parsed with json5) 53 | #[cfg(feature = "json5")] 54 | Json5, 55 | } 56 | 57 | pub(crate) fn all_extensions() -> &'static HashMap> { 58 | #![allow(unused_mut)] // If no features are used, there is an "unused mut" warning in `all_extensions` 59 | 60 | static ALL_EXTENSIONS: OnceLock>> = OnceLock::new(); 61 | ALL_EXTENSIONS.get_or_init(|| { 62 | let mut formats: HashMap> = HashMap::new(); 63 | 64 | #[cfg(feature = "toml")] 65 | formats.insert(FileFormat::Toml, vec!["toml"]); 66 | 67 | #[cfg(feature = "json")] 68 | formats.insert(FileFormat::Json, vec!["json"]); 69 | 70 | #[cfg(feature = "yaml")] 71 | formats.insert(FileFormat::Yaml, vec!["yaml", "yml"]); 72 | 73 | #[cfg(feature = "ini")] 74 | formats.insert(FileFormat::Ini, vec!["ini"]); 75 | 76 | #[cfg(feature = "ron")] 77 | formats.insert(FileFormat::Ron, vec!["ron"]); 78 | 79 | #[cfg(feature = "json5")] 80 | formats.insert(FileFormat::Json5, vec!["json5"]); 81 | 82 | formats 83 | }) 84 | } 85 | 86 | impl FileFormat { 87 | pub(crate) fn extensions(&self) -> &'static [&'static str] { 88 | // It should not be possible for this to fail 89 | // A FileFormat would need to be declared without being added to the 90 | // all_extensions map. 91 | all_extensions().get(self).unwrap() 92 | } 93 | 94 | pub(crate) fn parse( 95 | &self, 96 | uri: Option<&String>, 97 | text: &str, 98 | ) -> Result, Box> { 99 | match self { 100 | #[cfg(feature = "toml")] 101 | FileFormat::Toml => toml::parse(uri, text), 102 | 103 | #[cfg(feature = "json")] 104 | FileFormat::Json => json::parse(uri, text), 105 | 106 | #[cfg(feature = "yaml")] 107 | FileFormat::Yaml => yaml::parse(uri, text), 108 | 109 | #[cfg(feature = "ini")] 110 | FileFormat::Ini => ini::parse(uri, text), 111 | 112 | #[cfg(feature = "ron")] 113 | FileFormat::Ron => ron::parse(uri, text), 114 | 115 | #[cfg(feature = "json5")] 116 | FileFormat::Json5 => json5::parse(uri, text), 117 | 118 | #[cfg(all( 119 | not(feature = "toml"), 120 | not(feature = "json"), 121 | not(feature = "yaml"), 122 | not(feature = "ini"), 123 | not(feature = "ron"), 124 | not(feature = "json5"), 125 | ))] 126 | _ => unreachable!("No features are enabled, this library won't work without features"), 127 | } 128 | } 129 | } 130 | 131 | impl Format for FileFormat { 132 | fn parse( 133 | &self, 134 | uri: Option<&String>, 135 | text: &str, 136 | ) -> Result, Box> { 137 | self.parse(uri, text) 138 | } 139 | } 140 | 141 | impl FileStoredFormat for FileFormat { 142 | fn file_extensions(&self) -> &'static [&'static str] { 143 | self.extensions() 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /src/file/format/ron.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | 3 | use crate::format; 4 | use crate::map::Map; 5 | use crate::value::{Value, ValueKind}; 6 | 7 | pub(crate) fn parse( 8 | uri: Option<&String>, 9 | text: &str, 10 | ) -> Result, Box> { 11 | let value = from_ron_value(uri, 
ron::from_str(text)?)?; 12 | format::extract_root_table(uri, value) 13 | } 14 | 15 | fn from_ron_value( 16 | uri: Option<&String>, 17 | value: ron::Value, 18 | ) -> Result> { 19 | let kind = match value { 20 | ron::Value::Option(value) => match value { 21 | Some(value) => from_ron_value(uri, *value)?.kind, 22 | None => ValueKind::Nil, 23 | }, 24 | 25 | ron::Value::Unit => ValueKind::Nil, 26 | 27 | ron::Value::Bool(value) => ValueKind::Boolean(value), 28 | 29 | ron::Value::Number(value) => match value { 30 | ron::Number::Float(value) => ValueKind::Float(value.get()), 31 | ron::Number::Integer(value) => ValueKind::I64(value), 32 | }, 33 | 34 | ron::Value::Char(value) => ValueKind::String(value.to_string()), 35 | 36 | ron::Value::String(value) => ValueKind::String(value), 37 | 38 | ron::Value::Seq(values) => { 39 | let array = values 40 | .into_iter() 41 | .map(|value| from_ron_value(uri, value)) 42 | .collect::, _>>()?; 43 | 44 | ValueKind::Array(array) 45 | } 46 | 47 | ron::Value::Map(values) => { 48 | let map = values 49 | .iter() 50 | .map(|(key, value)| -> Result<_, Box> { 51 | let key = key.clone().into_rust::()?; 52 | let value = from_ron_value(uri, value.clone())?; 53 | 54 | Ok((key, value)) 55 | }) 56 | .collect::, _>>()?; 57 | 58 | ValueKind::Table(map) 59 | } 60 | }; 61 | 62 | Ok(Value::new(uri, kind)) 63 | } 64 | -------------------------------------------------------------------------------- /src/file/format/toml.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | 3 | use crate::format; 4 | use crate::map::Map; 5 | use crate::value::Value; 6 | 7 | pub(crate) fn parse( 8 | uri: Option<&String>, 9 | text: &str, 10 | ) -> Result, Box> { 11 | // Parse a TOML value from the provided text 12 | let value = from_toml_value(uri, &toml::from_str(text)?); 13 | format::extract_root_table(uri, value) 14 | } 15 | 16 | fn from_toml_value(uri: Option<&String>, value: &toml::Value) -> Value { 17 | match *value { 18 | toml::Value::String(ref value) => Value::new(uri, value.clone()), 19 | toml::Value::Float(value) => Value::new(uri, value), 20 | toml::Value::Integer(value) => Value::new(uri, value), 21 | toml::Value::Boolean(value) => Value::new(uri, value), 22 | 23 | toml::Value::Table(ref table) => { 24 | let mut m = Map::new(); 25 | 26 | for (key, value) in table { 27 | m.insert(key.clone(), from_toml_value(uri, value)); 28 | } 29 | 30 | Value::new(uri, m) 31 | } 32 | 33 | toml::Value::Array(ref array) => { 34 | let mut l = Vec::new(); 35 | 36 | for value in array { 37 | l.push(from_toml_value(uri, value)); 38 | } 39 | 40 | Value::new(uri, l) 41 | } 42 | 43 | toml::Value::Datetime(ref datetime) => Value::new(uri, datetime.to_string()), 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/file/format/yaml.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | use std::fmt; 3 | use std::mem; 4 | 5 | use yaml_rust2 as yaml; 6 | 7 | use crate::format; 8 | use crate::map::Map; 9 | use crate::value::{Value, ValueKind}; 10 | 11 | pub(crate) fn parse( 12 | uri: Option<&String>, 13 | text: &str, 14 | ) -> Result, Box> { 15 | // Parse a YAML object from file 16 | let mut docs = yaml::YamlLoader::load_from_str(text)?; 17 | let root = match docs.len() { 18 | 0 => yaml::Yaml::Hash(yaml::yaml::Hash::new()), 19 | 1 => mem::replace(&mut docs[0], yaml::Yaml::Null), 20 | n => { 21 | return Err(Box::new(MultipleDocumentsError(n))); 22 | } 
23 | }; 24 | 25 | let value = from_yaml_value(uri, &root)?; 26 | format::extract_root_table(uri, value) 27 | } 28 | 29 | fn from_yaml_value( 30 | uri: Option<&String>, 31 | value: &yaml::Yaml, 32 | ) -> Result> { 33 | match *value { 34 | yaml::Yaml::String(ref value) => Ok(Value::new(uri, ValueKind::String(value.clone()))), 35 | yaml::Yaml::Real(ref value) => { 36 | // TODO: Figure out in what cases this can panic? 37 | value 38 | .parse::() 39 | .map_err(|_| { 40 | Box::new(FloatParsingError(value.clone())) as Box<(dyn Error + Send + Sync)> 41 | }) 42 | .map(ValueKind::Float) 43 | .map(|f| Value::new(uri, f)) 44 | } 45 | yaml::Yaml::Integer(value) => Ok(Value::new(uri, ValueKind::I64(value))), 46 | yaml::Yaml::Boolean(value) => Ok(Value::new(uri, ValueKind::Boolean(value))), 47 | yaml::Yaml::Hash(ref table) => { 48 | let mut m = Map::new(); 49 | for (key, value) in table { 50 | match key { 51 | yaml::Yaml::String(k) => m.insert(k.to_owned(), from_yaml_value(uri, value)?), 52 | yaml::Yaml::Integer(k) => m.insert(k.to_string(), from_yaml_value(uri, value)?), 53 | yaml::Yaml::Boolean(k) => m.insert(k.to_string(), from_yaml_value(uri, value)?), 54 | yaml::Yaml::Real(k) => m.insert(k.to_owned(), from_yaml_value(uri, value)?), 55 | other => Err(Box::new(UnsupportedHashKeyError(format!("{other:?}"))))?, 56 | }; 57 | } 58 | Ok(Value::new(uri, ValueKind::Table(m))) 59 | } 60 | yaml::Yaml::Array(ref array) => { 61 | let mut l = Vec::new(); 62 | 63 | for value in array { 64 | l.push(from_yaml_value(uri, value)?); 65 | } 66 | 67 | Ok(Value::new(uri, ValueKind::Array(l))) 68 | } 69 | 70 | // 1. Yaml NULL 71 | // 2. BadValue – It shouldn't be possible to hit BadValue as this only happens when 72 | // using the index trait badly or on a type error but we send back nil. 73 | // 3. 
Alias – No idea what to do with this and there is a note in the lib that its 74 | // not fully supported yet anyway 75 | _ => Ok(Value::new(uri, ValueKind::Nil)), 76 | } 77 | } 78 | 79 | #[derive(Debug, Copy, Clone)] 80 | struct MultipleDocumentsError(usize); 81 | 82 | impl fmt::Display for MultipleDocumentsError { 83 | fn fmt(&self, format: &mut fmt::Formatter<'_>) -> fmt::Result { 84 | write!(format, "Got {} YAML documents, expected 1", self.0) 85 | } 86 | } 87 | 88 | impl Error for MultipleDocumentsError { 89 | fn description(&self) -> &str { 90 | "More than one YAML document provided" 91 | } 92 | } 93 | 94 | #[derive(Debug, Clone)] 95 | struct FloatParsingError(String); 96 | 97 | impl fmt::Display for FloatParsingError { 98 | fn fmt(&self, format: &mut fmt::Formatter<'_>) -> fmt::Result { 99 | write!(format, "Parsing {} as floating point number failed", self.0) 100 | } 101 | } 102 | 103 | impl Error for FloatParsingError { 104 | fn description(&self) -> &str { 105 | "Floating point number parsing failed" 106 | } 107 | } 108 | 109 | #[derive(Debug, Clone)] 110 | struct UnsupportedHashKeyError(String); 111 | 112 | impl fmt::Display for UnsupportedHashKeyError { 113 | fn fmt(&self, format: &mut fmt::Formatter<'_>) -> fmt::Result { 114 | write!( 115 | format, 116 | "Cannot parse {} because it is an unsupported hash key type", 117 | self.0 118 | ) 119 | } 120 | } 121 | 122 | impl Error for UnsupportedHashKeyError { 123 | fn description(&self) -> &str { 124 | "Unsupported yaml hash key found" 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /src/file/mod.rs: -------------------------------------------------------------------------------- 1 | mod format; 2 | pub(crate) mod source; 3 | 4 | use std::fmt::Debug; 5 | use std::path::{Path, PathBuf}; 6 | 7 | use self::source::FileSource; 8 | use crate::error::{ConfigError, Result}; 9 | use crate::map::Map; 10 | use crate::source::Source; 11 | use crate::value::Value; 12 | use crate::Format; 13 | 14 | pub use self::format::FileFormat; 15 | pub use self::source::file::FileSourceFile; 16 | pub use self::source::string::FileSourceString; 17 | 18 | /// An extension of [`Format`] trait. 19 | /// 20 | /// Associates format with file extensions, therefore linking storage-agnostic notion of format to a file system. 21 | pub trait FileStoredFormat: Format { 22 | /// Returns a vector of file extensions, for instance `[yml, yaml]`. 23 | fn file_extensions(&self) -> &'static [&'static str]; 24 | } 25 | 26 | /// A configuration source backed up by a file. 27 | /// 28 | /// It supports optional automatic file format discovery. 29 | #[derive(Clone, Debug)] 30 | #[must_use] 31 | pub struct File { 32 | source: T, 33 | 34 | /// Format of file (which dictates what driver to use). 35 | format: Option, 36 | 37 | /// A required File will error if it cannot be found 38 | required: bool, 39 | } 40 | 41 | impl File 42 | where 43 | F: FileStoredFormat + 'static, 44 | { 45 | pub fn from_str(s: &str, format: F) -> Self { 46 | Self { 47 | format: Some(format), 48 | required: true, 49 | source: s.into(), 50 | } 51 | } 52 | } 53 | 54 | impl File 55 | where 56 | F: FileStoredFormat + 'static, 57 | { 58 | pub fn new(name: &str, format: F) -> Self { 59 | Self { 60 | format: Some(format), 61 | required: true, 62 | source: FileSourceFile::new(name.into()), 63 | } 64 | } 65 | } 66 | 67 | impl File { 68 | /// Given the basename of a file, will attempt to locate a file by setting its 69 | /// extension to a registered format. 
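///
/// A minimal sketch (assumes a `Settings.*` file in a registered format exists relative to
/// the working directory):
///
/// ```rust,ignore
/// use config::{Config, File};
///
/// // Tries `Settings.toml`, `Settings.json`, `Settings.yaml`, ... depending on enabled features.
/// let config = Config::builder()
///     .add_source(File::with_name("Settings"))
///     .build()
///     .expect("found a Settings file in a supported format");
/// ```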
70 | pub fn with_name(base_name: &str) -> Self { 71 | Self { 72 | format: None, 73 | required: true, 74 | source: FileSourceFile::new(base_name.into()), 75 | } 76 | } 77 | } 78 | 79 | impl File 80 | where 81 | F: FileStoredFormat + 'static, 82 | T: FileSource, 83 | { 84 | pub fn format(mut self, format: F) -> Self { 85 | self.format = Some(format); 86 | self 87 | } 88 | 89 | /// Set required to false to make a file optional when building the config. 90 | pub fn required(mut self, required: bool) -> Self { 91 | self.required = required; 92 | self 93 | } 94 | } 95 | 96 | impl<'a> From<&'a Path> for File { 97 | fn from(path: &'a Path) -> Self { 98 | Self { 99 | format: None, 100 | required: true, 101 | source: FileSourceFile::new(path.to_path_buf()), 102 | } 103 | } 104 | } 105 | 106 | impl From for File { 107 | fn from(path: PathBuf) -> Self { 108 | Self { 109 | format: None, 110 | required: true, 111 | source: FileSourceFile::new(path), 112 | } 113 | } 114 | } 115 | 116 | impl Source for File 117 | where 118 | F: FileStoredFormat + Debug + Clone + Send + Sync + 'static, 119 | T: Sync + Send + FileSource + 'static, 120 | { 121 | fn clone_into_box(&self) -> Box { 122 | Box::new((*self).clone()) 123 | } 124 | 125 | fn collect(&self) -> Result> { 126 | // Coerce the file contents to a string 127 | let (uri, contents, format) = match self 128 | .source 129 | .resolve(self.format.clone()) 130 | .map_err(ConfigError::Foreign) 131 | { 132 | Ok(result) => (result.uri, result.content, result.format), 133 | 134 | Err(error) => { 135 | if !self.required { 136 | return Ok(Map::new()); 137 | } 138 | 139 | return Err(error); 140 | } 141 | }; 142 | 143 | // Parse the string using the given format 144 | format 145 | .parse(uri.as_ref(), &contents) 146 | .map_err(|cause| ConfigError::FileParse { uri, cause }) 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /src/file/source/file.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::error::Error; 3 | use std::fs; 4 | use std::io; 5 | use std::path::PathBuf; 6 | 7 | use crate::file::{ 8 | format::all_extensions, source::FileSourceResult, FileSource, FileStoredFormat, Format, 9 | }; 10 | 11 | /// Describes a file sourced from a file 12 | #[derive(Clone, Debug)] 13 | pub struct FileSourceFile { 14 | /// Path of configuration file 15 | name: PathBuf, 16 | } 17 | 18 | impl FileSourceFile { 19 | pub fn new(name: PathBuf) -> Self { 20 | Self { name } 21 | } 22 | 23 | fn find_file( 24 | &self, 25 | format_hint: Option, 26 | ) -> Result<(PathBuf, Box), Box> 27 | where 28 | F: FileStoredFormat + Format + 'static, 29 | { 30 | let filename = if self.name.is_absolute() { 31 | self.name.clone() 32 | } else { 33 | env::current_dir()?.as_path().join(&self.name) 34 | }; 35 | 36 | // First check for an _exact_ match 37 | if filename.is_file() { 38 | return if let Some(format) = format_hint { 39 | Ok((filename, Box::new(format))) 40 | } else { 41 | for (format, extensions) in all_extensions().iter() { 42 | if extensions.contains( 43 | &filename 44 | .extension() 45 | .unwrap_or_default() 46 | .to_string_lossy() 47 | .as_ref(), 48 | ) { 49 | return Ok((filename, Box::new(*format))); 50 | } 51 | } 52 | 53 | Err(Box::new(io::Error::new( 54 | io::ErrorKind::NotFound, 55 | format!( 56 | "configuration file \"{}\" is not of a registered file format", 57 | filename.to_string_lossy() 58 | ), 59 | ))) 60 | }; 61 | } 62 | // Adding a dummy extension will make sure we will 
not override secondary extensions, i.e. "file.local" 63 | // This will make the following set_extension function calls to append the extension. 64 | let mut filename = add_dummy_extension(filename); 65 | 66 | match format_hint { 67 | Some(format) => { 68 | for ext in format.file_extensions() { 69 | filename.set_extension(ext); 70 | 71 | if filename.is_file() { 72 | return Ok((filename, Box::new(format))); 73 | } 74 | } 75 | } 76 | 77 | None => { 78 | for format in all_extensions().keys() { 79 | for ext in format.extensions() { 80 | filename.set_extension(ext); 81 | 82 | if filename.is_file() { 83 | return Ok((filename, Box::new(*format))); 84 | } 85 | } 86 | } 87 | } 88 | } 89 | 90 | Err(Box::new(io::Error::new( 91 | io::ErrorKind::NotFound, 92 | format!( 93 | "configuration file \"{}\" not found", 94 | self.name.to_string_lossy() 95 | ), 96 | ))) 97 | } 98 | } 99 | 100 | impl FileSource for FileSourceFile 101 | where 102 | F: Format + FileStoredFormat + 'static, 103 | { 104 | fn resolve( 105 | &self, 106 | format_hint: Option, 107 | ) -> Result> { 108 | // Find file 109 | let (filename, format) = self.find_file(format_hint)?; 110 | 111 | // Attempt to use a relative path for the URI 112 | let uri = env::current_dir() 113 | .ok() 114 | .and_then(|base| pathdiff::diff_paths(&filename, base)) 115 | .unwrap_or_else(|| filename.clone()); 116 | 117 | // Read contents from file 118 | let text = fs::read_to_string(filename)?; 119 | 120 | Ok(FileSourceResult { 121 | uri: Some(uri.to_string_lossy().into_owned()), 122 | content: text, 123 | format, 124 | }) 125 | } 126 | } 127 | 128 | fn add_dummy_extension(mut filename: PathBuf) -> PathBuf { 129 | match filename.extension() { 130 | Some(extension) => { 131 | let mut ext = extension.to_os_string(); 132 | ext.push("."); 133 | ext.push("dummy"); 134 | filename.set_extension(ext); 135 | } 136 | None => { 137 | filename.set_extension("dummy"); 138 | } 139 | } 140 | filename 141 | } 142 | -------------------------------------------------------------------------------- /src/file/source/mod.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod file; 2 | pub(crate) mod string; 3 | 4 | use std::error::Error; 5 | use std::fmt::Debug; 6 | 7 | use crate::{file::FileStoredFormat, Format}; 8 | 9 | /// Describes where the [`File`][super::File] is sourced 10 | pub trait FileSource: Debug + Clone 11 | where 12 | T: Format + FileStoredFormat, 13 | { 14 | fn resolve( 15 | &self, 16 | format_hint: Option, 17 | ) -> Result>; 18 | } 19 | 20 | #[allow(unnameable_types)] // Unsure if/how to expose this 21 | pub struct FileSourceResult { 22 | pub(crate) uri: Option, 23 | pub(crate) content: String, 24 | pub(crate) format: Box, 25 | } 26 | 27 | impl FileSourceResult { 28 | pub fn uri(&self) -> &Option { 29 | &self.uri 30 | } 31 | 32 | pub fn content(&self) -> &str { 33 | self.content.as_str() 34 | } 35 | 36 | pub fn format(&self) -> &dyn Format { 37 | self.format.as_ref() 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/file/source/string.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | 3 | use crate::{ 4 | file::source::FileSourceResult, 5 | file::{FileSource, FileStoredFormat}, 6 | Format, 7 | }; 8 | 9 | /// Describes a file sourced from a string 10 | #[derive(Clone, Debug)] 11 | pub struct FileSourceString(String); 12 | 13 | impl<'a> From<&'a str> for FileSourceString { 14 | fn from(s: &'a str) -> Self 
{ 15 | Self(s.into()) 16 | } 17 | } 18 | 19 | impl FileSource for FileSourceString 20 | where 21 | F: Format + FileStoredFormat + 'static, 22 | { 23 | fn resolve( 24 | &self, 25 | format_hint: Option, 26 | ) -> Result> { 27 | Ok(FileSourceResult { 28 | uri: None, 29 | content: self.0.clone(), 30 | format: Box::new(format_hint.expect("from_str requires a set file format")), 31 | }) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/format.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | 3 | use crate::error::{ConfigError, Unexpected}; 4 | use crate::map::Map; 5 | use crate::value::{Value, ValueKind}; 6 | 7 | /// Describes a format of configuration source data 8 | /// 9 | /// Implementations of this trait can be used to convert [`File`](crate::File) sources to configuration data. 10 | /// 11 | /// There can be various formats, some of them provided by this library, such as JSON, Yaml and other. 12 | /// This trait enables users of the library to easily define their own, even proprietary formats without 13 | /// the need to alter library sources. 14 | /// 15 | /// What is more, it is recommended to use this trait with custom [`Source`](crate::Source)s and their async counterparts. 16 | pub trait Format { 17 | /// Parses provided content into configuration values understood by the library. 18 | /// 19 | /// It also allows specifying optional URI of the source associated with format instance that can facilitate debugging. 20 | fn parse( 21 | &self, 22 | uri: Option<&String>, 23 | text: &str, 24 | ) -> Result, Box>; 25 | } 26 | 27 | // Have a proper error fire if the root of a file is ever not a Table 28 | pub(crate) fn extract_root_table( 29 | uri: Option<&String>, 30 | value: Value, 31 | ) -> Result, Box> { 32 | match value.kind { 33 | ValueKind::Table(map) => Ok(map), 34 | ValueKind::Nil => Err(Unexpected::Unit), 35 | ValueKind::Array(_value) => Err(Unexpected::Seq), 36 | ValueKind::Boolean(value) => Err(Unexpected::Bool(value)), 37 | ValueKind::I64(value) => Err(Unexpected::I64(value)), 38 | ValueKind::I128(value) => Err(Unexpected::I128(value)), 39 | ValueKind::U64(value) => Err(Unexpected::U64(value)), 40 | ValueKind::U128(value) => Err(Unexpected::U128(value)), 41 | ValueKind::Float(value) => Err(Unexpected::Float(value)), 42 | ValueKind::String(value) => Err(Unexpected::Str(value)), 43 | } 44 | .map_err(|err| ConfigError::invalid_root(uri, err)) 45 | .map_err(|err| Box::new(err) as Box) 46 | } 47 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! [`Config`] organizes hierarchical or layered configurations for Rust applications. 2 | //! 3 | //! [`Config`] lets you set a set of [default parameters][ConfigBuilder::set_default] and then extend them via merging in 4 | //! configuration from a variety of sources: 5 | //! 6 | //! - [Environment variables][Environment] 7 | //! - [String literals][FileSourceString] in [well-known formats][FileFormat] 8 | //! - Another [`Config`] instance 9 | //! - [Files][FileSourceFile] in [well known formats][FileFormat] and custom ones defined with [`Format`] trait 10 | //! - Manual, programmatic [overrides][ConfigBuilder::set_override] 11 | //! 12 | //! Additionally, [`Config`] supports: 13 | //! 14 | //! - Live watching and re-reading of configuration files 15 | //! 
- Deep access into the merged configuration via a path syntax 16 | //! - Deserialization via `serde` of the configuration or any subset defined via a path 17 | //! 18 | //! # Example 19 | //! 20 | //! ```rust 21 | //! # #[cfg(feature = "toml")] { 22 | #![doc = include_str!("../examples/simple/main.rs")] 23 | //! # } 24 | //! ``` 25 | //! 26 | //! See more [examples](https://github.com/mehcode/config-rs/tree/master/examples) for 27 | //! general usage information. 28 | 29 | #![cfg_attr(docsrs, feature(doc_auto_cfg))] 30 | #![warn(clippy::print_stderr)] 31 | #![warn(clippy::print_stdout)] 32 | 33 | pub mod builder; 34 | mod config; 35 | mod de; 36 | mod env; 37 | mod error; 38 | mod file; 39 | mod format; 40 | mod map; 41 | mod path; 42 | mod ser; 43 | mod source; 44 | mod value; 45 | 46 | // Re-export 47 | #[cfg(feature = "convert-case")] 48 | pub use convert_case::Case; 49 | 50 | pub use crate::builder::ConfigBuilder; 51 | pub use crate::config::Config; 52 | pub use crate::env::Environment; 53 | pub use crate::error::ConfigError; 54 | pub use crate::file::source::FileSource; 55 | pub use crate::file::{File, FileFormat, FileSourceFile, FileSourceString, FileStoredFormat}; 56 | pub use crate::format::Format; 57 | pub use crate::map::Map; 58 | #[cfg(feature = "async")] 59 | pub use crate::source::AsyncSource; 60 | pub use crate::source::Source; 61 | pub use crate::value::{Value, ValueKind}; 62 | 63 | #[doc = include_str!("../README.md")] 64 | #[cfg(doctest)] 65 | pub struct ReadmeDoctests; 66 | -------------------------------------------------------------------------------- /src/map.rs: -------------------------------------------------------------------------------- 1 | /// The backing store for [`Config`][crate::Config] 2 | pub type Map = InternalMap; 3 | 4 | #[cfg(not(feature = "preserve_order"))] 5 | type InternalMap = std::collections::HashMap; 6 | #[cfg(feature = "preserve_order")] 7 | type InternalMap = indexmap::IndexMap; 8 | -------------------------------------------------------------------------------- /src/path/mod.rs: -------------------------------------------------------------------------------- 1 | use std::str::FromStr; 2 | 3 | use crate::error::{ConfigError, Result}; 4 | use crate::map::Map; 5 | use crate::value::{Value, ValueKind}; 6 | 7 | mod parser; 8 | 9 | #[derive(Debug, Eq, PartialEq, Clone, Hash)] 10 | pub(crate) struct Expression { 11 | root: String, 12 | postfix: Vec, 13 | } 14 | 15 | impl Expression { 16 | pub(crate) fn root(root: String) -> Self { 17 | Self { 18 | root, 19 | postfix: Vec::new(), 20 | } 21 | } 22 | } 23 | 24 | impl FromStr for Expression { 25 | type Err = ConfigError; 26 | 27 | fn from_str(s: &str) -> Result { 28 | parser::from_str(s).map_err(|e| ConfigError::PathParse { 29 | cause: Box::new(ParseError::new(e)), 30 | }) 31 | } 32 | } 33 | 34 | #[derive(Debug, Eq, PartialEq, Clone, Hash)] 35 | enum Postfix { 36 | Key(String), 37 | Index(isize), 38 | } 39 | 40 | #[derive(Debug)] 41 | struct ParseError(String); 42 | 43 | impl ParseError { 44 | fn new(inner: winnow::error::ParseError<&str, winnow::error::ContextError>) -> Self { 45 | Self(inner.to_string()) 46 | } 47 | } 48 | 49 | impl std::fmt::Display for ParseError { 50 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 51 | self.0.fmt(f) 52 | } 53 | } 54 | 55 | impl std::error::Error for ParseError {} 56 | 57 | /// Convert a relative index into an absolute index 58 | fn abs_index(index: isize, len: usize) -> Result { 59 | if index >= 0 { 60 | Ok(index as usize) 61 | } else if let 
Some(index) = len.checked_sub(index.unsigned_abs()) { 62 | Ok(index) 63 | } else { 64 | Err((len as isize + index).unsigned_abs()) 65 | } 66 | } 67 | 68 | impl Expression { 69 | pub(crate) fn get(self, root: &Value) -> Option<&Value> { 70 | let ValueKind::Table(map) = &root.kind else { 71 | return None; 72 | }; 73 | let mut child = map.get(&self.root)?; 74 | for postfix in &self.postfix { 75 | match postfix { 76 | Postfix::Key(key) => { 77 | let ValueKind::Table(map) = &child.kind else { 78 | return None; 79 | }; 80 | child = map.get(key)?; 81 | } 82 | Postfix::Index(rel_index) => { 83 | let ValueKind::Array(array) = &child.kind else { 84 | return None; 85 | }; 86 | let index = abs_index(*rel_index, array.len()).ok()?; 87 | child = array.get(index)?; 88 | } 89 | } 90 | } 91 | Some(child) 92 | } 93 | 94 | pub(crate) fn get_mut_forcibly<'a>(&self, root: &'a mut Value) -> &'a mut Value { 95 | if !matches!(root.kind, ValueKind::Table(_)) { 96 | *root = Map::::new().into(); 97 | } 98 | let ValueKind::Table(map) = &mut root.kind else { 99 | unreachable!() 100 | }; 101 | let mut child = map 102 | .entry(self.root.clone()) 103 | .or_insert_with(|| Value::new(None, ValueKind::Nil)); 104 | for postfix in &self.postfix { 105 | match postfix { 106 | Postfix::Key(key) => { 107 | if !matches!(child.kind, ValueKind::Table(_)) { 108 | *child = Map::::new().into(); 109 | } 110 | let ValueKind::Table(ref mut map) = child.kind else { 111 | unreachable!() 112 | }; 113 | 114 | child = map 115 | .entry(key.clone()) 116 | .or_insert_with(|| Value::new(None, ValueKind::Nil)); 117 | } 118 | Postfix::Index(rel_index) => { 119 | if !matches!(child.kind, ValueKind::Array(_)) { 120 | *child = Vec::::new().into(); 121 | } 122 | let ValueKind::Array(ref mut array) = child.kind else { 123 | unreachable!() 124 | }; 125 | 126 | let uindex = match abs_index(*rel_index, array.len()) { 127 | Ok(uindex) => { 128 | if uindex >= array.len() { 129 | array.resize(uindex + 1, Value::new(None, ValueKind::Nil)); 130 | } 131 | uindex 132 | } 133 | Err(insertion) => { 134 | array.splice( 135 | 0..0, 136 | (0..insertion).map(|_| Value::new(None, ValueKind::Nil)), 137 | ); 138 | 0 139 | } 140 | }; 141 | 142 | child = &mut array[uindex]; 143 | } 144 | } 145 | } 146 | child 147 | } 148 | 149 | pub(crate) fn set(&self, root: &mut Value, value: Value) { 150 | let parent = self.get_mut_forcibly(root); 151 | match value.kind { 152 | ValueKind::Table(ref incoming_map) => { 153 | // If the parent is not a table, overwrite it, treating it as a 154 | // table 155 | if !matches!(parent.kind, ValueKind::Table(_)) { 156 | *parent = Map::::new().into(); 157 | } 158 | 159 | // Continue the deep merge 160 | for (key, val) in incoming_map { 161 | Self::root(key.clone()).set(parent, val.clone()); 162 | } 163 | } 164 | _ => { 165 | *parent = value; 166 | } 167 | } 168 | } 169 | } 170 | -------------------------------------------------------------------------------- /src/path/parser.rs: -------------------------------------------------------------------------------- 1 | use std::str::FromStr; 2 | 3 | use winnow::ascii::digit1; 4 | use winnow::ascii::space0; 5 | use winnow::combinator::cut_err; 6 | use winnow::combinator::dispatch; 7 | use winnow::combinator::fail; 8 | use winnow::combinator::opt; 9 | use winnow::combinator::repeat; 10 | use winnow::combinator::seq; 11 | use winnow::error::ContextError; 12 | use winnow::error::ParseError; 13 | use winnow::error::StrContext; 14 | use winnow::error::StrContextValue; 15 | use winnow::prelude::*; 16 | use 
winnow::token::any; 17 | use winnow::token::take_while; 18 | 19 | use crate::path::Expression; 20 | use crate::path::Postfix; 21 | 22 | pub(crate) fn from_str(input: &str) -> Result> { 23 | path.parse(input) 24 | } 25 | 26 | fn path(i: &mut &str) -> ModalResult { 27 | let root = ident.parse_next(i)?; 28 | let postfix = repeat(0.., postfix).parse_next(i)?; 29 | let expr = Expression { root, postfix }; 30 | Ok(expr) 31 | } 32 | 33 | fn postfix(i: &mut &str) -> ModalResult { 34 | dispatch! {any; 35 | '[' => cut_err( 36 | seq!( 37 | integer.map(Postfix::Index), 38 | _: ']'.context(StrContext::Expected(StrContextValue::CharLiteral(']'))), 39 | ) 40 | .map(|(i,)| i) 41 | .context(StrContext::Label("subscript")) 42 | ), 43 | '.' => cut_err(ident.map(Postfix::Key)), 44 | _ => cut_err( 45 | fail 46 | .context(StrContext::Label("postfix")) 47 | .context(StrContext::Expected(StrContextValue::CharLiteral('['))) 48 | .context(StrContext::Expected(StrContextValue::CharLiteral('.'))) 49 | ), 50 | } 51 | .parse_next(i) 52 | } 53 | 54 | fn ident(i: &mut &str) -> ModalResult { 55 | take_while(1.., ('a'..='z', 'A'..='Z', '0'..='9', '_', '-')) 56 | .map(ToOwned::to_owned) 57 | .context(StrContext::Label("identifier")) 58 | .context(StrContext::Expected(StrContextValue::Description( 59 | "ASCII alphanumeric", 60 | ))) 61 | .context(StrContext::Expected(StrContextValue::CharLiteral('_'))) 62 | .context(StrContext::Expected(StrContextValue::CharLiteral('-'))) 63 | .parse_next(i) 64 | } 65 | 66 | fn integer(i: &mut &str) -> ModalResult { 67 | seq!( 68 | _: space0, 69 | (opt('-'), digit1).take().try_map(FromStr::from_str), 70 | _: space0 71 | ) 72 | .context(StrContext::Expected(StrContextValue::Description( 73 | "integer", 74 | ))) 75 | .map(|(i,)| i) 76 | .parse_next(i) 77 | } 78 | 79 | #[cfg(test)] 80 | mod test { 81 | use snapbox::prelude::*; 82 | use snapbox::{assert_data_eq, str}; 83 | 84 | use super::*; 85 | 86 | #[test] 87 | fn test_id() { 88 | let parsed: Expression = from_str("abcd").unwrap(); 89 | assert_data_eq!( 90 | parsed.to_debug(), 91 | str![[r#" 92 | Expression { 93 | root: "abcd", 94 | postfix: [], 95 | } 96 | 97 | "#]] 98 | ); 99 | } 100 | 101 | #[test] 102 | fn test_id_dash() { 103 | let parsed: Expression = from_str("abcd-efgh").unwrap(); 104 | assert_data_eq!( 105 | parsed.to_debug(), 106 | str![[r#" 107 | Expression { 108 | root: "abcd-efgh", 109 | postfix: [], 110 | } 111 | 112 | "#]] 113 | ); 114 | } 115 | 116 | #[test] 117 | fn test_child() { 118 | let parsed: Expression = from_str("abcd.efgh").unwrap(); 119 | assert_data_eq!( 120 | parsed.to_debug(), 121 | str![[r#" 122 | Expression { 123 | root: "abcd", 124 | postfix: [ 125 | Key( 126 | "efgh", 127 | ), 128 | ], 129 | } 130 | 131 | "#]] 132 | ); 133 | 134 | let parsed: Expression = from_str("abcd.efgh.ijkl").unwrap(); 135 | assert_data_eq!( 136 | parsed.to_debug(), 137 | str![[r#" 138 | Expression { 139 | root: "abcd", 140 | postfix: [ 141 | Key( 142 | "efgh", 143 | ), 144 | Key( 145 | "ijkl", 146 | ), 147 | ], 148 | } 149 | 150 | "#]] 151 | ); 152 | } 153 | 154 | #[test] 155 | fn test_subscript() { 156 | let parsed: Expression = from_str("abcd[12]").unwrap(); 157 | assert_data_eq!( 158 | parsed.to_debug(), 159 | str![[r#" 160 | Expression { 161 | root: "abcd", 162 | postfix: [ 163 | Index( 164 | 12, 165 | ), 166 | ], 167 | } 168 | 169 | "#]] 170 | ); 171 | } 172 | 173 | #[test] 174 | fn test_subscript_neg() { 175 | let parsed: Expression = from_str("abcd[-1]").unwrap(); 176 | assert_data_eq!( 177 | parsed.to_debug(), 178 | str![[r#" 
179 | Expression { 180 | root: "abcd", 181 | postfix: [ 182 | Index( 183 | -1, 184 | ), 185 | ], 186 | } 187 | 188 | "#]] 189 | ); 190 | } 191 | 192 | #[test] 193 | fn test_invalid_identifier() { 194 | let err = from_str("!").unwrap_err(); 195 | assert_data_eq!( 196 | err.to_string(), 197 | str![[r#" 198 | ! 199 | ^ 200 | invalid identifier 201 | expected ASCII alphanumeric, `_`, `-` 202 | "#]] 203 | ); 204 | } 205 | 206 | #[test] 207 | fn test_invalid_child() { 208 | let err = from_str("a..").unwrap_err(); 209 | assert_data_eq!( 210 | err.to_string(), 211 | str![[r#" 212 | a.. 213 | ^ 214 | invalid identifier 215 | expected ASCII alphanumeric, `_`, `-` 216 | "#]] 217 | ); 218 | } 219 | 220 | #[test] 221 | fn test_invalid_subscript() { 222 | let err = from_str("a[b]").unwrap_err(); 223 | assert_data_eq!( 224 | err.to_string(), 225 | str![[r#" 226 | a[b] 227 | ^ 228 | invalid subscript 229 | expected integer 230 | "#]] 231 | ); 232 | } 233 | 234 | #[test] 235 | fn test_incomplete_subscript() { 236 | let err = from_str("a[0").unwrap_err(); 237 | assert_data_eq!( 238 | err.to_string(), 239 | str![[r#" 240 | a[0 241 | ^ 242 | invalid subscript 243 | expected `]` 244 | "#]] 245 | ); 246 | } 247 | 248 | #[test] 249 | fn test_invalid_postfix() { 250 | let err = from_str("a!b").unwrap_err(); 251 | assert_data_eq!( 252 | err.to_string(), 253 | str![[r#" 254 | a!b 255 | ^ 256 | invalid postfix 257 | expected `[`, `.` 258 | "#]] 259 | ); 260 | } 261 | } 262 | -------------------------------------------------------------------------------- /src/source.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Debug; 2 | use std::str::FromStr; 3 | 4 | #[cfg(feature = "async")] 5 | use async_trait::async_trait; 6 | 7 | use crate::error::Result; 8 | use crate::map::Map; 9 | use crate::path; 10 | use crate::value::{Value, ValueKind}; 11 | 12 | /// Describes a generic _source_ of configuration properties. 13 | pub trait Source: Debug { 14 | fn clone_into_box(&self) -> Box; 15 | 16 | /// Collect all configuration properties available from this source into 17 | /// a [`Map`]. 18 | fn collect(&self) -> Result>; 19 | 20 | /// Collects all configuration properties to a provided cache. 21 | fn collect_to(&self, cache: &mut Value) -> Result<()> { 22 | self.collect()? 23 | .into_iter() 24 | .for_each(|(key, val)| set_value(cache, key, val)); 25 | 26 | Ok(()) 27 | } 28 | } 29 | 30 | fn set_value(cache: &mut Value, key: String, value: Value) { 31 | match path::Expression::from_str(key.as_str()) { 32 | // Set using the path 33 | Ok(expr) => expr.set(cache, value), 34 | 35 | // Set directly anyway 36 | _ => path::Expression::root(key).set(cache, value), 37 | } 38 | } 39 | 40 | /// Describes a generic _source_ of configuration properties capable of using an async runtime. 41 | /// 42 | /// At the moment this library does not implement it, although it allows using its implementations 43 | /// within builders. Due to the scattered landscape of asynchronous runtimes, it is impossible to 44 | /// cater to all needs with one implementation. Also, this trait might be most useful with remote 45 | /// configuration sources, reachable via the network, probably using HTTP protocol. Numerous HTTP 46 | /// libraries exist, making it even harder to find one implementation that rules them all. 47 | /// 48 | /// For those reasons, it is left to other crates to implement runtime-specific or proprietary 49 | /// details. 
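/// As a rough sketch, an `AsyncSource` backed by some asynchronous HTTP client could look like
/// the following; `fetch_remote` is a hypothetical helper standing in for whatever client call
/// is actually used:
///
/// ```rust,ignore
/// use async_trait::async_trait;
/// use config::{AsyncSource, ConfigError, FileFormat, Format, Map, Value};
///
/// #[derive(Debug)]
/// struct HttpSource {
///     uri: String,
///     format: FileFormat,
/// }
///
/// #[async_trait]
/// impl AsyncSource for HttpSource {
///     async fn collect(&self) -> Result<Map<String, Value>, ConfigError> {
///         // Hypothetical asynchronous fetch of the raw configuration text.
///         let text = fetch_remote(&self.uri)
///             .await
///             .map_err(|e| ConfigError::Foreign(Box::new(e)))?;
///
///         // Reuse an existing `Format` implementation to turn the text into values.
///         self.format
///             .parse(Some(&self.uri), &text)
///             .map_err(ConfigError::Foreign)
///     }
/// }
/// ```
///
/// Only the transport is custom here; the parsing step can reuse any `Format` implementation.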
50 | /// 51 | /// It is advised to use `async_trait` crate while implementing this trait. 52 | /// 53 | /// See examples for sample implementation. 54 | #[cfg(feature = "async")] 55 | #[async_trait] 56 | pub trait AsyncSource: Debug + Sync { 57 | // Sync is supertrait due to https://docs.rs/async-trait/0.1.50/async_trait/index.html#dyn-traits 58 | 59 | /// Collects all configuration properties available from this source and return 60 | /// a Map as an async operations. 61 | async fn collect(&self) -> Result>; 62 | 63 | /// Collects all configuration properties to a provided cache. 64 | async fn collect_to(&self, cache: &mut Value) -> Result<()> { 65 | self.collect() 66 | .await? 67 | .into_iter() 68 | .for_each(|(key, val)| set_value(cache, key, val)); 69 | 70 | Ok(()) 71 | } 72 | } 73 | 74 | #[cfg(feature = "async")] 75 | impl Clone for Box { 76 | fn clone(&self) -> Self { 77 | self.to_owned() 78 | } 79 | } 80 | 81 | impl Clone for Box { 82 | fn clone(&self) -> Self { 83 | self.clone_into_box() 84 | } 85 | } 86 | 87 | impl Source for Vec> { 88 | fn clone_into_box(&self) -> Box { 89 | Box::new((*self).clone()) 90 | } 91 | 92 | fn collect(&self) -> Result> { 93 | let mut cache: Value = Map::::new().into(); 94 | 95 | for source in self { 96 | source.collect_to(&mut cache)?; 97 | } 98 | 99 | if let ValueKind::Table(table) = cache.kind { 100 | Ok(table) 101 | } else { 102 | unreachable!(); 103 | } 104 | } 105 | } 106 | 107 | impl Source for [Box] { 108 | fn clone_into_box(&self) -> Box { 109 | Box::new(self.to_owned()) 110 | } 111 | 112 | fn collect(&self) -> Result> { 113 | let mut cache: Value = Map::::new().into(); 114 | 115 | for source in self { 116 | source.collect_to(&mut cache)?; 117 | } 118 | 119 | if let ValueKind::Table(table) = cache.kind { 120 | Ok(table) 121 | } else { 122 | unreachable!(); 123 | } 124 | } 125 | } 126 | 127 | impl Source for Vec 128 | where 129 | T: Source + Sync + Send + Clone + 'static, 130 | { 131 | fn clone_into_box(&self) -> Box { 132 | Box::new((*self).clone()) 133 | } 134 | 135 | fn collect(&self) -> Result> { 136 | let mut cache: Value = Map::::new().into(); 137 | 138 | for source in self { 139 | source.collect_to(&mut cache)?; 140 | } 141 | 142 | if let ValueKind::Table(table) = cache.kind { 143 | Ok(table) 144 | } else { 145 | unreachable!(); 146 | } 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /tests/testsuite/async_builder.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "async")] 2 | #![cfg(feature = "json")] 3 | 4 | use async_trait::async_trait; 5 | 6 | use config::{AsyncSource, Config, ConfigError, FileFormat, Format, Map, Value}; 7 | 8 | #[derive(Debug)] 9 | struct AsyncJson(&'static str); 10 | 11 | #[async_trait] 12 | impl AsyncSource for AsyncJson { 13 | async fn collect(&self) -> Result, ConfigError> { 14 | let text = self.0; 15 | 16 | FileFormat::Json 17 | .parse(None, text) 18 | .map_err(ConfigError::Foreign) 19 | } 20 | } 21 | 22 | #[tokio::test] 23 | async fn test_single_async_file_source() { 24 | let config = Config::builder() 25 | .add_async_source(AsyncJson( 26 | r#" 27 | { 28 | "debug": true 29 | } 30 | "#, 31 | )) 32 | .build() 33 | .await 34 | .unwrap(); 35 | 36 | assert!(config.get::("debug").unwrap()); 37 | } 38 | 39 | #[tokio::test] 40 | async fn test_two_async_file_sources() { 41 | let config = Config::builder() 42 | .add_async_source(AsyncJson( 43 | r#" 44 | { 45 | "debug_json": true, 46 | "place": { 47 | "name": 
"Torre di Pisa" 48 | } 49 | } 50 | "#, 51 | )) 52 | .add_async_source(AsyncJson( 53 | r#" 54 | { 55 | "place": { 56 | "name": "Torre di Pisa", 57 | "number": 1 58 | } 59 | } 60 | "#, 61 | )) 62 | .build() 63 | .await 64 | .unwrap(); 65 | 66 | assert_eq!(config.get::("place.name").unwrap(), "Torre di Pisa"); 67 | assert_eq!(config.get::("place.number").unwrap(), 1); 68 | assert!(config.get::("debug_json").unwrap()); 69 | } 70 | 71 | #[tokio::test] 72 | async fn test_sync_to_async_file_sources() { 73 | let config = Config::builder() 74 | .add_source(config::File::from_str( 75 | r#" 76 | { 77 | "debug_json": true, 78 | "place": { 79 | "name": "Torre di Pisa" 80 | } 81 | } 82 | "#, 83 | FileFormat::Json, 84 | )) 85 | .add_async_source(AsyncJson( 86 | r#" 87 | { 88 | "place": { 89 | "name": "Torre di Pisa", 90 | "number": 1 91 | } 92 | } 93 | "#, 94 | )) 95 | .build() 96 | .await 97 | .unwrap(); 98 | 99 | assert_eq!(config.get::("place.name").unwrap(), "Torre di Pisa",); 100 | assert_eq!(config.get::("place.number").unwrap(), 1); 101 | } 102 | 103 | #[tokio::test] 104 | async fn test_async_to_sync_file_sources() { 105 | let config = Config::builder() 106 | .add_async_source(AsyncJson( 107 | r#" 108 | { 109 | "place": { 110 | "name": "Torre di Pisa", 111 | "number": 1 112 | } 113 | } 114 | "#, 115 | )) 116 | .add_source(config::File::from_str( 117 | r#" 118 | { 119 | "debug_json": true, 120 | "place": { 121 | "name": "Torre di Pisa" 122 | } 123 | } 124 | "#, 125 | FileFormat::Json, 126 | )) 127 | .build() 128 | .await 129 | .unwrap(); 130 | 131 | assert_eq!(config.get::("place.name").unwrap(), "Torre di Pisa",); 132 | assert_eq!(config.get::("place.number").unwrap(), 1,); 133 | } 134 | 135 | #[tokio::test] 136 | async fn test_async_file_sources_with_defaults() { 137 | let config = Config::builder() 138 | .set_default("place.name", "Tower of London") 139 | .unwrap() 140 | .set_default("place.sky", "blue") 141 | .unwrap() 142 | .add_async_source(AsyncJson( 143 | r#" 144 | { 145 | "place": { 146 | "name": "Torre di Pisa", 147 | "number": 1 148 | } 149 | } 150 | "#, 151 | )) 152 | .build() 153 | .await 154 | .unwrap(); 155 | 156 | assert_eq!(config.get::("place.name").unwrap(), "Torre di Pisa",); 157 | assert_eq!(config.get::("place.sky").unwrap(), "blue",); 158 | assert_eq!(config.get::("place.number").unwrap(), 1); 159 | } 160 | 161 | #[tokio::test] 162 | async fn test_async_file_sources_with_overrides() { 163 | let config = Config::builder() 164 | .set_override("place.name", "Tower of London") 165 | .unwrap() 166 | .add_async_source(AsyncJson( 167 | r#" 168 | { 169 | "place": { 170 | "name": "Torre di Pisa", 171 | "number": 1 172 | } 173 | } 174 | "#, 175 | )) 176 | .build() 177 | .await 178 | .unwrap(); 179 | 180 | assert_eq!( 181 | config.get::("place.name").unwrap(), 182 | "Tower of London", 183 | ); 184 | assert_eq!(config.get::("place.number").unwrap(), 1); 185 | } 186 | -------------------------------------------------------------------------------- /tests/testsuite/case.rs: -------------------------------------------------------------------------------- 1 | use serde_derive::Deserialize; 2 | 3 | use config::{Config, File, FileFormat}; 4 | 5 | #[test] 6 | #[cfg(feature = "json")] 7 | fn respect_field_case() { 8 | #[derive(Deserialize, Debug)] 9 | #[allow(non_snake_case)] 10 | #[allow(dead_code)] 11 | struct Kafka { 12 | broker: String, 13 | topic: String, 14 | pollSleep: u64, //<--- 15 | } 16 | 17 | let c = Config::builder() 18 | .add_source(File::from_str( 19 | r#" 20 | { 21 | "broker": 
"localhost:29092", 22 | "topic": "rust", 23 | "pollSleep": 1000 24 | } 25 | "#, 26 | FileFormat::Json, 27 | )) 28 | .build() 29 | .unwrap(); 30 | 31 | c.try_deserialize::().unwrap(); 32 | } 33 | 34 | #[test] 35 | #[cfg(feature = "json")] 36 | fn respect_renamed_field() { 37 | #[derive(Deserialize, Debug)] 38 | #[allow(dead_code)] 39 | struct MyConfig { 40 | #[serde(rename = "FooBar")] 41 | foo_bar: String, 42 | } 43 | 44 | let c = Config::builder() 45 | .add_source(File::from_str( 46 | r#" 47 | { 48 | "FooBar": "Hello, world!" 49 | } 50 | "#, 51 | FileFormat::Json, 52 | )) 53 | .build() 54 | .unwrap(); 55 | 56 | c.try_deserialize::().unwrap(); 57 | } 58 | 59 | #[test] 60 | #[cfg(feature = "json")] 61 | fn respect_path_case() { 62 | let c = Config::builder() 63 | .add_source(File::from_str( 64 | r#" 65 | { 66 | "Student": [ 67 | { "Name": "1" }, 68 | { "Name": "2" } 69 | ] 70 | } 71 | "#, 72 | FileFormat::Json, 73 | )) 74 | .build() 75 | .unwrap(); 76 | 77 | c.get_string("Student[0].Name").unwrap(); 78 | } 79 | -------------------------------------------------------------------------------- /tests/testsuite/defaults.rs: -------------------------------------------------------------------------------- 1 | use config::Config; 2 | use serde_derive::{Deserialize, Serialize}; 3 | 4 | #[derive(Debug, Serialize, Deserialize)] 5 | #[serde(default)] 6 | pub struct Settings { 7 | pub db_host: String, 8 | } 9 | 10 | impl Default for Settings { 11 | fn default() -> Self { 12 | Self { 13 | db_host: String::from("default"), 14 | } 15 | } 16 | } 17 | 18 | #[test] 19 | fn set_defaults() { 20 | let c = Config::default(); 21 | let s: Settings = c.try_deserialize().expect("Deserialization failed"); 22 | 23 | assert_eq!(s.db_host, "default"); 24 | } 25 | 26 | #[test] 27 | fn try_from_defaults() { 28 | let c = Config::try_from(&Settings::default()).expect("Serialization failed"); 29 | let s: Settings = c.try_deserialize().expect("Deserialization failed"); 30 | assert_eq!(s.db_host, "default"); 31 | } 32 | -------------------------------------------------------------------------------- /tests/testsuite/deserialize-invalid-type.json: -------------------------------------------------------------------------------- 1 | { 2 | "place": { 3 | "name": "Torre di Pisa" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /tests/testsuite/deserialize-missing-field.json: -------------------------------------------------------------------------------- 1 | { 2 | "inner": { "value": 42 } 3 | } 4 | -------------------------------------------------------------------------------- /tests/testsuite/empty.rs: -------------------------------------------------------------------------------- 1 | use config::Config; 2 | use serde_derive::{Deserialize, Serialize}; 3 | 4 | #[derive(Debug, Serialize, Deserialize)] 5 | struct Settings { 6 | #[serde(skip)] 7 | foo: isize, 8 | #[serde(skip)] 9 | bar: u8, 10 | } 11 | 12 | #[test] 13 | fn empty_deserializes() { 14 | let s: Settings = Config::default() 15 | .try_deserialize() 16 | .expect("Deserialization failed"); 17 | assert_eq!(s.foo, 0); 18 | assert_eq!(s.bar, 0); 19 | } 20 | -------------------------------------------------------------------------------- /tests/testsuite/file-auto.json: -------------------------------------------------------------------------------- 1 | { 2 | "debug": true, 3 | "production": false 4 | } 5 | -------------------------------------------------------------------------------- /tests/testsuite/file-ext.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "debug": true, 3 | "production": false 4 | } 5 | -------------------------------------------------------------------------------- /tests/testsuite/file-second-ext.default.json: -------------------------------------------------------------------------------- 1 | { 2 | "debug": true, 3 | "production": false 4 | } 5 | -------------------------------------------------------------------------------- /tests/testsuite/file.rs: -------------------------------------------------------------------------------- 1 | use snapbox::{assert_data_eq, str}; 2 | 3 | use config::{Config, File, FileFormat}; 4 | 5 | #[test] 6 | #[cfg(feature = "json")] 7 | fn test_file_not_required() { 8 | let res = Config::builder() 9 | .add_source(File::new("tests/testsuite/file-nonexistent", FileFormat::Json).required(false)) 10 | .build(); 11 | 12 | assert!(res.is_ok()); 13 | } 14 | 15 | #[test] 16 | #[cfg(feature = "json")] 17 | fn test_file_required_not_found() { 18 | let res = Config::builder() 19 | .add_source(File::new( 20 | "tests/testsuite/file-nonexistent", 21 | FileFormat::Json, 22 | )) 23 | .build(); 24 | 25 | assert!(res.is_err()); 26 | assert_data_eq!( 27 | res.unwrap_err().to_string(), 28 | str![[r#"configuration file "tests/testsuite/file-nonexistent" not found"#]] 29 | ); 30 | } 31 | 32 | #[test] 33 | #[cfg(feature = "json")] 34 | fn test_file_auto() { 35 | let c = Config::builder() 36 | .add_source(File::with_name("tests/testsuite/file-auto")) 37 | .build() 38 | .unwrap(); 39 | 40 | assert_eq!(c.get("debug").ok(), Some(true)); 41 | assert_eq!(c.get("production").ok(), Some(false)); 42 | } 43 | 44 | #[test] 45 | #[cfg(feature = "json")] 46 | fn test_file_auto_not_found() { 47 | let res = Config::builder() 48 | .add_source(File::with_name("tests/testsuite/file-nonexistent")) 49 | .build(); 50 | 51 | assert!(res.is_err()); 52 | assert_data_eq!( 53 | res.unwrap_err().to_string(), 54 | str![[r#"configuration file "tests/testsuite/file-nonexistent" not found"#]] 55 | ); 56 | } 57 | 58 | #[test] 59 | #[cfg(feature = "json")] 60 | fn test_file_ext() { 61 | let c = Config::builder() 62 | .add_source(File::with_name("tests/testsuite/file-ext.json")) 63 | .build() 64 | .unwrap(); 65 | 66 | assert_eq!(c.get("debug").ok(), Some(true)); 67 | assert_eq!(c.get("production").ok(), Some(false)); 68 | } 69 | 70 | #[test] 71 | #[cfg(feature = "json")] 72 | fn test_file_second_ext() { 73 | let c = Config::builder() 74 | .add_source(File::with_name("tests/testsuite/file-second-ext.default")) 75 | .build() 76 | .unwrap(); 77 | 78 | assert_eq!(c.get("debug").ok(), Some(true)); 79 | assert_eq!(c.get("production").ok(), Some(false)); 80 | } 81 | -------------------------------------------------------------------------------- /tests/testsuite/file_ini.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "ini")] 2 | 3 | use chrono::{DateTime, TimeZone, Utc}; 4 | use serde_derive::Deserialize; 5 | use snapbox::{assert_data_eq, str}; 6 | 7 | use config::{Config, File, FileFormat}; 8 | 9 | #[test] 10 | fn test_file() { 11 | #[derive(Debug, Deserialize, PartialEq)] 12 | struct Settings { 13 | debug: f64, 14 | place: Place, 15 | } 16 | 17 | #[derive(Debug, Deserialize, PartialEq)] 18 | struct Place { 19 | name: String, 20 | longitude: f64, 21 | latitude: f64, 22 | favorite: bool, 23 | reviews: u64, 24 | rating: Option, 25 | } 26 | 27 | let c = Config::builder() 28 | .add_source(File::from_str( 29 | 
r#" 30 | debug = true 31 | production = false 32 | FOO = FOO should be overridden 33 | bar = I am bar 34 | [place] 35 | name = Torre di Pisa 36 | longitude = 43.7224985 37 | latitude = 10.3970522 38 | favorite = false 39 | reviews = 3866 40 | rating = 4.5 41 | "#, 42 | FileFormat::Ini, 43 | )) 44 | .build() 45 | .unwrap(); 46 | let s: Settings = c.try_deserialize().unwrap(); 47 | assert_eq!( 48 | s, 49 | Settings { 50 | debug: 1.0, 51 | place: Place { 52 | name: String::from("Torre di Pisa"), 53 | longitude: 43.722_498_5, 54 | latitude: 10.397_052_2, 55 | favorite: false, 56 | reviews: 3866, 57 | rating: Some(4.5), 58 | }, 59 | } 60 | ); 61 | } 62 | 63 | #[test] 64 | fn test_error_parse() { 65 | let res = Config::builder() 66 | .add_source(File::from_str( 67 | r#" 68 | ok : true, 69 | error 70 | "#, 71 | FileFormat::Ini, 72 | )) 73 | .build(); 74 | 75 | assert!(res.is_err()); 76 | assert_data_eq!( 77 | res.unwrap_err().to_string(), 78 | str![[r#"4:1 expecting "[Some('='), Some(':')]" but found EOF."#]] 79 | ); 80 | } 81 | 82 | #[test] 83 | fn test_override_uppercase_value_for_struct() { 84 | #[derive(Debug, Deserialize, PartialEq)] 85 | struct StructSettings { 86 | foo: String, 87 | bar: String, 88 | } 89 | 90 | #[derive(Debug, Deserialize, PartialEq)] 91 | #[allow(non_snake_case)] 92 | struct CapSettings { 93 | FOO: String, 94 | } 95 | 96 | std::env::set_var("APP_FOO", "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE"); 97 | 98 | let cfg = Config::builder() 99 | .add_source(File::from_str( 100 | r#" 101 | debug = true 102 | production = false 103 | FOO = FOO should be overridden 104 | bar = I am bar 105 | [place] 106 | name = Torre di Pisa 107 | longitude = 43.7224985 108 | latitude = 10.3970522 109 | favorite = false 110 | reviews = 3866 111 | rating = 4.5 112 | "#, 113 | FileFormat::Ini, 114 | )) 115 | .add_source(config::Environment::with_prefix("APP").separator("_")) 116 | .build() 117 | .unwrap(); 118 | let cap_settings = cfg.clone().try_deserialize::(); 119 | let lower_settings = cfg.try_deserialize::().unwrap(); 120 | 121 | match cap_settings { 122 | Ok(v) => { 123 | // this assertion will ensure that the map has only lowercase keys 124 | assert_eq!(v.FOO, "FOO should be overridden"); 125 | assert_eq!( 126 | lower_settings.foo, 127 | "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() 128 | ); 129 | } 130 | Err(e) => { 131 | if e.to_string().contains("missing field `FOO`") { 132 | assert_eq!( 133 | lower_settings.foo, 134 | "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() 135 | ); 136 | } else { 137 | panic!("{}", e); 138 | } 139 | } 140 | } 141 | } 142 | 143 | #[test] 144 | fn test_override_lowercase_value_for_struct() { 145 | #[derive(Debug, Deserialize, PartialEq)] 146 | struct StructSettings { 147 | foo: String, 148 | bar: String, 149 | } 150 | 151 | std::env::set_var("config_foo", "I have been overridden_with_lower_case"); 152 | 153 | let cfg = Config::builder() 154 | .add_source(File::from_str( 155 | r#" 156 | debug = true 157 | production = false 158 | FOO = FOO should be overridden 159 | bar = I am bar 160 | [place] 161 | name = Torre di Pisa 162 | longitude = 43.7224985 163 | latitude = 10.3970522 164 | favorite = false 165 | reviews = 3866 166 | rating = 4.5 167 | "#, 168 | FileFormat::Ini, 169 | )) 170 | .add_source(config::Environment::with_prefix("config").separator("_")) 171 | .build() 172 | .unwrap(); 173 | 174 | let values: StructSettings = cfg.try_deserialize().unwrap(); 175 | assert_eq!( 176 | values.foo, 177 | "I have been overridden_with_lower_case".to_owned() 178 | 
); 179 | assert_eq!(values.bar, "I am bar".to_owned()); 180 | } 181 | 182 | #[test] 183 | fn test_override_uppercase_value_for_enums() { 184 | #[derive(Debug, Deserialize, PartialEq)] 185 | enum EnumSettings { 186 | Bar(String), 187 | } 188 | 189 | std::env::set_var("APPS_BAR", "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE"); 190 | 191 | let cfg = Config::builder() 192 | .add_source(File::from_str( 193 | r#" 194 | bar = "bar is a lowercase param" 195 | "#, 196 | FileFormat::Ini, 197 | )) 198 | .add_source(config::Environment::with_prefix("APPS").separator("_")) 199 | .build() 200 | .unwrap(); 201 | 202 | let param = cfg.try_deserialize::(); 203 | assert!(param.is_err()); 204 | assert_data_eq!( 205 | param.unwrap_err().to_string(), 206 | str!["enum EnumSettings does not have variant constructor bar"] 207 | ); 208 | } 209 | 210 | #[test] 211 | fn test_override_lowercase_value_for_enums() { 212 | #[derive(Debug, Deserialize, PartialEq)] 213 | enum EnumSettings { 214 | Bar(String), 215 | } 216 | 217 | std::env::set_var("test_bar", "I have been overridden_with_lower_case"); 218 | 219 | let cfg = Config::builder() 220 | .add_source(File::from_str( 221 | r#" 222 | bar = "bar is a lowercase param" 223 | "#, 224 | FileFormat::Ini, 225 | )) 226 | .add_source(config::Environment::with_prefix("test").separator("_")) 227 | .build() 228 | .unwrap(); 229 | 230 | let param = cfg.try_deserialize::(); 231 | assert!(param.is_err()); 232 | assert_data_eq!( 233 | param.unwrap_err().to_string(), 234 | str!["enum EnumSettings does not have variant constructor bar"] 235 | ); 236 | } 237 | 238 | #[test] 239 | fn ini() { 240 | let s = Config::builder() 241 | .add_source(File::from_str( 242 | r#" 243 | ini_datetime = 2017-05-10T02:14:53Z 244 | "#, 245 | FileFormat::Ini, 246 | )) 247 | .build() 248 | .unwrap(); 249 | 250 | let date: String = s.get("ini_datetime").unwrap(); 251 | assert_eq!(&date, "2017-05-10T02:14:53Z"); 252 | let date: DateTime = s.get("ini_datetime").unwrap(); 253 | assert_eq!(date, Utc.with_ymd_and_hms(2017, 5, 10, 2, 14, 53).unwrap()); 254 | } 255 | -------------------------------------------------------------------------------- /tests/testsuite/file_json.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "json")] 2 | 3 | use chrono::{DateTime, TimeZone, Utc}; 4 | use float_cmp::ApproxEqUlps; 5 | use serde_derive::Deserialize; 6 | use snapbox::{assert_data_eq, str}; 7 | 8 | use config::{Config, File, FileFormat, Map, Value}; 9 | 10 | #[test] 11 | fn test_file() { 12 | #[derive(Debug, Deserialize)] 13 | struct Settings { 14 | debug: f64, 15 | production: Option, 16 | place: Place, 17 | #[serde(rename = "arr")] 18 | elements: Vec, 19 | } 20 | 21 | #[derive(Debug, Deserialize)] 22 | struct Place { 23 | name: String, 24 | longitude: f64, 25 | latitude: f64, 26 | favorite: bool, 27 | telephone: Option, 28 | reviews: u64, 29 | creator: Map, 30 | rating: Option, 31 | } 32 | 33 | let c = Config::builder() 34 | .add_source(File::from_str( 35 | r#" 36 | { 37 | "debug": true, 38 | "debug_json": true, 39 | "production": false, 40 | "arr": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 41 | "place": { 42 | "name": "Torre di Pisa", 43 | "longitude": 43.7224985, 44 | "latitude": 10.3970522, 45 | "favorite": false, 46 | "reviews": 3866, 47 | "rating": 4.5, 48 | "creator": { 49 | "name": "John Smith", 50 | "username": "jsmith", 51 | "email": "jsmith@localhost" 52 | } 53 | }, 54 | "FOO": "FOO should be overridden", 55 | "bar": "I am bar" 56 | } 57 | "#, 58 | FileFormat::Json, 59 
| )) 60 | .build() 61 | .unwrap(); 62 | 63 | // Deserialize the entire file as single struct 64 | let s: Settings = c.try_deserialize().unwrap(); 65 | 66 | assert!(s.debug.approx_eq_ulps(&1.0, 2)); 67 | assert_eq!(s.production, Some("false".to_owned())); 68 | assert_eq!(s.place.name, "Torre di Pisa"); 69 | assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); 70 | assert!(s.place.latitude.approx_eq_ulps(&10.397_052_2, 2)); 71 | assert!(!s.place.favorite); 72 | assert_eq!(s.place.reviews, 3866); 73 | assert_eq!(s.place.rating, Some(4.5)); 74 | assert_eq!(s.place.telephone, None); 75 | assert_eq!(s.elements.len(), 10); 76 | assert_eq!(s.elements[3], "4".to_owned()); 77 | if cfg!(feature = "preserve_order") { 78 | assert_eq!( 79 | s.place 80 | .creator 81 | .into_iter() 82 | .collect::>(), 83 | vec![ 84 | ("name".to_owned(), "John Smith".into()), 85 | ("username".into(), "jsmith".into()), 86 | ("email".into(), "jsmith@localhost".into()), 87 | ] 88 | ); 89 | } else { 90 | assert_eq!( 91 | s.place.creator["name"].clone().into_string().unwrap(), 92 | "John Smith".to_owned() 93 | ); 94 | } 95 | } 96 | 97 | #[test] 98 | fn test_error_parse() { 99 | let res = Config::builder() 100 | .add_source(File::from_str( 101 | r#" 102 | { 103 | "ok": true, 104 | "error" 105 | } 106 | "#, 107 | FileFormat::Json, 108 | )) 109 | .build(); 110 | 111 | assert!(res.is_err()); 112 | assert_data_eq!( 113 | res.unwrap_err().to_string(), 114 | str!["expected `:` at line 5 column 1"] 115 | ); 116 | } 117 | 118 | #[test] 119 | fn test_override_uppercase_value_for_struct() { 120 | #[derive(Debug, Deserialize, PartialEq)] 121 | struct StructSettings { 122 | foo: String, 123 | bar: String, 124 | } 125 | 126 | #[derive(Debug, Deserialize, PartialEq)] 127 | #[allow(non_snake_case)] 128 | struct CapSettings { 129 | FOO: String, 130 | } 131 | 132 | std::env::set_var("APP_FOO", "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE"); 133 | 134 | let cfg = Config::builder() 135 | .add_source(File::from_str( 136 | r#" 137 | { 138 | "debug": true, 139 | "debug_json": true, 140 | "production": false, 141 | "arr": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 142 | "place": { 143 | "name": "Torre di Pisa", 144 | "longitude": 43.7224985, 145 | "latitude": 10.3970522, 146 | "favorite": false, 147 | "reviews": 3866, 148 | "rating": 4.5, 149 | "creator": { 150 | "name": "John Smith", 151 | "username": "jsmith", 152 | "email": "jsmith@localhost" 153 | } 154 | }, 155 | "FOO": "FOO should be overridden", 156 | "bar": "I am bar" 157 | } 158 | "#, 159 | FileFormat::Json, 160 | )) 161 | .add_source(config::Environment::with_prefix("APP").separator("_")) 162 | .build() 163 | .unwrap(); 164 | 165 | let cap_settings = cfg.clone().try_deserialize::(); 166 | let lower_settings = cfg.try_deserialize::().unwrap(); 167 | 168 | match cap_settings { 169 | Ok(v) => { 170 | // this assertion will ensure that the map has only lowercase keys 171 | assert_eq!(v.FOO, "FOO should be overridden"); 172 | assert_eq!( 173 | lower_settings.foo, 174 | "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() 175 | ); 176 | } 177 | Err(e) => { 178 | if e.to_string().contains("missing field `FOO`") { 179 | println!("triggered error {e:?}"); 180 | assert_eq!( 181 | lower_settings.foo, 182 | "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() 183 | ); 184 | } else { 185 | panic!("{}", e); 186 | } 187 | } 188 | } 189 | } 190 | 191 | #[test] 192 | fn test_override_lowercase_value_for_struct() { 193 | #[derive(Debug, Deserialize, PartialEq)] 194 | struct StructSettings { 195 | foo: String, 196 | 
bar: String, 197 | } 198 | 199 | std::env::set_var("config_foo", "I have been overridden_with_lower_case"); 200 | 201 | let cfg = Config::builder() 202 | .add_source(File::from_str( 203 | r#" 204 | { 205 | "debug": true, 206 | "debug_json": true, 207 | "production": false, 208 | "arr": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 209 | "place": { 210 | "name": "Torre di Pisa", 211 | "longitude": 43.7224985, 212 | "latitude": 10.3970522, 213 | "favorite": false, 214 | "reviews": 3866, 215 | "rating": 4.5, 216 | "creator": { 217 | "name": "John Smith", 218 | "username": "jsmith", 219 | "email": "jsmith@localhost" 220 | } 221 | }, 222 | "FOO": "FOO should be overridden", 223 | "bar": "I am bar" 224 | } 225 | "#, 226 | FileFormat::Json, 227 | )) 228 | .add_source(config::Environment::with_prefix("config").separator("_")) 229 | .build() 230 | .unwrap(); 231 | 232 | let values: StructSettings = cfg.try_deserialize().unwrap(); 233 | assert_eq!( 234 | values.foo, 235 | "I have been overridden_with_lower_case".to_owned() 236 | ); 237 | assert_eq!(values.bar, "I am bar".to_owned()); 238 | } 239 | 240 | #[test] 241 | fn test_override_uppercase_value_for_enums() { 242 | #[derive(Debug, Deserialize, PartialEq)] 243 | enum EnumSettings { 244 | Bar(String), 245 | } 246 | 247 | std::env::set_var("APPS_BAR", "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE"); 248 | 249 | let cfg = Config::builder() 250 | .add_source(File::from_str( 251 | r#" 252 | { 253 | "bar": "bar is a lowercase param" 254 | } 255 | "#, 256 | FileFormat::Json, 257 | )) 258 | .add_source(config::Environment::with_prefix("APPS").separator("_")) 259 | .build() 260 | .unwrap(); 261 | 262 | let param = cfg.try_deserialize::(); 263 | assert!(param.is_err()); 264 | assert_data_eq!( 265 | param.unwrap_err().to_string(), 266 | str!["enum EnumSettings does not have variant constructor bar"] 267 | ); 268 | } 269 | 270 | #[test] 271 | fn test_override_lowercase_value_for_enums() { 272 | #[derive(Debug, Deserialize, PartialEq)] 273 | enum EnumSettings { 274 | Bar(String), 275 | } 276 | 277 | std::env::set_var("test_bar", "I have been overridden_with_lower_case"); 278 | 279 | let cfg = Config::builder() 280 | .add_source(File::from_str( 281 | r#" 282 | { 283 | "bar": "bar is a lowercase param" 284 | } 285 | "#, 286 | FileFormat::Json, 287 | )) 288 | .add_source(config::Environment::with_prefix("test").separator("_")) 289 | .build() 290 | .unwrap(); 291 | 292 | let param = cfg.try_deserialize::(); 293 | assert!(param.is_err()); 294 | assert_data_eq!( 295 | param.unwrap_err().to_string(), 296 | str!["enum EnumSettings does not have variant constructor bar"] 297 | ); 298 | } 299 | 300 | #[test] 301 | fn json() { 302 | let s = Config::builder() 303 | .add_source(File::from_str( 304 | r#" 305 | { 306 | "json_datetime": "2017-05-10T02:14:53Z" 307 | } 308 | "#, 309 | FileFormat::Json, 310 | )) 311 | .build() 312 | .unwrap(); 313 | 314 | let date: String = s.get("json_datetime").unwrap(); 315 | assert_eq!(&date, "2017-05-10T02:14:53Z"); 316 | let date: DateTime = s.get("json_datetime").unwrap(); 317 | assert_eq!(date, Utc.with_ymd_and_hms(2017, 5, 10, 2, 14, 53).unwrap()); 318 | } 319 | -------------------------------------------------------------------------------- /tests/testsuite/file_json5.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "json5")] 2 | 3 | use chrono::{DateTime, TimeZone, Utc}; 4 | use float_cmp::ApproxEqUlps; 5 | use serde_derive::Deserialize; 6 | use snapbox::{assert_data_eq, str}; 7 | 8 | use 
config::{Config, File, FileFormat, Map, Value}; 9 | 10 | #[test] 11 | fn test_file() { 12 | #[derive(Debug, Deserialize)] 13 | struct Settings { 14 | debug: f64, 15 | production: Option, 16 | place: Place, 17 | #[serde(rename = "arr")] 18 | elements: Vec, 19 | } 20 | 21 | #[derive(Debug, Deserialize)] 22 | struct Place { 23 | name: String, 24 | longitude: f64, 25 | latitude: f64, 26 | favorite: bool, 27 | telephone: Option, 28 | reviews: u64, 29 | creator: Map, 30 | rating: Option, 31 | } 32 | 33 | let c = Config::builder() 34 | .add_source(File::from_str( 35 | r#" 36 | { 37 | // c 38 | /* c */ 39 | debug: true, 40 | production: false, 41 | arr: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10,], 42 | place: { 43 | name: 'Torre di Pisa', 44 | longitude: 43.7224985, 45 | latitude: 10.3970522, 46 | favorite: false, 47 | reviews: 3866, 48 | rating: 4.5, 49 | creator: { 50 | name: "John Smith", 51 | "username": "jsmith", 52 | "email": "jsmith@localhost", 53 | } 54 | }, 55 | FOO: "FOO should be overridden", 56 | bar: "I am bar", 57 | } 58 | "#, 59 | FileFormat::Json5, 60 | )) 61 | .build() 62 | .unwrap(); 63 | 64 | // Deserialize the entire file as single struct 65 | let s: Settings = c.try_deserialize().unwrap(); 66 | 67 | assert!(s.debug.approx_eq_ulps(&1.0, 2)); 68 | assert_eq!(s.production, Some("false".to_owned())); 69 | assert_eq!(s.place.name, "Torre di Pisa"); 70 | assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); 71 | assert!(s.place.latitude.approx_eq_ulps(&10.397_052_2, 2)); 72 | assert!(!s.place.favorite); 73 | assert_eq!(s.place.reviews, 3866); 74 | assert_eq!(s.place.rating, Some(4.5)); 75 | assert_eq!(s.place.telephone, None); 76 | assert_eq!(s.elements.len(), 10); 77 | assert_eq!(s.elements[3], "4".to_owned()); 78 | if cfg!(feature = "preserve_order") { 79 | assert_eq!( 80 | s.place 81 | .creator 82 | .into_iter() 83 | .collect::>(), 84 | vec![ 85 | ("name".to_owned(), "John Smith".into()), 86 | ("username".into(), "jsmith".into()), 87 | ("email".into(), "jsmith@localhost".into()), 88 | ] 89 | ); 90 | } else { 91 | assert_eq!( 92 | s.place.creator["name"].clone().into_string().unwrap(), 93 | "John Smith".to_owned() 94 | ); 95 | } 96 | } 97 | 98 | #[test] 99 | fn test_error_parse() { 100 | let res = Config::builder() 101 | .add_source(File::from_str( 102 | r#" 103 | { 104 | ok: true 105 | error 106 | } 107 | "#, 108 | FileFormat::Json5, 109 | )) 110 | .build(); 111 | 112 | assert!(res.is_err()); 113 | assert_data_eq!( 114 | res.unwrap_err().to_string(), 115 | str![[r#" 116 | --> 3:7 117 | | 118 | 3 | ok: true 119 | | ^--- 120 | | 121 | = expected null 122 | "#]] 123 | ); 124 | } 125 | 126 | #[test] 127 | fn test_override_uppercase_value_for_struct() { 128 | #[derive(Debug, Deserialize, PartialEq)] 129 | struct StructSettings { 130 | foo: String, 131 | bar: String, 132 | } 133 | 134 | #[derive(Debug, Deserialize, PartialEq)] 135 | #[allow(non_snake_case)] 136 | struct CapSettings { 137 | FOO: String, 138 | } 139 | 140 | std::env::set_var("APP_FOO", "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE"); 141 | 142 | let cfg = Config::builder() 143 | .add_source(File::from_str( 144 | r#" 145 | { 146 | // c 147 | /* c */ 148 | debug: true, 149 | production: false, 150 | arr: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10,], 151 | place: { 152 | name: 'Torre di Pisa', 153 | longitude: 43.7224985, 154 | latitude: 10.3970522, 155 | favorite: false, 156 | reviews: 3866, 157 | rating: 4.5, 158 | creator: { 159 | name: "John Smith", 160 | "username": "jsmith", 161 | "email": "jsmith@localhost", 162 | } 163 | }, 164 | FOO: 
"FOO should be overridden", 165 | bar: "I am bar", 166 | } 167 | "#, 168 | FileFormat::Json5, 169 | )) 170 | .add_source(config::Environment::with_prefix("APP").separator("_")) 171 | .build() 172 | .unwrap(); 173 | 174 | let cap_settings = cfg.clone().try_deserialize::(); 175 | let lower_settings = cfg.try_deserialize::().unwrap(); 176 | 177 | match cap_settings { 178 | Ok(v) => { 179 | // this assertion will ensure that the map has only lowercase keys 180 | assert_eq!(v.FOO, "FOO should be overridden"); 181 | assert_eq!( 182 | lower_settings.foo, 183 | "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() 184 | ); 185 | } 186 | Err(e) => { 187 | if e.to_string().contains("missing field `FOO`") { 188 | assert_eq!( 189 | lower_settings.foo, 190 | "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() 191 | ); 192 | } else { 193 | panic!("{}", e); 194 | } 195 | } 196 | } 197 | } 198 | 199 | #[test] 200 | fn test_override_lowercase_value_for_struct() { 201 | #[derive(Debug, Deserialize, PartialEq)] 202 | struct StructSettings { 203 | foo: String, 204 | bar: String, 205 | } 206 | 207 | std::env::set_var("config_foo", "I have been overridden_with_lower_case"); 208 | 209 | let cfg = Config::builder() 210 | .add_source(File::from_str( 211 | r#" 212 | { 213 | // c 214 | /* c */ 215 | debug: true, 216 | production: false, 217 | arr: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10,], 218 | place: { 219 | name: 'Torre di Pisa', 220 | longitude: 43.7224985, 221 | latitude: 10.3970522, 222 | favorite: false, 223 | reviews: 3866, 224 | rating: 4.5, 225 | creator: { 226 | name: "John Smith", 227 | "username": "jsmith", 228 | "email": "jsmith@localhost", 229 | } 230 | }, 231 | FOO: "FOO should be overridden", 232 | bar: "I am bar", 233 | } 234 | "#, 235 | FileFormat::Json5, 236 | )) 237 | .add_source(config::Environment::with_prefix("config").separator("_")) 238 | .build() 239 | .unwrap(); 240 | 241 | let values: StructSettings = cfg.try_deserialize().unwrap(); 242 | assert_eq!( 243 | values.foo, 244 | "I have been overridden_with_lower_case".to_owned() 245 | ); 246 | assert_eq!(values.bar, "I am bar".to_owned()); 247 | } 248 | 249 | #[test] 250 | fn test_override_uppercase_value_for_enums() { 251 | #[derive(Debug, Deserialize, PartialEq)] 252 | enum EnumSettings { 253 | Bar(String), 254 | } 255 | 256 | std::env::set_var("APPS_BAR", "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE"); 257 | 258 | let cfg = Config::builder() 259 | .add_source(File::from_str( 260 | r#" 261 | { 262 | bar: "bar is a lowercase param", 263 | } 264 | "#, 265 | FileFormat::Json5, 266 | )) 267 | .add_source(config::Environment::with_prefix("APPS").separator("_")) 268 | .build() 269 | .unwrap(); 270 | 271 | let param = cfg.try_deserialize::(); 272 | assert!(param.is_err()); 273 | assert_data_eq!( 274 | param.unwrap_err().to_string(), 275 | str!["enum EnumSettings does not have variant constructor bar"] 276 | ); 277 | } 278 | 279 | #[test] 280 | fn test_override_lowercase_value_for_enums() { 281 | #[derive(Debug, Deserialize, PartialEq)] 282 | enum EnumSettings { 283 | Bar(String), 284 | } 285 | 286 | std::env::set_var("test_bar", "I have been overridden_with_lower_case"); 287 | 288 | let cfg = Config::builder() 289 | .add_source(File::from_str( 290 | r#" 291 | { 292 | bar: "bar is a lowercase param", 293 | } 294 | "#, 295 | FileFormat::Json5, 296 | )) 297 | .add_source(config::Environment::with_prefix("test").separator("_")) 298 | .build() 299 | .unwrap(); 300 | 301 | let param = cfg.try_deserialize::(); 302 | assert!(param.is_err()); 303 | assert_data_eq!( 304 | 
param.unwrap_err().to_string(), 305 | str!["enum EnumSettings does not have variant constructor bar"] 306 | ); 307 | } 308 | 309 | #[test] 310 | fn json() { 311 | let s = Config::builder() 312 | .add_source(File::from_str( 313 | r#" 314 | { 315 | "json_datetime": "2017-05-10T02:14:53Z" 316 | } 317 | "#, 318 | FileFormat::Json5, 319 | )) 320 | .build() 321 | .unwrap(); 322 | 323 | let date: String = s.get("json_datetime").unwrap(); 324 | assert_eq!(&date, "2017-05-10T02:14:53Z"); 325 | let date: DateTime = s.get("json_datetime").unwrap(); 326 | assert_eq!(date, Utc.with_ymd_and_hms(2017, 5, 10, 2, 14, 53).unwrap()); 327 | } 328 | -------------------------------------------------------------------------------- /tests/testsuite/file_ron.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "ron")] 2 | 3 | use chrono::{DateTime, TimeZone, Utc}; 4 | use float_cmp::ApproxEqUlps; 5 | use serde_derive::Deserialize; 6 | use snapbox::{assert_data_eq, str}; 7 | 8 | use config::{Config, File, FileFormat, Map, Value}; 9 | 10 | #[test] 11 | fn test_file() { 12 | #[derive(Debug, Deserialize)] 13 | struct Settings { 14 | debug: f64, 15 | production: Option, 16 | place: Place, 17 | #[serde(rename = "arr")] 18 | elements: Vec, 19 | } 20 | 21 | #[derive(Debug, Deserialize)] 22 | struct Place { 23 | initials: (char, char), 24 | name: String, 25 | longitude: f64, 26 | latitude: f64, 27 | favorite: bool, 28 | telephone: Option, 29 | reviews: u64, 30 | creator: Map, 31 | rating: Option, 32 | } 33 | 34 | let c = Config::builder() 35 | .add_source(File::from_str( 36 | r#" 37 | ( 38 | debug: true, 39 | production: false, 40 | arr: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 41 | place: ( 42 | initials: ('T', 'P'), 43 | name: "Torre di Pisa", 44 | longitude: 43.7224985, 45 | latitude: 10.3970522, 46 | favorite: false, 47 | reviews: 3866, 48 | rating: Some(4.5), 49 | telephone: None, 50 | creator: { 51 | "name": "John Smith", 52 | "username": "jsmith", 53 | "email": "jsmith@localhost" 54 | } 55 | ), 56 | FOO: "FOO should be overridden", 57 | bar: "I am bar" 58 | ) 59 | "#, 60 | FileFormat::Ron, 61 | )) 62 | .build() 63 | .unwrap(); 64 | 65 | // Deserialize the entire file as single struct 66 | let s: Settings = c.try_deserialize().unwrap(); 67 | 68 | assert!(s.debug.approx_eq_ulps(&1.0, 2)); 69 | assert_eq!(s.production, Some("false".to_owned())); 70 | assert_eq!(s.place.initials, ('T', 'P')); 71 | assert_eq!(s.place.name, "Torre di Pisa"); 72 | assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); 73 | assert!(s.place.latitude.approx_eq_ulps(&10.397_052_2, 2)); 74 | assert!(!s.place.favorite); 75 | assert_eq!(s.place.reviews, 3866); 76 | assert_eq!(s.place.rating, Some(4.5)); 77 | assert_eq!(s.place.telephone, None); 78 | assert_eq!(s.elements.len(), 10); 79 | assert_eq!(s.elements[3], "4".to_owned()); 80 | if cfg!(feature = "preserve_order") { 81 | assert_eq!( 82 | s.place 83 | .creator 84 | .into_iter() 85 | .collect::>(), 86 | vec![ 87 | ("name".to_owned(), "John Smith".into()), 88 | ("username".into(), "jsmith".into()), 89 | ("email".into(), "jsmith@localhost".into()), 90 | ] 91 | ); 92 | } else { 93 | assert_eq!( 94 | s.place.creator["name"].clone().into_string().unwrap(), 95 | "John Smith".to_owned() 96 | ); 97 | } 98 | } 99 | 100 | #[test] 101 | fn test_error_parse() { 102 | let res = Config::builder() 103 | .add_source(File::from_str( 104 | r#" 105 | ( 106 | ok: true, 107 | error 108 | ) 109 | "#, 110 | FileFormat::Ron, 111 | )) 112 | .build(); 113 | 114 | 
assert!(res.is_err()); 115 | assert_data_eq!(res.unwrap_err().to_string(), str!["5:1: Expected colon"]); 116 | } 117 | 118 | #[test] 119 | fn test_override_uppercase_value_for_struct() { 120 | #[derive(Debug, Deserialize, PartialEq)] 121 | struct StructSettings { 122 | foo: String, 123 | bar: String, 124 | } 125 | 126 | #[derive(Debug, Deserialize, PartialEq)] 127 | #[allow(non_snake_case)] 128 | struct CapSettings { 129 | FOO: String, 130 | } 131 | 132 | std::env::set_var("APP_FOO", "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE"); 133 | 134 | let cfg = Config::builder() 135 | .add_source(File::from_str( 136 | r#" 137 | ( 138 | debug: true, 139 | production: false, 140 | arr: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 141 | place: ( 142 | initials: ('T', 'P'), 143 | name: "Torre di Pisa", 144 | longitude: 43.7224985, 145 | latitude: 10.3970522, 146 | favorite: false, 147 | reviews: 3866, 148 | rating: Some(4.5), 149 | telephone: None, 150 | creator: { 151 | "name": "John Smith", 152 | "username": "jsmith", 153 | "email": "jsmith@localhost" 154 | } 155 | ), 156 | FOO: "FOO should be overridden", 157 | bar: "I am bar" 158 | ) 159 | "#, 160 | FileFormat::Ron, 161 | )) 162 | .add_source(config::Environment::with_prefix("APP").separator("_")) 163 | .build() 164 | .unwrap(); 165 | 166 | let cap_settings = cfg.clone().try_deserialize::(); 167 | let lower_settings = cfg.try_deserialize::().unwrap(); 168 | 169 | match cap_settings { 170 | Ok(v) => { 171 | // this assertion will ensure that the map has only lowercase keys 172 | assert_eq!(v.FOO, "FOO should be overridden"); 173 | assert_eq!( 174 | lower_settings.foo, 175 | "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() 176 | ); 177 | } 178 | Err(e) => { 179 | if e.to_string().contains("missing field `FOO`") { 180 | assert_eq!( 181 | lower_settings.foo, 182 | "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() 183 | ); 184 | } else { 185 | panic!("{}", e); 186 | } 187 | } 188 | } 189 | } 190 | 191 | #[test] 192 | fn test_override_lowercase_value_for_struct() { 193 | #[derive(Debug, Deserialize, PartialEq)] 194 | struct StructSettings { 195 | foo: String, 196 | bar: String, 197 | } 198 | 199 | std::env::set_var("config_foo", "I have been overridden_with_lower_case"); 200 | 201 | let cfg = Config::builder() 202 | .add_source(File::from_str( 203 | r#" 204 | ( 205 | debug: true, 206 | production: false, 207 | arr: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 208 | place: ( 209 | initials: ('T', 'P'), 210 | name: "Torre di Pisa", 211 | longitude: 43.7224985, 212 | latitude: 10.3970522, 213 | favorite: false, 214 | reviews: 3866, 215 | rating: Some(4.5), 216 | telephone: None, 217 | creator: { 218 | "name": "John Smith", 219 | "username": "jsmith", 220 | "email": "jsmith@localhost" 221 | } 222 | ), 223 | FOO: "FOO should be overridden", 224 | bar: "I am bar" 225 | ) 226 | "#, 227 | FileFormat::Ron, 228 | )) 229 | .add_source(config::Environment::with_prefix("config").separator("_")) 230 | .build() 231 | .unwrap(); 232 | 233 | let values: StructSettings = cfg.try_deserialize().unwrap(); 234 | assert_eq!( 235 | values.foo, 236 | "I have been overridden_with_lower_case".to_owned() 237 | ); 238 | assert_eq!(values.bar, "I am bar".to_owned()); 239 | } 240 | 241 | #[test] 242 | fn test_override_uppercase_value_for_enums() { 243 | #[derive(Debug, Deserialize, PartialEq)] 244 | enum EnumSettings { 245 | Bar(String), 246 | } 247 | 248 | std::env::set_var("APPS_BAR", "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE"); 249 | 250 | let cfg = Config::builder() 251 | .add_source(File::from_str( 252 | 
r#" 253 | ( 254 | bar: "bar is a lowercase param" 255 | ) 256 | "#, 257 | FileFormat::Ron, 258 | )) 259 | .add_source(config::Environment::with_prefix("APPS").separator("_")) 260 | .build() 261 | .unwrap(); 262 | 263 | let param = cfg.try_deserialize::(); 264 | assert!(param.is_err()); 265 | assert_data_eq!( 266 | param.unwrap_err().to_string(), 267 | str!["enum EnumSettings does not have variant constructor bar"] 268 | ); 269 | } 270 | 271 | #[test] 272 | fn test_override_lowercase_value_for_enums() { 273 | #[derive(Debug, Deserialize, PartialEq)] 274 | enum EnumSettings { 275 | Bar(String), 276 | } 277 | 278 | std::env::set_var("test_bar", "I have been overridden_with_lower_case"); 279 | 280 | let cfg = Config::builder() 281 | .add_source(File::from_str( 282 | r#" 283 | ( 284 | bar: "bar is a lowercase param" 285 | ) 286 | "#, 287 | FileFormat::Ron, 288 | )) 289 | .add_source(config::Environment::with_prefix("test").separator("_")) 290 | .build() 291 | .unwrap(); 292 | 293 | let param = cfg.try_deserialize::(); 294 | assert!(param.is_err()); 295 | assert_data_eq!( 296 | param.unwrap_err().to_string(), 297 | str!["enum EnumSettings does not have variant constructor bar"] 298 | ); 299 | } 300 | 301 | #[test] 302 | fn ron() { 303 | let s = Config::builder() 304 | .add_source(File::from_str( 305 | r#" 306 | ( 307 | ron_datetime: "2021-04-19T11:33:02Z" 308 | ) 309 | "#, 310 | FileFormat::Ron, 311 | )) 312 | .build() 313 | .unwrap(); 314 | 315 | let date: String = s.get("ron_datetime").unwrap(); 316 | assert_eq!(&date, "2021-04-19T11:33:02Z"); 317 | let date: DateTime = s.get("ron_datetime").unwrap(); 318 | assert_eq!(date, Utc.with_ymd_and_hms(2021, 4, 19, 11, 33, 2).unwrap()); 319 | } 320 | -------------------------------------------------------------------------------- /tests/testsuite/file_toml.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "toml")] 2 | 3 | use chrono::{DateTime, TimeZone, Utc}; 4 | use float_cmp::ApproxEqUlps; 5 | use serde_derive::Deserialize; 6 | use snapbox::{assert_data_eq, str}; 7 | 8 | use config::{Config, File, FileFormat, Map, Value}; 9 | 10 | #[test] 11 | fn test_file() { 12 | #[derive(Debug, Deserialize)] 13 | struct Settings { 14 | debug: f64, 15 | production: Option, 16 | code: AsciiCode, 17 | place: Place, 18 | #[serde(rename = "arr")] 19 | elements: Vec, 20 | } 21 | 22 | #[derive(Debug, Deserialize)] 23 | struct Place { 24 | number: PlaceNumber, 25 | name: String, 26 | longitude: f64, 27 | latitude: f64, 28 | favorite: bool, 29 | telephone: Option, 30 | reviews: u64, 31 | creator: Map, 32 | rating: Option, 33 | } 34 | 35 | #[derive(Debug, Deserialize, PartialEq, Eq)] 36 | struct PlaceNumber(u8); 37 | 38 | #[derive(Debug, Deserialize, PartialEq, Eq)] 39 | struct AsciiCode(i8); 40 | 41 | let c = Config::builder() 42 | .add_source(File::from_str( 43 | r#" 44 | debug = true 45 | debug_s = "true" 46 | production = false 47 | production_s = "false" 48 | 49 | code = 53 50 | 51 | # errors 52 | boolean_s_parse = "fals" 53 | 54 | # For override tests 55 | FOO="FOO should be overridden" 56 | bar="I am bar" 57 | 58 | arr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] 59 | quarks = ["up", "down", "strange", "charm", "bottom", "top"] 60 | 61 | [diodes] 62 | green = "off" 63 | 64 | [diodes.red] 65 | brightness = 100 66 | 67 | [diodes.blue] 68 | blinking = [300, 700] 69 | 70 | [diodes.white.pattern] 71 | name = "christmas" 72 | inifinite = true 73 | 74 | [[items]] 75 | name = "1" 76 | 77 | [[items]] 78 | name = "2" 79 | 80 | 
[place] 81 | number = 1 82 | name = "Torre di Pisa" 83 | longitude = 43.7224985 84 | latitude = 10.3970522 85 | favorite = false 86 | reviews = 3866 87 | rating = 4.5 88 | 89 | [place.creator] 90 | name = "John Smith" 91 | username = "jsmith" 92 | email = "jsmith@localhost" 93 | 94 | [proton] 95 | up = 2 96 | down = 1 97 | 98 | [divisors] 99 | 1 = 1 100 | 2 = 2 101 | 4 = 3 102 | 5 = 2 103 | "#, 104 | FileFormat::Toml, 105 | )) 106 | .build() 107 | .unwrap(); 108 | 109 | // Deserialize the entire file as single struct 110 | let s: Settings = c.try_deserialize().unwrap(); 111 | 112 | assert!(s.debug.approx_eq_ulps(&1.0, 2)); 113 | assert_eq!(s.production, Some("false".to_owned())); 114 | assert_eq!(s.code, AsciiCode(53)); 115 | assert_eq!(s.place.number, PlaceNumber(1)); 116 | assert_eq!(s.place.name, "Torre di Pisa"); 117 | assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); 118 | assert!(s.place.latitude.approx_eq_ulps(&10.397_052_2, 2)); 119 | assert!(!s.place.favorite); 120 | assert_eq!(s.place.reviews, 3866); 121 | assert_eq!(s.place.rating, Some(4.5)); 122 | assert_eq!(s.place.telephone, None); 123 | assert_eq!(s.elements.len(), 10); 124 | assert_eq!(s.elements[3], "4".to_owned()); 125 | if cfg!(feature = "preserve_order") { 126 | assert_eq!( 127 | s.place 128 | .creator 129 | .into_iter() 130 | .collect::>(), 131 | vec![ 132 | ("name".to_owned(), "John Smith".into()), 133 | ("username".into(), "jsmith".into()), 134 | ("email".into(), "jsmith@localhost".into()), 135 | ] 136 | ); 137 | } else { 138 | assert_eq!( 139 | s.place.creator["name"].clone().into_string().unwrap(), 140 | "John Smith".to_owned() 141 | ); 142 | } 143 | } 144 | 145 | #[test] 146 | fn test_error_parse() { 147 | let res = Config::builder() 148 | .add_source(File::from_str( 149 | r#" 150 | ok = true 151 | error = tru 152 | "#, 153 | FileFormat::Toml, 154 | )) 155 | .build(); 156 | 157 | assert!(res.is_err()); 158 | assert_data_eq!( 159 | res.unwrap_err().to_string(), 160 | str![[r#" 161 | TOML parse error at line 3, column 9 162 | | 163 | 3 | error = tru 164 | | ^ 165 | invalid string 166 | expected `"`, `'` 167 | 168 | "#]] 169 | ); 170 | } 171 | 172 | #[test] 173 | fn test_override_uppercase_value_for_struct() { 174 | #[derive(Debug, Deserialize, PartialEq)] 175 | struct StructSettings { 176 | foo: String, 177 | bar: String, 178 | } 179 | 180 | #[derive(Debug, Deserialize, PartialEq)] 181 | #[allow(non_snake_case)] 182 | struct CapSettings { 183 | FOO: String, 184 | } 185 | 186 | std::env::set_var("APP_FOO", "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE"); 187 | 188 | let cfg = Config::builder() 189 | .add_source(File::from_str( 190 | r#" 191 | debug = true 192 | debug_s = "true" 193 | production = false 194 | production_s = "false" 195 | 196 | code = 53 197 | 198 | # errors 199 | boolean_s_parse = "fals" 200 | 201 | # For override tests 202 | FOO="FOO should be overridden" 203 | bar="I am bar" 204 | 205 | arr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] 206 | quarks = ["up", "down", "strange", "charm", "bottom", "top"] 207 | 208 | [diodes] 209 | green = "off" 210 | 211 | [diodes.red] 212 | brightness = 100 213 | 214 | [diodes.blue] 215 | blinking = [300, 700] 216 | 217 | [diodes.white.pattern] 218 | name = "christmas" 219 | inifinite = true 220 | 221 | [[items]] 222 | name = "1" 223 | 224 | [[items]] 225 | name = "2" 226 | 227 | [place] 228 | number = 1 229 | name = "Torre di Pisa" 230 | longitude = 43.7224985 231 | latitude = 10.3970522 232 | favorite = false 233 | reviews = 3866 234 | rating = 4.5 235 | 236 | 
[place.creator] 237 | name = "John Smith" 238 | username = "jsmith" 239 | email = "jsmith@localhost" 240 | 241 | [proton] 242 | up = 2 243 | down = 1 244 | 245 | [divisors] 246 | 1 = 1 247 | 2 = 2 248 | 4 = 3 249 | 5 = 2 250 | "#, 251 | FileFormat::Toml, 252 | )) 253 | .add_source(config::Environment::with_prefix("APP").separator("_")) 254 | .build() 255 | .unwrap(); 256 | 257 | let cap_settings = cfg.clone().try_deserialize::(); 258 | let lower_settings = cfg.try_deserialize::().unwrap(); 259 | 260 | match cap_settings { 261 | Ok(v) => { 262 | // this assertion will ensure that the map has only lowercase keys 263 | assert_eq!(v.FOO, "FOO should be overridden"); 264 | assert_eq!( 265 | lower_settings.foo, 266 | "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() 267 | ); 268 | } 269 | Err(e) => { 270 | if e.to_string().contains("missing field `FOO`") { 271 | assert_eq!( 272 | lower_settings.foo, 273 | "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() 274 | ); 275 | } else { 276 | panic!("{}", e); 277 | } 278 | } 279 | } 280 | } 281 | 282 | #[test] 283 | fn test_override_lowercase_value_for_struct() { 284 | #[derive(Debug, Deserialize, PartialEq)] 285 | struct StructSettings { 286 | foo: String, 287 | bar: String, 288 | } 289 | 290 | std::env::set_var("config_foo", "I have been overridden_with_lower_case"); 291 | 292 | let cfg = Config::builder() 293 | .add_source(File::from_str( 294 | r#" 295 | debug = true 296 | debug_s = "true" 297 | production = false 298 | production_s = "false" 299 | 300 | code = 53 301 | 302 | # errors 303 | boolean_s_parse = "fals" 304 | 305 | # For override tests 306 | FOO="FOO should be overridden" 307 | bar="I am bar" 308 | 309 | arr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] 310 | quarks = ["up", "down", "strange", "charm", "bottom", "top"] 311 | 312 | [diodes] 313 | green = "off" 314 | 315 | [diodes.red] 316 | brightness = 100 317 | 318 | [diodes.blue] 319 | blinking = [300, 700] 320 | 321 | [diodes.white.pattern] 322 | name = "christmas" 323 | inifinite = true 324 | 325 | [[items]] 326 | name = "1" 327 | 328 | [[items]] 329 | name = "2" 330 | 331 | [place] 332 | number = 1 333 | name = "Torre di Pisa" 334 | longitude = 43.7224985 335 | latitude = 10.3970522 336 | favorite = false 337 | reviews = 3866 338 | rating = 4.5 339 | 340 | [place.creator] 341 | name = "John Smith" 342 | username = "jsmith" 343 | email = "jsmith@localhost" 344 | 345 | [proton] 346 | up = 2 347 | down = 1 348 | 349 | [divisors] 350 | 1 = 1 351 | 2 = 2 352 | 4 = 3 353 | 5 = 2 354 | "#, 355 | FileFormat::Toml, 356 | )) 357 | .add_source(config::Environment::with_prefix("config").separator("_")) 358 | .build() 359 | .unwrap(); 360 | 361 | let values: StructSettings = cfg.try_deserialize().unwrap(); 362 | assert_eq!( 363 | values.foo, 364 | "I have been overridden_with_lower_case".to_owned() 365 | ); 366 | assert_eq!(values.bar, "I am bar".to_owned()); 367 | } 368 | 369 | #[test] 370 | fn test_override_uppercase_value_for_enums() { 371 | #[derive(Debug, Deserialize, PartialEq)] 372 | enum EnumSettings { 373 | Bar(String), 374 | } 375 | 376 | std::env::set_var("APPS_BAR", "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE"); 377 | 378 | let cfg = Config::builder() 379 | .add_source(File::from_str( 380 | r#" 381 | bar = "bar is a lowercase param" 382 | "#, 383 | FileFormat::Toml, 384 | )) 385 | .add_source(config::Environment::with_prefix("APPS").separator("_")) 386 | .build() 387 | .unwrap(); 388 | 389 | let param = cfg.try_deserialize::(); 390 | assert!(param.is_err()); 391 | assert_data_eq!( 392 | 
param.unwrap_err().to_string(), 393 | str!["enum EnumSettings does not have variant constructor bar"] 394 | ); 395 | } 396 | 397 | #[test] 398 | fn test_override_lowercase_value_for_enums() { 399 | #[derive(Debug, Deserialize, PartialEq)] 400 | enum EnumSettings { 401 | Bar(String), 402 | } 403 | 404 | std::env::set_var("test_bar", "I have been overridden_with_lower_case"); 405 | 406 | let cfg = Config::builder() 407 | .add_source(File::from_str( 408 | r#" 409 | bar = "bar is a lowercase param" 410 | "#, 411 | FileFormat::Toml, 412 | )) 413 | .add_source(config::Environment::with_prefix("test").separator("_")) 414 | .build() 415 | .unwrap(); 416 | 417 | let param = cfg.try_deserialize::<EnumSettings>(); 418 | assert!(param.is_err()); 419 | assert_data_eq!( 420 | param.unwrap_err().to_string(), 421 | str!["enum EnumSettings does not have variant constructor bar"] 422 | ); 423 | } 424 | 425 | #[test] 426 | fn toml() { 427 | let s = Config::builder() 428 | .add_source(File::from_str( 429 | r#" 430 | toml_datetime = 2017-05-11T14:55:15Z 431 | "#, 432 | FileFormat::Toml, 433 | )) 434 | .build() 435 | .unwrap(); 436 | 437 | let date: String = s.get("toml_datetime").unwrap(); 438 | assert_eq!(&date, "2017-05-11T14:55:15Z"); 439 | let date: DateTime<Utc> = s.get("toml_datetime").unwrap(); 440 | assert_eq!(date, Utc.with_ymd_and_hms(2017, 5, 11, 14, 55, 15).unwrap()); 441 | } 442 | -------------------------------------------------------------------------------- /tests/testsuite/get-invalid-type.json: -------------------------------------------------------------------------------- 1 | { 2 | "boolean_s_parse": "fals" 3 | } 4 | -------------------------------------------------------------------------------- /tests/testsuite/get-missing-field.json: -------------------------------------------------------------------------------- 1 | { 2 | "inner": { "value": 42 } 3 | } 4 | -------------------------------------------------------------------------------- /tests/testsuite/integer_range.rs: -------------------------------------------------------------------------------- 1 | use config::Config; 2 | 3 | #[test] 4 | #[cfg(feature = "json")] 5 | fn wrapping_u16() { 6 | let c = Config::builder() 7 | .add_source(config::File::from_str( 8 | r#" 9 | { 10 | "settings": { 11 | "port": 66000 12 | } 13 | } 14 | "#, 15 | config::FileFormat::Json, 16 | )) 17 | .build() 18 | .unwrap(); 19 | 20 | // FIXME: Can't compare ConfigError, because Unexpected is private.
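// A possible interim check (not exercised by the original test) is to assert on the error's Display output instead, as other tests in this suite do, e.g. assert!(c.get::<u16>("settings.port").unwrap_err().to_string().contains("invalid type"));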
21 | let _port_error = c.get::<u16>("settings.port").unwrap_err(); 22 | /* 23 | assert!(matches!( 24 | Err(ConfigError::invalid_type(None, config::Unexpected::U64(66000), "an unsigned 16 bit integer"),) 25 | port_error 26 | )); 27 | */ 28 | } 29 | 30 | #[test] 31 | #[cfg(feature = "json")] 32 | fn nonwrapping_u32() { 33 | let c = Config::builder() 34 | .add_source(config::File::from_str( 35 | r#" 36 | { 37 | "settings": { 38 | "port": 66000 39 | } 40 | } 41 | "#, 42 | config::FileFormat::Json, 43 | )) 44 | .build() 45 | .unwrap(); 46 | 47 | let port: u32 = c.get("settings.port").unwrap(); 48 | assert_eq!(port, 66000); 49 | } 50 | 51 | #[test] 52 | #[should_panic] 53 | #[cfg(feature = "json")] 54 | fn invalid_signedness() { 55 | let c = Config::builder() 56 | .add_source(config::File::from_str( 57 | r#" 58 | { 59 | "settings": { 60 | "port": -1 61 | } 62 | } 63 | "#, 64 | config::FileFormat::Json, 65 | )) 66 | .build() 67 | .unwrap(); 68 | 69 | let _: u32 = c.get("settings.port").unwrap(); 70 | } 71 | -------------------------------------------------------------------------------- /tests/testsuite/log.rs: -------------------------------------------------------------------------------- 1 | use snapbox::{assert_data_eq, str}; 2 | 3 | use config::*; 4 | 5 | #[derive(Debug, Deserialize)] 6 | struct Settings { 7 | log: log::Level, 8 | } 9 | 10 | #[test] 11 | #[cfg(feature = "json")] 12 | fn test_load_level_uppercase() { 13 | let s = r#"{ "log": "ERROR" }"#; 14 | let c = Config::builder() 15 | .add_source(File::from_str(s, FileFormat::Json)) 16 | .build() 17 | .unwrap(); 18 | let l = c.get::<log::Level>("log").unwrap(); 19 | assert_eq!(l, log::Level::Error); 20 | } 21 | 22 | #[test] 23 | fn test_case_sensitivity_log_level_from_str() { 24 | // verify that log::Level's FromStr accepts lowercase input 25 | 26 | use std::str::FromStr; 27 | let l = log::Level::from_str("error").unwrap(); 28 | assert_eq!(l, log::Level::Error); 29 | } 30 | 31 | #[test] 32 | #[cfg(feature = "json")] 33 | fn test_case_sensitivity_json_from_str() { 34 | // to confirm serde_json works as expected 35 | let s = r#"{ "log": "error" }"#; 36 | 37 | let j: Settings = serde_json::from_str(s).unwrap(); 38 | assert_eq!(j.log, log::Level::Error); 39 | } 40 | 41 | #[test] 42 | #[cfg(feature = "json")] 43 | fn test_load_level_lowercase() { 44 | let s = r#"{ "log": "error" }"#; 45 | let c = Config::builder() 46 | .add_source(File::from_str(s, FileFormat::Json)) 47 | .build() 48 | .unwrap(); 49 | 50 | assert_eq!(c.get_string("log").unwrap(), "error"); 51 | 52 | let s = c.try_deserialize::<Settings>(); 53 | assert!(s.is_err()); 54 | assert_data_eq!( 55 | s.unwrap_err().to_string(), 56 | str!["enum Level does not have variant constructor error for key `log`"] 57 | ); 58 | } 59 | -------------------------------------------------------------------------------- /tests/testsuite/main.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate serde_derive; 3 | 4 | pub mod async_builder; 5 | pub mod case; 6 | pub mod defaults; 7 | pub mod empty; 8 | pub mod env; 9 | pub mod errors; 10 | pub mod file; 11 | pub mod file_ini; 12 | pub mod file_json; 13 | pub mod file_json5; 14 | pub mod file_ron; 15 | pub mod file_toml; 16 | pub mod file_yaml; 17 | pub mod get; 18 | pub mod integer_range; 19 | pub mod log; 20 | pub mod merge; 21 | pub mod ron_enum; 22 | pub mod set; 23 | pub mod unsigned_int; 24 | pub mod unsigned_int_hm; 25 | pub mod weird_keys; 26 | --------------------------------------------------------------------------------
/tests/testsuite/merge.rs: -------------------------------------------------------------------------------- 1 | use snapbox::{assert_data_eq, prelude::*, str}; 2 | 3 | use config::{Config, File, FileFormat, Map}; 4 | 5 | #[test] 6 | #[cfg(feature = "json")] 7 | fn test_merge() { 8 | let c = Config::builder() 9 | .add_source(File::from_str( 10 | r#" 11 | { 12 | "debug": true, 13 | "production": false, 14 | "place": { 15 | "rating": 4.5, 16 | "creator": { 17 | "name": "John Smith", 18 | "username": "jsmith", 19 | "email": "jsmith@localhost" 20 | } 21 | } 22 | } 23 | "#, 24 | FileFormat::Json, 25 | )) 26 | .add_source(File::from_str( 27 | r#" 28 | { 29 | "debug": false, 30 | "production": true, 31 | "place": { 32 | "rating": 4.9, 33 | "creator": { 34 | "name": "Somebody New" 35 | } 36 | } 37 | } 38 | "#, 39 | FileFormat::Json, 40 | )) 41 | .build() 42 | .unwrap(); 43 | 44 | assert_eq!(c.get("debug").ok(), Some(false)); 45 | assert_eq!(c.get("production").ok(), Some(true)); 46 | assert_eq!(c.get("place.rating").ok(), Some(4.9)); 47 | 48 | if cfg!(feature = "preserve_order") { 49 | let m: Map = c.get("place.creator").unwrap(); 50 | assert_eq!( 51 | m.into_iter().collect::>(), 52 | vec![ 53 | ("name".to_owned(), "Somebody New".to_owned()), 54 | ("username".to_owned(), "jsmith".to_owned()), 55 | ("email".to_owned(), "jsmith@localhost".to_owned()), 56 | ] 57 | ); 58 | } else { 59 | assert_eq!( 60 | c.get("place.creator.name").ok(), 61 | Some("Somebody New".to_owned()) 62 | ); 63 | } 64 | } 65 | 66 | #[test] 67 | fn test_merge_whole_config() { 68 | let builder1 = Config::builder().set_override("x", 10).unwrap(); 69 | let builder2 = Config::builder().set_override("y", 25).unwrap(); 70 | 71 | let config1 = builder1.build_cloned().unwrap(); 72 | let config2 = builder2.build_cloned().unwrap(); 73 | 74 | assert_eq!(config1.get("x").ok(), Some(10)); 75 | assert_eq!(config2.get::<()>("x").ok(), None); 76 | 77 | assert_eq!(config2.get("y").ok(), Some(25)); 78 | assert_eq!(config1.get::<()>("y").ok(), None); 79 | 80 | let config3 = builder1.add_source(config2).build().unwrap(); 81 | 82 | assert_eq!(config3.get("x").ok(), Some(10)); 83 | assert_eq!(config3.get("y").ok(), Some(25)); 84 | } 85 | 86 | #[test] 87 | #[cfg(feature = "json")] 88 | /// Test a few scenarios with empty maps: 89 | fn test_merge_empty_maps() { 90 | use std::collections::BTreeMap; 91 | 92 | #[derive(Debug, Deserialize)] 93 | #[allow(dead_code)] // temporary while this test is broken 94 | struct Settings { 95 | profile: BTreeMap, 96 | } 97 | 98 | #[derive(Debug, Default, Deserialize)] 99 | #[allow(dead_code)] // temporary while this test is broken 100 | struct Profile { 101 | name: Option, 102 | } 103 | 104 | // * missing_to_empty: no key -> empty map 105 | let cfg = Config::builder() 106 | .add_source(File::from_str(r#"{ "profile": {} }"#, FileFormat::Json)) 107 | .add_source(File::from_str( 108 | r#"{ "profile": { "missing_to_empty": {} } }"#, 109 | FileFormat::Json, 110 | )) 111 | .build() 112 | .unwrap(); 113 | let res = cfg.try_deserialize::(); 114 | assert_data_eq!( 115 | res.unwrap().to_debug(), 116 | str![[r#" 117 | Settings { 118 | profile: { 119 | "missing_to_empty": Profile { 120 | name: None, 121 | }, 122 | }, 123 | } 124 | 125 | "#]] 126 | ); 127 | 128 | // * missing_to_non_empty: no key -> map with k/v 129 | let cfg = Config::builder() 130 | .add_source(File::from_str(r#"{ "profile": {} }"#, FileFormat::Json)) 131 | .add_source(File::from_str( 132 | r#"{ "profile": { "missing_to_non_empty": { "name": "bar" } } }"#, 133 | 
FileFormat::Json, 134 | )) 135 | .build() 136 | .unwrap(); 137 | let res = cfg.try_deserialize::(); 138 | assert_data_eq!( 139 | res.unwrap().to_debug(), 140 | str![[r#" 141 | Settings { 142 | profile: { 143 | "missing_to_non_empty": Profile { 144 | name: Some( 145 | "bar", 146 | ), 147 | }, 148 | }, 149 | } 150 | 151 | "#]] 152 | ); 153 | 154 | // * empty_to_empty: empty map -> empty map 155 | let cfg = Config::builder() 156 | .add_source(File::from_str( 157 | r#"{ "profile": { "empty_to_empty": {} } }"#, 158 | FileFormat::Json, 159 | )) 160 | .add_source(File::from_str( 161 | r#"{ "profile": { "empty_to_empty": {} } }"#, 162 | FileFormat::Json, 163 | )) 164 | .build() 165 | .unwrap(); 166 | let res = cfg.try_deserialize::(); 167 | assert_data_eq!( 168 | res.unwrap().to_debug(), 169 | str![[r#" 170 | Settings { 171 | profile: { 172 | "empty_to_empty": Profile { 173 | name: None, 174 | }, 175 | }, 176 | } 177 | 178 | "#]] 179 | ); 180 | 181 | // * empty_to_non_empty: empty map -> map with k/v 182 | let cfg = Config::builder() 183 | .add_source(File::from_str( 184 | r#"{ "profile": { "empty_to_non_empty": {} } }"#, 185 | FileFormat::Json, 186 | )) 187 | .add_source(File::from_str( 188 | r#"{ "profile": { "empty_to_non_empty": { "name": "bar" } } }"#, 189 | FileFormat::Json, 190 | )) 191 | .build() 192 | .unwrap(); 193 | let res = cfg.try_deserialize::(); 194 | assert_data_eq!( 195 | res.unwrap().to_debug(), 196 | str![[r#" 197 | Settings { 198 | profile: { 199 | "empty_to_non_empty": Profile { 200 | name: Some( 201 | "bar", 202 | ), 203 | }, 204 | }, 205 | } 206 | 207 | "#]] 208 | ); 209 | 210 | // * non_empty_to_empty: map with k/v -> empty map 211 | let cfg = Config::builder() 212 | .add_source(File::from_str( 213 | r#"{ "profile": { "non_empty_to_empty": { "name": "foo" } } }"#, 214 | FileFormat::Json, 215 | )) 216 | .add_source(File::from_str( 217 | r#"{ "profile": { "non_empty_to_empty": {} } }"#, 218 | FileFormat::Json, 219 | )) 220 | .build() 221 | .unwrap(); 222 | let res = cfg.try_deserialize::(); 223 | assert_data_eq!( 224 | res.unwrap().to_debug(), 225 | str![[r#" 226 | Settings { 227 | profile: { 228 | "non_empty_to_empty": Profile { 229 | name: Some( 230 | "foo", 231 | ), 232 | }, 233 | }, 234 | } 235 | 236 | "#]] 237 | ); 238 | 239 | // * non_empty_to_non_empty: map with k/v -> map with k/v (override) 240 | let cfg = Config::builder() 241 | .add_source(File::from_str( 242 | r#"{ "profile": { "non_empty_to_non_empty": { "name": "foo" } } }"#, 243 | FileFormat::Json, 244 | )) 245 | .add_source(File::from_str( 246 | r#"{ "profile": { "non_empty_to_non_empty": { "name": "bar" } } }"#, 247 | FileFormat::Json, 248 | )) 249 | .build() 250 | .unwrap(); 251 | let res = cfg.try_deserialize::(); 252 | assert_data_eq!( 253 | res.unwrap().to_debug(), 254 | str![[r#" 255 | Settings { 256 | profile: { 257 | "non_empty_to_non_empty": Profile { 258 | name: Some( 259 | "bar", 260 | ), 261 | }, 262 | }, 263 | } 264 | 265 | "#]] 266 | ); 267 | 268 | // * null_to_empty: null -> empty map 269 | // * null_to_non_empty: null -> map with k/v 270 | // * int_to_empty: int -> empty map 271 | // * int_to_non_empty: int -> map with k/v 272 | let cfg = Config::builder() 273 | .add_source(File::from_str( 274 | r#"{ "profile": { "null_to_empty": null } }"#, 275 | FileFormat::Json, 276 | )) 277 | .add_source(File::from_str( 278 | r#"{ "profile": { "null_to_empty": {} } }"#, 279 | FileFormat::Json, 280 | )) 281 | .build() 282 | .unwrap(); 283 | let res = cfg.try_deserialize::(); 284 | assert_data_eq!( 285 | 
res.unwrap().to_debug(), 286 | str![[r#" 287 | Settings { 288 | profile: { 289 | "null_to_empty": Profile { 290 | name: None, 291 | }, 292 | }, 293 | } 294 | 295 | "#]] 296 | ); 297 | 298 | // * null_to_non_empty: null -> map with k/v 299 | let cfg = Config::builder() 300 | .add_source(File::from_str( 301 | r#"{ "profile": { "null_to_non_empty": null } }"#, 302 | FileFormat::Json, 303 | )) 304 | .add_source(File::from_str( 305 | r#"{ "profile": { "null_to_non_empty": { "name": "bar" } } }"#, 306 | FileFormat::Json, 307 | )) 308 | .build() 309 | .unwrap(); 310 | let res = cfg.try_deserialize::(); 311 | assert_data_eq!( 312 | res.unwrap().to_debug(), 313 | str![[r#" 314 | Settings { 315 | profile: { 316 | "null_to_non_empty": Profile { 317 | name: Some( 318 | "bar", 319 | ), 320 | }, 321 | }, 322 | } 323 | 324 | "#]] 325 | ); 326 | 327 | // * int_to_empty: int -> empty map 328 | let cfg = Config::builder() 329 | .add_source(File::from_str( 330 | r#"{ "profile": { "int_to_empty": 42 } }"#, 331 | FileFormat::Json, 332 | )) 333 | .add_source(File::from_str( 334 | r#"{ "profile": { "int_to_empty": {} } }"#, 335 | FileFormat::Json, 336 | )) 337 | .build() 338 | .unwrap(); 339 | let res = cfg.try_deserialize::(); 340 | assert_data_eq!( 341 | res.unwrap().to_debug(), 342 | str![[r#" 343 | Settings { 344 | profile: { 345 | "int_to_empty": Profile { 346 | name: None, 347 | }, 348 | }, 349 | } 350 | 351 | "#]] 352 | ); 353 | 354 | // * int_to_non_empty: int -> map with k/v 355 | let cfg = Config::builder() 356 | .add_source(File::from_str( 357 | r#"{ "profile": { "int_to_non_empty": 42 } }"#, 358 | FileFormat::Json, 359 | )) 360 | .add_source(File::from_str( 361 | r#"{ "int_to_non_empty": { "name": "bar" } }"#, 362 | FileFormat::Json, 363 | )) 364 | .build() 365 | .unwrap(); 366 | let res = cfg.try_deserialize::(); 367 | assert_data_eq!( 368 | res.unwrap_err().to_string(), 369 | str!["invalid type: integer `42`, expected struct Profile for key `profile.int_to_non_empty`"] 370 | ); 371 | } 372 | -------------------------------------------------------------------------------- /tests/testsuite/ron_enum.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "ron")] 2 | 3 | use config::{Config, File, FileFormat}; 4 | use serde_derive::Deserialize; 5 | 6 | #[derive(Debug, Deserialize)] 7 | #[serde(untagged)] 8 | enum A { 9 | VariantA { port: u16 }, 10 | } 11 | 12 | #[derive(Debug, Deserialize)] 13 | struct Settings { 14 | a: A, 15 | } 16 | 17 | #[test] 18 | fn test_ron_enum() { 19 | let c = Config::builder() 20 | .add_source(File::from_str( 21 | r#" 22 | ( 23 | a: VariantA ( port: 5000 ) 24 | ) 25 | "#, 26 | FileFormat::Ron, 27 | )) 28 | .build() 29 | .unwrap(); 30 | 31 | // Deserialize the entire file as single struct 32 | let s = c.try_deserialize::(); 33 | assert!(s.is_ok(), "Not Ok(_): {}", s.unwrap_err()); 34 | let s = s.unwrap(); 35 | let A::VariantA { port } = s.a; 36 | assert_eq!(port, 5000); 37 | } 38 | -------------------------------------------------------------------------------- /tests/testsuite/set.rs: -------------------------------------------------------------------------------- 1 | use config::{Config, File, FileFormat}; 2 | 3 | #[test] 4 | fn test_set_override_scalar() { 5 | let config = Config::builder() 6 | .set_override("value", true) 7 | .and_then(|b| b.build()) 8 | .unwrap(); 9 | 10 | assert_eq!(config.get("value").ok(), Some(true)); 11 | } 12 | 13 | #[test] 14 | #[cfg(feature = "json")] 15 | fn test_set_scalar_default() { 16 | let 
config = Config::builder() 17 | .add_source(File::from_str( 18 | r#" 19 | { 20 | "debug": true 21 | } 22 | "#, 23 | FileFormat::Json, 24 | )) 25 | .set_default("debug", false) 26 | .unwrap() 27 | .set_default("staging", false) 28 | .unwrap() 29 | .build() 30 | .unwrap(); 31 | 32 | assert_eq!(config.get("debug").ok(), Some(true)); 33 | assert_eq!(config.get("staging").ok(), Some(false)); 34 | } 35 | 36 | #[test] 37 | #[cfg(feature = "json")] 38 | fn test_set_scalar_path() { 39 | let config = Config::builder() 40 | .set_override("first.second.third", true) 41 | .unwrap() 42 | .add_source(File::from_str( 43 | r#" 44 | { 45 | "place": { 46 | "favorite": false 47 | } 48 | } 49 | "#, 50 | FileFormat::Json, 51 | )) 52 | .set_default("place.favorite", true) 53 | .unwrap() 54 | .set_default("place.blocked", true) 55 | .unwrap() 56 | .build() 57 | .unwrap(); 58 | 59 | assert_eq!(config.get("first.second.third").ok(), Some(true)); 60 | assert_eq!(config.get("place.favorite").ok(), Some(false)); 61 | assert_eq!(config.get("place.blocked").ok(), Some(true)); 62 | } 63 | 64 | #[test] 65 | #[cfg(feature = "json")] 66 | fn test_set_arr_path() { 67 | let config = Config::builder() 68 | .set_override("present[0].name", "Ivan") 69 | .unwrap() 70 | .set_override("absent[0].things[1].name", "foo") 71 | .unwrap() 72 | .set_override("absent[0].things[1].value", 42) 73 | .unwrap() 74 | .set_override("absent[1]", 0) 75 | .unwrap() 76 | .set_override("present[2]", "George") 77 | .unwrap() 78 | .set_override("reverse[-1]", "Bob") 79 | .unwrap() 80 | .set_override("reverse[-2]", "Alice") 81 | .unwrap() 82 | .set_override("empty[-1]", "Bob") 83 | .unwrap() 84 | .set_override("empty[-2]", "Alice") 85 | .unwrap() 86 | .add_source(File::from_str( 87 | r#" 88 | { 89 | "present": [ 90 | { 91 | "name": "1" 92 | }, 93 | { 94 | "name": "2" 95 | } 96 | ], 97 | "reverse": [ 98 | { 99 | "name": "l1" 100 | }, 101 | { 102 | "name": "l2" 103 | } 104 | ], 105 | "empty": [] 106 | } 107 | "#, 108 | FileFormat::Json, 109 | )) 110 | .build() 111 | .unwrap(); 112 | 113 | assert_eq!(config.get("present[0].name").ok(), Some("Ivan".to_owned())); 114 | assert_eq!( 115 | config.get("absent[0].things[1].name").ok(), 116 | Some("foo".to_owned()) 117 | ); 118 | assert_eq!(config.get("absent[0].things[1].value").ok(), Some(42)); 119 | assert_eq!(config.get("absent[1]").ok(), Some(0)); 120 | assert_eq!(config.get("present[2]").ok(), Some("George".to_owned())); 121 | assert_eq!(config.get("reverse[1]").ok(), Some("Bob".to_owned())); 122 | assert_eq!(config.get("reverse[0]").ok(), Some("Alice".to_owned())); 123 | assert_eq!(config.get("empty[1]").ok(), Some("Bob".to_owned())); 124 | assert_eq!(config.get("empty[0]").ok(), Some("Alice".to_owned())); 125 | } 126 | 127 | #[test] 128 | #[cfg(feature = "json")] 129 | fn test_set_capital() { 130 | let config = Config::builder() 131 | .set_default("this", false) 132 | .unwrap() 133 | .set_override("ThAt", true) 134 | .unwrap() 135 | .add_source(File::from_str("{\"logLevel\": 5}", FileFormat::Json)) 136 | .build() 137 | .unwrap(); 138 | 139 | assert_eq!(config.get::("this").unwrap(), false); 140 | assert_eq!(config.get::("ThAt").unwrap(), true); 141 | assert_eq!(config.get::("logLevel").unwrap(), 5); 142 | } 143 | -------------------------------------------------------------------------------- /tests/testsuite/unsigned_int.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "preserve_order")] 2 | 3 | #[derive(serde_derive::Deserialize, Eq, PartialEq, 
Debug)] 4 | struct Container<T> { 5 | inner: T, 6 | } 7 | 8 | #[derive(serde_derive::Deserialize, Eq, PartialEq, Debug)] 9 | struct Unsigned { 10 | unsigned: u16, 11 | } 12 | 13 | impl Default for Unsigned { 14 | fn default() -> Self { 15 | Self { unsigned: 128 } 16 | } 17 | } 18 | 19 | impl From<Unsigned> for config::ValueKind { 20 | fn from(unsigned: Unsigned) -> Self { 21 | let mut properties = indexmap::IndexMap::new(); 22 | properties.insert( 23 | "unsigned".to_owned(), 24 | config::Value::from(unsigned.unsigned), 25 | ); 26 | 27 | Self::Table(properties) 28 | } 29 | } 30 | 31 | #[test] 32 | fn test_deser_unsigned_int() { 33 | let container = Container { 34 | inner: Unsigned::default(), 35 | }; 36 | 37 | let built = config::Config::builder() 38 | .set_default("inner", Unsigned::default()) 39 | .unwrap() 40 | .build() 41 | .unwrap() 42 | .try_deserialize::<Container<Unsigned>>() 43 | .unwrap(); 44 | 45 | assert_eq!(container, built); 46 | } 47 | -------------------------------------------------------------------------------- /tests/testsuite/unsigned_int_hm.rs: -------------------------------------------------------------------------------- 1 | #![cfg(not(feature = "preserve_order"))] 2 | 3 | #[derive(serde_derive::Deserialize, Eq, PartialEq, Debug)] 4 | struct Container<T> { 5 | inner: T, 6 | } 7 | 8 | #[derive(serde_derive::Deserialize, Eq, PartialEq, Debug)] 9 | struct Unsigned { 10 | unsigned: u16, 11 | } 12 | 13 | impl Default for Unsigned { 14 | fn default() -> Self { 15 | Self { unsigned: 128 } 16 | } 17 | } 18 | 19 | impl From<Unsigned> for config::ValueKind { 20 | fn from(unsigned: Unsigned) -> Self { 21 | let mut properties = std::collections::HashMap::new(); 22 | properties.insert( 23 | "unsigned".to_owned(), 24 | config::Value::from(unsigned.unsigned), 25 | ); 26 | 27 | Self::Table(properties) 28 | } 29 | } 30 | 31 | #[test] 32 | fn test_deser_unsigned_int_hm() { 33 | let container = Container { 34 | inner: Unsigned::default(), 35 | }; 36 | 37 | let built = config::Config::builder() 38 | .set_default("inner", Unsigned::default()) 39 | .unwrap() 40 | .build() 41 | .unwrap() 42 | .try_deserialize::<Container<Unsigned>>() 43 | .unwrap(); 44 | 45 | assert_eq!(container, built); 46 | } 47 | -------------------------------------------------------------------------------- /tests/testsuite/weird_keys.rs: -------------------------------------------------------------------------------- 1 | //! Please note: This file is named "weird_keys" because these things are normally not used as keys, not 2 | //! because your software is weird if it expects such keys in its config file. 3 | //! 4 | //! Please don't be offended!
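//! //! The tests below use #[serde(rename = "...")] so that keys containing ':', '/' or '\\' can still be read into ordinary struct fields.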
5 | 6 | use serde_derive::{Deserialize, Serialize}; 7 | 8 | use config::{File, FileFormat}; 9 | 10 | /// Helper fn to test the different deserializations 11 | fn test_config_as<'a, T>(config: &str, format: FileFormat) -> T 12 | where 13 | T: serde::Deserialize<'a> + std::fmt::Debug, 14 | { 15 | let cfg = config::Config::builder() 16 | .add_source(File::from_str(config, format)) 17 | .build(); 18 | 19 | assert!(cfg.is_ok(), "Config could not be built: {:?}", cfg); 20 | let cfg = cfg.unwrap().try_deserialize(); 21 | 22 | assert!(cfg.is_ok(), "Config could not be transformed: {:?}", cfg); 23 | let cfg: T = cfg.unwrap(); 24 | cfg 25 | } 26 | 27 | #[test] 28 | #[cfg(feature = "json")] 29 | fn test_colon_key_json() { 30 | #[derive(Debug, Serialize, Deserialize)] 31 | struct SettingsColon { 32 | #[serde(rename = "foo:foo")] 33 | foo: u8, 34 | 35 | bar: u8, 36 | } 37 | 38 | let config = r#" {"foo:foo": 8, "bar": 12 } "#; 39 | 40 | let cfg = test_config_as::<SettingsColon>(config, FileFormat::Json); 41 | assert_eq!(cfg.foo, 8); 42 | assert_eq!(cfg.bar, 12); 43 | } 44 | 45 | #[test] 46 | #[cfg(feature = "json")] 47 | fn test_slash_key_json() { 48 | #[derive(Debug, Serialize, Deserialize)] 49 | struct SettingsSlash { 50 | #[serde(rename = "foo/foo")] 51 | foo: u8, 52 | bar: u8, 53 | } 54 | 55 | let config = r#" {"foo/foo": 8, "bar": 12 } "#; 56 | 57 | let cfg = test_config_as::<SettingsSlash>(config, FileFormat::Json); 58 | assert_eq!(cfg.foo, 8); 59 | assert_eq!(cfg.bar, 12); 60 | } 61 | 62 | #[test] 63 | #[cfg(feature = "json")] 64 | fn test_doublebackslash_key_json() { 65 | #[derive(Debug, Serialize, Deserialize)] 66 | struct SettingsDoubleBackslash { 67 | #[serde(rename = "foo\\foo")] 68 | foo: u8, 69 | bar: u8, 70 | } 71 | 72 | let config = r#" {"foo\\foo": 8, "bar": 12 } "#; 73 | 74 | let cfg = test_config_as::<SettingsDoubleBackslash>(config, FileFormat::Json); 75 | assert_eq!(cfg.foo, 8); 76 | assert_eq!(cfg.bar, 12); 77 | } 78 | --------------------------------------------------------------------------------