├── .github ├── dependabot.yml └── workflows │ ├── publish-crates.yml │ └── rust.yml ├── .gitignore ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Cargo.lock ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── SECURITY.md ├── examples └── compress.rs ├── src ├── compression.rs ├── dirs.rs ├── error.rs ├── lib.rs ├── local.rs ├── remote.rs ├── source.rs └── spanned.rs └── tests ├── assets ├── README.md ├── logo.png └── styles.css ├── common.rs ├── local_copy.rs ├── local_load.rs ├── local_new.rs ├── local_remove.rs ├── local_write.rs ├── remote_copy.rs ├── remote_load.rs ├── remote_write.rs └── source.rs /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: cargo 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | time: "10:00" 8 | open-pull-requests-limit: 10 9 | -------------------------------------------------------------------------------- /.github/workflows/publish-crates.yml: -------------------------------------------------------------------------------- 1 | # Publishes a release to crates.io 2 | name: PublishRelease 3 | 4 | on: 5 | push: 6 | tags: 7 | - "**[0-9]+.[0-9]+.[0-9]+*" 8 | 9 | jobs: 10 | # publish the current repo state to crates.io 11 | cargo-publish: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Checkout sources 15 | uses: actions/checkout@v4 16 | with: 17 | submodules: recursive 18 | - run: cargo publish --token ${CRATES_TOKEN} 19 | env: 20 | CRATES_TOKEN: ${{ secrets.CRATES_TOKEN }} 21 | -------------------------------------------------------------------------------- /.github/workflows/rust.yml: -------------------------------------------------------------------------------- 1 | name: Rust 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - main 8 | schedule: 9 | - cron: '11 7 * * 1,4' 10 | 11 | env: 12 | RUSTFLAGS: -Dwarnings 13 | 14 | jobs: 15 | check: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v3 19 | - uses: dtolnay/rust-toolchain@stable 20 | - name: Run cargo check 21 | run: | 22 | cargo check --all --tests 23 | fmt: 24 | runs-on: ubuntu-latest 25 | steps: 26 | - uses: actions/checkout@v3 27 | - uses: dtolnay/rust-toolchain@stable 28 | with: 29 | components: rustfmt 30 | - name: Run cargo fmt 31 | run: | 32 | cargo fmt --all -- --check 33 | clippy: 34 | runs-on: ubuntu-latest 35 | steps: 36 | - uses: actions/checkout@v3 37 | - uses: dtolnay/rust-toolchain@stable 38 | with: 39 | components: clippy 40 | - name: Run cargo clippy 41 | run: | 42 | cargo clippy --tests --examples 43 | docs: 44 | runs-on: ubuntu-latest 45 | env: 46 | RUSTDOCFLAGS: -Dwarnings 47 | steps: 48 | - uses: actions/checkout@v3 49 | - uses: dtolnay/rust-toolchain@stable 50 | - name: Run rustdoc 51 | run: | 52 | cargo doc --no-deps 53 | feature-check: 54 | needs: check 55 | runs-on: ubuntu-latest 56 | steps: 57 | - uses: actions/checkout@v3 58 | - uses: dtolnay/rust-toolchain@stable 59 | - uses: taiki-e/install-action@cargo-hack 60 | - name: Run cargo hack powerset 61 | run: | 62 | cargo hack check --feature-powerset --no-dev-deps 63 | os-test: 64 | runs-on: ${{ matrix.os }} 65 | strategy: 66 | matrix: 67 | os: [ubuntu-latest, windows-latest, macOS-latest] 68 | feature-flags: ["", "--no-default-features", "--all-features"] 69 | steps: 70 | - uses: actions/checkout@v3 71 | - uses: dtolnay/rust-toolchain@stable 72 | - name: Run cargo test 73 | run: | 74 | cargo test ${{ matrix.feature-flags }} 75 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | .vscode/ -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## v1.2.0 - 2024-12-09 4 | 5 | This release just updates dependencies. 6 | 7 | ## v1.1.0 - 2024-12-04 8 | 9 | This release adds support for deserializing YAML in the same way as TOML. [PR/235] 10 | 11 | [PR/235]: https://github.com/axodotdev/axoasset/pull/235 12 | 13 | ## v1.0.1 - 2024-10-30 14 | 15 | The primary feature of this release is adding some internal-for-now environment variables that allow the end user to change the level of compression at runtime. 16 | The primary motivator of this is improving the speed of testing dist. [PR/212] 17 | 18 | This release also includes some general dependency updates. 19 | 20 | [PR/212]: https://github.com/axodotdev/axoasset/pull/212 21 | 22 | ## v1.0.0 - 2024-07-05 23 | 24 | The design of the APIs has been massively overhauled and normalized, with the changes too substantial to individually enumerate. Major highlights: 25 | 26 | * Asset (the union between LocalAsset and RemoteAsset) has been removed 27 | * RemoteAsset is largely replaced with AxoClient, which allows you to actually initialize/configure the underlying reqwest client 28 | * Errors cleaned up 29 | * Function names cleaned up to be unambiguous and normal 30 | * "missing" APIs added 31 | 32 | 33 | ## v0.10.1 - 2024-06-10 34 | 35 | Fixes the `pub use reqwest` that was added in the previous version. 36 | 37 | 38 | 39 | ## v0.10.0 - 2024-06-06 40 | 41 | ### 🛠️ Fixes 42 | 43 | - **RemoteAsset: fix mimetype requirement - [mistydemeo], [pr126]** 44 | 45 | Fixes an issue where functions like `RemoteAsset::copy` would fail on files without specific mimetypes. We used this to assign file extensions based on mimetype, but it shouldn't have rejected other files. 46 | 47 | [pr126]: https://github.com/axodotdev/axoasset/pull/126 48 | 49 | - **RemoteAsset: exposes reqwest - [mistydemeo], [pr137]** 50 | 51 | [pr137]: https://github.com/axodotdev/axoasset/pull/137 52 | 53 | - **LocalAsset: fixes a misleading error message - sorairolake, [pr133]** 54 | 55 | [pr133]: https://github.com/axodotdev/axoasset/pull/133 56 | 57 | ### Maintenance 58 | 59 | Updates several dependencies. 60 | 61 | ## v0.9.5 - 2024-05-22 62 | 63 | ### Maintenance 64 | 65 | Relaxes the `reqwest` dependency range. 66 | 67 | ## v0.9.4 - 2024-05-22 68 | 69 | ### Maintenance 70 | 71 | Updates several dependencies. 72 | 73 | ## v0.9.3 - 2024-04-16 74 | 75 | ### 🛠️ Fixes 76 | 77 | Reduces the dependency tree when the `remote` feature isn't in use by properly scoping the `image` dependency. 78 | 79 | ## v0.9.2 - 2024-04-15 80 | 81 | ### 🛠️ Fixes 82 | 83 | Fixes a branching error in the previous release which prevented the ZIP fix from being usable. 84 | 85 | ## v0.9.1 - 2024-03-26 86 | 87 | ### 🛠️ Fixes 88 | 89 | - **Zipping directory trees on Windows - [mistydemeo], [pr94]** 90 | 91 | Recursive directory trees on Windows would be zipped with mangled filenames; this has been fixed by preprocessing the file names before passing them to the `zip` crate.
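As a rough illustration of the idea (a hedged sketch, not the exact code from the PR): ZIP entry names always use forward slashes, so relative paths get rebuilt from their components before being handed to the `zip` crate.

```rust
use camino::Utf8Path;

/// Illustrative helper: rebuild a relative path as a `/`-separated ZIP entry
/// name so that Windows-style separators never reach the `zip` crate.
fn zip_entry_name(rel_path: &Utf8Path) -> String {
    rel_path
        .components()
        .map(|c| c.as_str())
        .collect::<Vec<_>>()
        .join("/")
}
```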
92 | 93 | [pr94]: https://github.com/axodotdev/axoasset/pull/94 94 | 95 | ## v0.9.0 - 2024-03-14 96 | 97 | ### 🎁 Features 98 | 99 | - **Parsing JSON containing byte order marks - [mistydemeo], [pr87]** 100 | 101 | This fixes an issue parsing JSON from files containing a [byte order mark]. This is rare, but can occur with JSON files created in Windows with certain software, including data written to disk in PowerShell. 102 | 103 | The underlying JSON parsing library used by axoasset doesn't currently support parsing JSON files that begin with a byte order mark. In this release, we strip it from files that contain it before passing it to serde in order to work around this limitation. 104 | 105 | [byte order mark]: https://en.wikipedia.org/wiki/Byte_order_mark 106 | [pr87]: https://github.com/axodotdev/axoasset/pull/87 107 | 108 | ## v0.8.0 - 2024-03-06 109 | 110 | ### 🎁 Features 111 | 112 | - **Extract archives - [mistydemeo], [pr84]** 113 | 114 | Adds the ability to decompress tarballs and ZIP files from `LocalAsset`. Users can extract an entire archive to a directory via the `untar_gz_all`/`untar_xz_all`/`untar_zstd_all`/`unzip_all` methods, or extract individual files to byte arrays of their contents via the `untar_gz_file`/`untar_xz_file`/`untar_zstd_file`/`unzip_file` methods. 115 | 116 | [pr84]: https://github.com/axodotdev/axoasset/pull/84 117 | 118 | ## v0.7.0 - 2024-02-15 119 | 120 | Updates dependencies, including a breaking upgrade to miette. Users of this crate will need to update to at least miette 6.0.0. 121 | 122 | ## v0.6.2 - 2024-01-23 123 | 124 | Fixes zstd compression to actually use zstd, whoops! 125 | 126 | 127 | ## v0.6.1 - 2023-12-19 128 | 129 | Minor updates to dependencies to reduce the number of compression libraries we dynamically link. 130 | 131 | 132 | ## v0.6.0 - 2023-10-31 133 | 134 | ### 🎁 Features 135 | 136 | - **New reexports - [mistydemeo], [pr68]** 137 | 138 | Reexports `toml`, `toml_edit` and `serde_json`. Types from these three crates 139 | appear in certain axoasset function signatures. 140 | 141 | [pr68]: https://github.com/axodotdev/axoasset/pull/68 142 | 143 | ## v0.5.1 - 2023-09-14 144 | 145 | ### 🛠️ Fixes 146 | 147 | - **Reduce dependency tree size - [mistydemeo], [pr66]** 148 | 149 | Reduces the size of axoasset's dependency tree by not installing unused 150 | features from the `image` dependency. 151 | [pr66]: https://github.com/axodotdev/axoasset/pull/66 152 | ## v0.5.0 - 2023-08-08 153 | 154 | ### 🎁 Features 155 | 156 | - **Add a with_root argument to compression methods - [Gankra], [pr61]** 157 | 158 | The compression methods take a path to a directory to tar/zip up. The 159 | with_root argument specifies a root prefix of directories that the 160 | archive's contents should be nested under. If None, then the dir's contents 161 | are flattened into the root of the archive. 162 | 163 | e.g. to make a tar.gz that matches the npm package format (which 164 | wants the tarball to contain a dir named "package"), you can 165 | compress: `"path/to/contents/", Some("package")` (see the sketch below) 166 | 167 | - **Add more copying APIs to LocalAsset - [Gankra], [pr62]** 168 | 169 | LocalAsset now includes `copy_named`, `copy_dir`, and `copy_dir_named`. 170 | All `copy` functions were changed to return a `Utf8PathBuf` instead of a `PathBuf`.
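For the with_root example above, here is a hedged sketch using the three-argument form that `examples/compress.rs` calls (source directory, destination tarball, optional root prefix); the paths are placeholders and the `compression-tar` feature is assumed to be enabled:

```rust
use axoasset::{AxoassetError, LocalAsset};
use camino::Utf8PathBuf;

fn main() -> Result<(), AxoassetError> {
    // Tar up "path/to/contents/" so its files land under a top-level
    // "package/" directory inside the resulting tar.gz (npm-style layout).
    LocalAsset::tar_gz_dir(
        Utf8PathBuf::from("path/to/contents/"),
        Utf8PathBuf::from("package.tar.gz"),
        Some(Utf8PathBuf::from("package")),
    )?;
    Ok(())
}
```

Passing `None` for the last argument flattens the directory's contents into the root of the archive instead.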
171 | 172 | [pr61]: https://github.com/axodotdev/axoasset/pull/61 173 | [pr62]: https://github.com/axodotdev/axoasset/pull/62 174 | 175 | ## v0.4.0 - 2023-07-04 176 | 177 | ### 🎁 Features 178 | 179 | - **Don't use OpenSSL - [Gankra], [pr56]** 180 | 181 | ### 🛠️ Fixes 182 | 183 | - **Don't reject spans that cover the last char - [Gankra], [pr55]** 184 | 185 | [pr55]: https://github.com/axodotdev/axoasset/pull/55 186 | [pr56]: https://github.com/axodotdev/axoasset/pull/56 187 | 188 | ## v0.3.0 - 2023-05-23 189 | 190 | ### 🎁 Features 191 | 192 | - **SourceFile::deserialize_toml_edit (behind new toml-edit feature) - [Gankra], [pr52]** 193 | 194 | Just a convenience to read a SourceFile as toml-edit and map the error spans to the right format. 195 | 196 | ### 🛠️ Fixes 197 | 198 | - **Separate compression into cargo features - [shadows-withal], [pr47]** 199 | 200 | The APIs for processing tarballs/zips are now behind "compression-tar" and "compression-zip", 201 | with a convenience "compression" feature that covers both. 202 | 203 | - **LocalAsset API cleanup - [shadows-withal], [pr48]** 204 | 205 | Some breaking cleanups to APIs to make them more ergonomic long-term: 206 | 207 | - Many APIs that previously took Strings now take `AsRef` 208 | - write_new_{all} now just takes a path to the file, instead of folder_path + name 209 | 210 | - **Update GitHub CI - [striezel], [pr50]** 211 | 212 | Updating several old GitHub CI actions to more modern/maintained versions, thanks a ton! 213 | 214 | - **Fix typos - [striezel], [pr51]** 215 | 216 | Thanks!! 217 | 218 | [pr47]: https://github.com/axodotdev/axoasset/pull/47 219 | [pr48]: https://github.com/axodotdev/axoasset/pull/48 220 | [pr50]: https://github.com/axodotdev/axoasset/pull/50 221 | [pr51]: https://github.com/axodotdev/axoasset/pull/51 222 | [pr52]: https://github.com/axodotdev/axoasset/pull/52 223 | 224 | ## v0.2.0 - 2023-04-27 225 | 226 | ### 🎁 Features 227 | 228 | - **✨ New `LocalAsset` functionality! - [shadows-withal], [pr38], [pr46]** 229 | 230 | We've added a lot more functions to `LocalAsset`: 231 | 232 | - `write_new_all`, to write a file and its parent directories 233 | - `create_dir`, which creates, well, a new directory 234 | - `create_dir_all`, which creates a directory and its parent directories 235 | - `remove_file`, which deletes a file 236 | - `remove_dir`, which deletes an empty directory 237 | - `remove_dir_all`, which deletes a directory and its contents 238 | - `tar_{gz,xz,zstd}_dir`, which are three separate functions that create a tar archive with the 239 | specified compression algorithm, either Gzip, Xzip, or Zstd 240 | - `zip_dir`, which creates a zip archive 241 | 242 | - **✨ New feature: `SourceFile::span_for_substr` - [Gankra], [pr35]** 243 | 244 | This function makes it possible to get spans even when using a tool that 245 | doesn't support them, as long as it returns actual substrings pointing into 246 | the original SourceFile's inner String. 247 | 248 | ### 🛠️ Fixes 249 | 250 | - **Simplify SourceFile::new and new_empty - [Gankra], [pr43]** 251 | 252 | SourceFile::new and new_empty no longer return Results and simply use the origin_path 253 | as the file name, making them appropriate for synthetic/test inputs that don't map 254 | to actual files (see the sketch below).
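Building on the `SourceFile::new` change above, a hedged sketch of a purely synthetic input (the argument order of `SourceFile::new` and the exact `deserialize_json` signature are assumptions here, and `deserialize_json` requires the `json-serde` feature):

```rust
use axoasset::SourceFile;
use serde::Deserialize;

#[derive(Deserialize)]
struct Package {
    name: String,
}

fn main() -> Result<(), axoasset::AxoassetError> {
    // No Result to unwrap: the origin_path is only used as a display name,
    // so this works for a purely synthetic input that maps to no real file.
    let src = SourceFile::new("test/package.json", r#"{ "name": "demo" }"#.to_owned());

    // On failure this yields a miette-friendly error whose span points back
    // into `src`, so the offending JSON is shown in context.
    let package: Package = src.deserialize_json()?;
    println!("{}", package.name);
    Ok(())
}
```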
255 | 256 | [pr35]: https://github.com/axodotdev/axoasset/pull/35 257 | [pr43]: https://github.com/axodotdev/axoasset/pull/43 258 | [pr38]: https://github.com/axodotdev/axoasset/pull/38 259 | [pr46]: https://github.com/axodotdev/axoasset/pull/46 260 | 261 | 262 | ## v0.1.1 - 2023-04-06 263 | 264 | ### 🛠️ Fixes 265 | 266 | - **Fix compilation errors for features and add tests - [Gankra]/[ashleygwilliams], [pr33]** 267 | 268 | [pr33]: https://github.com/axodotdev/axoasset/pull/33 269 | 270 | ## v0.1.0 - 2023-04-06 271 | 272 | ### 🎁 Features 273 | 274 | - **✨ New type: `SourceFile` - [Gankra], [pr25]** 275 | 276 | `SourceFile` is a new asset type which is a readonly String version of 277 | `Asset` wrapped in an `Arc`. The purpose of this type is to be cheap to 278 | clone and pass around everywhere so that errors can refer to it (using the 279 | miette `#[source_code]` and `#[label]` attributes). The `Arc` ensures this 280 | is cheap at minimal overhead. The String ensures the contents make sense to 281 | display. 282 | 283 | - **✨ New type: `Spanned` - [Gankra], [pr25]** 284 | 285 | `Spanned` is a new type which tries to behave like `Box` in the sense 286 | that it's "as if" it's a `T` but with source span info embedded. If you want 287 | to remember that a value was decoded from an asset at bytes 100 to 200, you 288 | can wrap it in a `Spanned` without disrupting any of the code that uses it. 289 | Then if you determine that value caused a problem, you can call 290 | `Spanned::span(&value)` to extract the span and have miette include the 291 | asset context in the error message. 292 | 293 | - **✨ New features: `serde_json` and `toml-rs` - [Gankra], [pr25]** 294 | 295 | `json-serde` and `toml-serde` are new features which pull in dedicated 296 | support for `serde_json` and `toml-rs`. These features add `deserialize_json` 297 | and `deserialize_toml` methods to `SourceFile` which understand those crates' 298 | native error types and produce full pretty miette-y errors when deserializing, 299 | like this: 300 | 301 | ``` 302 | × failed to read JSON 303 | ╰─▶ trailing comma at line 3 column 1 304 | ╭─[src/tests/res/bad-package.json:2:1] 305 | 2 │ "name": null, 306 | 3 │ } 307 | · ─ 308 | ╰──── 309 | ``` 310 | 311 | (In this case serde_json itself points at the close brace and not the actual comma, we're just faithfully forwarding that.) 312 | 313 | `Spanned` has special integration with `toml-rs`, because it's actually a 314 | fork of that crate's [own magic `Spanned` type]. If you deserialize a struct 315 | that contains a `Spanned` it will automagically fill in the span info 316 | for you. Ours further improves on this by putting in more effort to be totally 317 | transparent like `Box`. 318 | 319 | - **✨ New function: `write_new` for `LocalAsset` - [ashleygwilliams], [pr28]** 320 | 321 | axoasset was first conceived to handle assets declared by end users for use 322 | in `oranda`, but quickly grew to encompass all fs/network calls. one of the 323 | things we often need to do is create a new file. This is only available on 324 | `LocalAsset` as, at least for the moment, that is the only place axoasset 325 | has permissions to create new assets. 326 | 327 | - **make `RemoteAsset` an optional feature - [Gankra], [pr26]** 328 | 329 | A feature of `axoasset` is that it is agnostic to the origin of the asset: 330 | it can be local or remote. However, often, authors can be certain that they 331 | will only be using local assets. 
In this case, it reduces dependencies to 332 | not include the remote functionality. Previously this wasn't possible! 333 | 334 | - **`miette-ify` errors - [Gankra], [pr24]** 335 | 336 | Previously we were using `thiserror` for error handling, but to be consistent 337 | across our toolchain, we've updated our errors to use `miette`. This has the 338 | added benefit of formalizing structures we were informally building into our 339 | error types (help/diagnostic text, forwarding the bare error as details, etc). 340 | 341 | 342 | - **consistent `Asset` interface - [ashleygwilliams], [pr30]** 343 | 344 | With 3 asset types, `LocalAsset`, `RemoteAsset`, and `SourceFile`, it felt 345 | important to align their structures so they could be used nearly identically. 346 | Every type now has: 347 | 348 | - `origin_path`: the original source of the file 349 | - `filename`: derived from the `origin_path` and, in the case of `RemoteAsset`s, 350 | also the headers from the network response. 351 | - `contents`: the contents of the asset as bytes or a String depending on 352 | asset type 353 | 354 | [pr24]: https://github.com/axodotdev/axoasset/pull/24 355 | [pr25]: https://github.com/axodotdev/axoasset/pull/25 356 | [pr26]: https://github.com/axodotdev/axoasset/pull/26 357 | [pr28]: https://github.com/axodotdev/axoasset/pull/28 358 | [pr30]: https://github.com/axodotdev/axoasset/pull/30 359 | 360 | [own magic `Spanned` type]: https://docs.rs/toml/latest/toml/struct.Spanned.html 361 | 362 | ## v0.0.1 - 2023-02-14 363 | 364 | Initial release. 365 | 366 | [ashleygwilliams]: https://github.com/ashleygwilliams 367 | [gankra]: https://github.com/gankra 368 | [mistydemeo]: https://github.com/mistydemeo 369 | [shadows-withal]: https://github.com/shadows-withal 370 | [striezel]: https://github.com/striezel 371 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | The repositories within the @axodotdev GitHub organization are formally owned 4 | by Axo Developer Co., which is a for-profit C-Corporation incorporated in 5 | Delaware, USA and operated by its global, distributed team of employees. 6 | 7 | To the extent it is possible, reasonable, and legal, external contributors and 8 | employees are held to the same behavior standards, as defined by the 9 | [Contributor Covenant]'s Pledge and Standards. 10 | 11 | Enforcement will necessarily be different depending on the employment status 12 | of involved parties. 13 | 14 | All decisions are made by [Ashley Williams](mailto:ashley@axo.dev) through a 15 | consensus-seeking process with the [Axo team](https://www.axo.dev/team) and, 16 | to the extent it is possible, reasonable, and legal, any involved external 17 | participants. 18 | 19 | [Contributor Covenant]: https://www.contributor-covenant.org/version/2/1/code_of_conduct/ 20 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Thanks so much for your interest in contributing to `axoasset`. We are excited 4 | about building a community of contributors to the project. Here are some 5 | guiding principles for working with us: 6 | 7 | **1. File an issue first!** 8 | 9 | Except for the absolute tiniest of PRs (e.g. a single typo fix), please file an 10 | issue before opening a PR.
This can help ensure that the problem you are trying 11 | to solve and the solution you have in mind will be accepted. Where possible, we 12 | don't want folks wasting time on directions we don't want to take the project. 13 | 14 | **2. Write tests, or at least detailed reproduction steps** 15 | 16 | If you find a bug, the best way to prioritize getting it fixed is to open a PR 17 | with a failing test! If you are opening a bug fix PR, please add a test to show 18 | that your fix works. 19 | 20 | **3. Overcommunicate** 21 | 22 | In all scenarios, please provide as much context as possible; you may not think 23 | it's important, but it may be! 24 | 25 | **4. Patience** 26 | 27 | Axo is a very small company, so it's possible that we may not be able to 28 | immediately prioritize your issue. We are excited to develop a community of 29 | contributors around this project, but it won't always be on the top of our to-do 30 | list, even if we wish it could be. 31 | 32 | If you haven't heard from us in a while and want to check in, feel free to 33 | at-mention @ashleygwilliams, but please be kind while doing so! 34 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "axoasset" 3 | description = ">o_o<" 4 | version = "1.2.0" 5 | edition = "2021" 6 | license = "MIT OR Apache-2.0" 7 | repository = "https://github.com/axodotdev/axoasset" 8 | 9 | [features] 10 | # Enable remote support by default 11 | default = ["remote"] 12 | # Enable SourceFile support for deserializing using the "toml" crate 13 | toml-serde = ["toml", "serde"] 14 | # Enable SourceFile support for deserializing using the "serde_json" crate 15 | json-serde = ["serde_json", "serde"] 16 | # Enable SourceFile support for deserializing using the "toml_edit" crate 17 | toml-edit = ["toml_edit"] 18 | # Enable SourceFile support for deserializing using the "serde_yml" crate 19 | yaml-serde = ["serde_yml", "serde"] 20 | # Enable reqwest-based http file fetching 21 | remote = ["reqwest", "image"] 22 | # Opt in to the system's native TLS roots, in addition to the bundled 23 | # webpki roots used by default 24 | tls-native-roots = ["reqwest/rustls-tls-native-roots"] 25 | # Enable support for reading and writing zips and tarballs 26 | compression = ["compression-tar", "compression-zip"] 27 | # Enable support for reading and writing tarballs 28 | compression-tar = ["flate2", "tar", "xz2", "zstd"] 29 | # Enable support for reading and writing zips 30 | compression-zip = ["zip"] 31 | 32 | [dependencies] 33 | image = { version = "0.25.4", default-features = false, optional = true } 34 | mime = "0.3.16" 35 | reqwest = { version = ">=0.11.0", optional = true, default-features = false, features = ["json", "rustls-tls-webpki-roots"] } 36 | thiserror = "2.0.0" 37 | url = "2.5.0" 38 | miette = "7.0.0" 39 | camino = "1.1.9" 40 | toml = { version = "0.8.12", optional = true } 41 | serde_json = { version = "1.0.132", optional = true } 42 | serde_yml = { version = "0.0.11", optional = true } 43 | serde = { version = "1.0.214", optional = true, features = ["derive"] } 44 | tar = { version = "0.4.42", optional = true } 45 | zip = { version = "0.6.4", optional = true } 46 | flate2 = { version = "1.0.34", optional = true } 47 | xz2 = { version = "0.1.7", optional = true, features = ["static"] } 48 | zstd = { version = "0.13.0", optional = true } 49 | toml_edit = { version = "0.22.22", optional = true } 50 | walkdir =
"2.5.0" 51 | lazy_static = "1.5.0" 52 | 53 | [dev-dependencies] 54 | assert_fs = "1" 55 | tokio = {version = "1.24", features = ["macros"]} 56 | wiremock = "0.6" 57 | clap = { version = "4.5.24", features = ["derive"] } 58 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2023 Axo Developer Co. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2023 Axo Developer Co. 
2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. 26 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # axoasset 2 | 3 | [![Github Actions Rust](https://github.com/axodotdev/axoasset/actions/workflows/rust.yml/badge.svg)](https://github.com/axodotdev/axoasset/actions) 4 | [![crates.io](https://img.shields.io/crates/v/axoasset.svg)](https://crates.io/crates/axoasset) 5 | [![License: MPL 2.0](https://img.shields.io/badge/License-MPL_2.0-brightgreen.svg)](https://opensource.org/licenses/MPL-2.0) 6 | 7 | This library offers `read`, `write`, and `copy` functions, for local and remote 8 | assets given a string that contains a relative or absolute local path or a 9 | remote address using http or https. 10 | 11 | 12 | ## Example 13 | 14 | ```rust 15 | use axoasset; 16 | 17 | let assets = vec!("https://my.co/logo.png", "./profile.jpg", "README.md"); 18 | let dest = "public"; 19 | 20 | for asset in assets { 21 | axoasset::copy(asset, dest)?; 22 | } 23 | ``` 24 | 25 | ## License 26 | 27 | Licensed under either of 28 | 29 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or [apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0)) 30 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or [opensource.org/licenses/MIT](https://opensource.org/licenses/MIT)) 31 | 32 | at your option. 33 | 34 | ## Contributions 35 | 36 | Unless you explicitly state otherwise, any contribution intentionally 37 | submitted for inclusion in the work by you, as defined in the Apache-2.0 38 | license, shall be dual licensed as above, without any additional terms or 39 | conditions. 40 | 41 | If you are interested in contributing, please read our [CONTRIBUTING notes] and our [Code of Conduct]. 42 | 43 | **Copyright 2022 Axo Developer Co.** 44 | 45 | [CONTRIBUTING notes]: CONTRIBUTING.md 46 | [Code of Conduct]: CODE_OF_CONDUCT.md 47 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | Axo Developer Co. takes the security of our software products and services seriously. If you believe you have found a security vulnerability in this open source repository, please report the issue to us directly using GitHub private vulnerability reporting or email ashley@axo.dev. 
If you aren't sure you have found a security vulnerability but have a suspicion or concern, feel free to message anyways; we prefer over-communication :) 2 | 3 | Please do not report security vulnerabilities publicly, such as via GitHub issues, Twitter, or other social media. 4 | 5 | Thanks for helping make software safe for everyone! 6 | -------------------------------------------------------------------------------- /examples/compress.rs: -------------------------------------------------------------------------------- 1 | //! Example that makes it easy to mess around with the compression backend 2 | //! 3 | //! ```ignore 4 | //! cargo run --example compress --features=compression -- src src.tar.gz --with-root=some/dir 5 | //! ``` 6 | //! 7 | //! ```ignore 8 | //! cargo run --example compress --features=compression -- src src.zip --with-root=some/dir 9 | //! ``` 10 | #![allow(unused_imports)] 11 | #![allow(unused_variables)] 12 | 13 | use axoasset::{AxoassetError, LocalAsset}; 14 | use camino::Utf8PathBuf; 15 | use clap::Parser; 16 | 17 | #[derive(Parser)] 18 | struct Cli { 19 | src_path: Utf8PathBuf, 20 | dest_path: Utf8PathBuf, 21 | #[clap(long)] 22 | with_root: Option, 23 | } 24 | 25 | fn main() { 26 | let args = Cli::parse(); 27 | 28 | doit(args).unwrap() 29 | } 30 | 31 | fn doit(args: Cli) -> Result<(), AxoassetError> { 32 | #[cfg(feature = "compression-tar")] 33 | if args.dest_path.as_str().ends_with("tar.zstd") { 34 | return LocalAsset::tar_zstd_dir(args.src_path, args.dest_path, args.with_root); 35 | } 36 | #[cfg(feature = "compression-tar")] 37 | if args.dest_path.as_str().ends_with("tar.xz") { 38 | return LocalAsset::tar_xz_dir(args.src_path, args.dest_path, args.with_root); 39 | } 40 | #[cfg(feature = "compression-tar")] 41 | if args.dest_path.as_str().ends_with("tar.gz") { 42 | return LocalAsset::tar_gz_dir(args.src_path, args.dest_path, args.with_root); 43 | } 44 | #[cfg(feature = "compression-zip")] 45 | if args.dest_path.as_str().ends_with("zip") { 46 | return LocalAsset::zip_dir(args.src_path, args.dest_path, args.with_root); 47 | } 48 | 49 | if !cfg!(any( 50 | feature = "compression-tar", 51 | feature = "compression-zip" 52 | )) { 53 | panic!("this example must be built with --features=compression") 54 | } else { 55 | panic!("unsupported dest_path extension") 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/compression.rs: -------------------------------------------------------------------------------- 1 | //! Compression-related methods, all used in `axoasset::Local` 2 | 3 | use camino::Utf8Path; 4 | #[cfg(feature = "compression-zip")] 5 | use camino::Utf8PathBuf; 6 | 7 | use crate::AxoassetError; 8 | 9 | /// Internal tar-file compression algorithms 10 | #[cfg(feature = "compression-tar")] 11 | #[derive(Debug, Copy, Clone, PartialEq, Eq)] 12 | pub(crate) enum CompressionImpl { 13 | /// .gz 14 | Gzip, 15 | /// .xz 16 | Xzip, 17 | /// .zstd 18 | Zstd, 19 | } 20 | 21 | lazy_static::lazy_static! 
{ 22 | static ref DEFAULT_GZ_LEVEL: u32 = { 23 | std::env::var("AXOASSET_GZ_LEVEL") 24 | .ok() 25 | .and_then(|val| val.parse().ok()) 26 | .unwrap_or(6) 27 | }; 28 | static ref DEFAULT_XZ_LEVEL: u32 = { 29 | std::env::var("AXOASSET_XZ_LEVEL") 30 | .ok() 31 | .and_then(|val| val.parse().ok()) 32 | .unwrap_or(9) 33 | }; 34 | static ref DEFAULT_ZSTD_LEVEL: i32 = { 35 | std::env::var("AXOASSET_ZSTD_LEVEL") 36 | .ok() 37 | .and_then(|val| val.parse().ok()) 38 | .unwrap_or(3) 39 | }; 40 | } 41 | 42 | #[cfg(feature = "compression-tar")] 43 | pub(crate) fn tar_dir( 44 | src_path: &Utf8Path, 45 | dest_path: &Utf8Path, 46 | with_root: Option<&Utf8Path>, 47 | compression: &CompressionImpl, 48 | ) -> crate::error::Result<()> { 49 | use crate::error::*; 50 | use flate2::{Compression, GzBuilder}; 51 | use std::fs; 52 | use xz2::write::XzEncoder; 53 | use zstd::stream::Encoder as ZstdEncoder; 54 | 55 | // Set up the archive/compression 56 | // dir_name here is a prefix directory/path that the src dir's contents will be stored 57 | // under when being tarred. Having it be empty means the contents 58 | // will be placed in the root of the tarball. 59 | let dir_name = with_root.unwrap_or_else(|| Utf8Path::new("")); 60 | let zip_contents_name = format!("{}.tar", dest_path.file_name().unwrap()); 61 | let final_zip_file = match fs::File::create(dest_path) { 62 | Ok(file) => file, 63 | Err(details) => { 64 | return Err(AxoassetError::LocalAssetWriteNewFailed { 65 | dest_path: dest_path.to_string(), 66 | details, 67 | }) 68 | } 69 | }; 70 | 71 | match compression { 72 | CompressionImpl::Gzip => { 73 | // Wrap our file in compression 74 | let zip_output = GzBuilder::new() 75 | .filename(zip_contents_name) 76 | .write(final_zip_file, Compression::new(*DEFAULT_GZ_LEVEL)); 77 | 78 | // Write the tar to the compression stream 79 | let mut tar = tar::Builder::new(zip_output); 80 | 81 | // Add the whole dir to the tar 82 | if let Err(details) = tar.append_dir_all(dir_name, src_path) { 83 | return Err(AxoassetError::Compression { 84 | reason: format!("failed to copy directory into tar: {src_path} => {dir_name}",), 85 | details, 86 | }); 87 | } 88 | // Finish up the tarring 89 | let zip_output = match tar.into_inner() { 90 | Ok(out) => out, 91 | Err(details) => { 92 | return Err(AxoassetError::Compression { 93 | reason: format!("failed to write tar: {dest_path}"), 94 | details, 95 | }) 96 | } 97 | }; 98 | // Finish up the compression 99 | let _zip_file = match zip_output.finish() { 100 | Ok(file) => file, 101 | Err(details) => { 102 | return Err(AxoassetError::Compression { 103 | reason: format!("failed to write archive: {dest_path}"), 104 | details, 105 | }) 106 | } 107 | }; 108 | // Drop the file to close it 109 | } 110 | CompressionImpl::Xzip => { 111 | let zip_output = XzEncoder::new(final_zip_file, *DEFAULT_XZ_LEVEL); 112 | // Write the tar to the compression stream 113 | let mut tar = tar::Builder::new(zip_output); 114 | 115 | // Add the whole dir to the tar 116 | if let Err(details) = tar.append_dir_all(dir_name, src_path) { 117 | return Err(AxoassetError::Compression { 118 | reason: format!("failed to copy directory into tar: {src_path} => {dir_name}",), 119 | details, 120 | }); 121 | } 122 | // Finish up the tarring 123 | let zip_output = match tar.into_inner() { 124 | Ok(out) => out, 125 | Err(details) => { 126 | return Err(AxoassetError::Compression { 127 | reason: format!("failed to write tar: {dest_path}"), 128 | details, 129 | }) 130 | } 131 | }; 132 | // Finish up the compression 133 | let _zip_file = 
match zip_output.finish() { 134 | Ok(file) => file, 135 | Err(details) => { 136 | return Err(AxoassetError::Compression { 137 | reason: format!("failed to write archive: {dest_path}"), 138 | details, 139 | }) 140 | } 141 | }; 142 | // Drop the file to close it 143 | } 144 | CompressionImpl::Zstd => { 145 | // Wrap our file in compression 146 | let zip_output = 147 | ZstdEncoder::new(final_zip_file, *DEFAULT_ZSTD_LEVEL).map_err(|details| { 148 | AxoassetError::Compression { 149 | reason: "failed to create zstd encoder".to_string(), 150 | details, 151 | } 152 | })?; 153 | 154 | // Write the tar to the compression stream 155 | let mut tar = tar::Builder::new(zip_output); 156 | 157 | // Add the whole dir to the tar 158 | if let Err(details) = tar.append_dir_all(dir_name, src_path) { 159 | return Err(AxoassetError::Compression { 160 | reason: format!("failed to copy directory into tar: {src_path} => {dir_name}",), 161 | details, 162 | }); 163 | } 164 | // Finish up the tarring 165 | let zip_output = match tar.into_inner() { 166 | Ok(out) => out, 167 | Err(details) => { 168 | return Err(AxoassetError::Compression { 169 | reason: format!("failed to write tar: {dest_path}"), 170 | details, 171 | }) 172 | } 173 | }; 174 | // Finish up the compression 175 | let _zip_file = match zip_output.finish() { 176 | Ok(file) => file, 177 | Err(details) => { 178 | return Err(AxoassetError::Compression { 179 | reason: format!("failed to write archive: {dest_path}"), 180 | details, 181 | }) 182 | } 183 | }; 184 | // Drop the file to close it 185 | } 186 | } 187 | 188 | Ok(()) 189 | } 190 | 191 | #[cfg(feature = "compression-tar")] 192 | fn open_tarball( 193 | tarball: &Utf8Path, 194 | compression: &CompressionImpl, 195 | ) -> crate::error::Result> { 196 | use crate::LocalAsset; 197 | 198 | let source = LocalAsset::load_bytes(tarball)?; 199 | let mut tarball_bytes = vec![]; 200 | decompress_tarball_bytes(&source, &mut tarball_bytes, compression) 201 | .map_err(wrap_decompression_err(tarball.as_str()))?; 202 | 203 | Ok(tarball_bytes) 204 | } 205 | 206 | #[cfg(feature = "compression-tar")] 207 | fn decompress_tarball_bytes( 208 | source: &[u8], 209 | tarball_bytes: &mut Vec, 210 | compression: &CompressionImpl, 211 | ) -> std::io::Result<()> { 212 | use std::io::Read; 213 | 214 | use flate2::read::GzDecoder; 215 | use xz2::read::XzDecoder; 216 | use zstd::stream::Decoder as ZstdDecoder; 217 | 218 | match compression { 219 | CompressionImpl::Gzip => { 220 | let mut decoder = GzDecoder::new(source); 221 | decoder.read_to_end(tarball_bytes)?; 222 | } 223 | CompressionImpl::Xzip => { 224 | let mut decoder = XzDecoder::new(source); 225 | decoder.read_to_end(tarball_bytes)?; 226 | } 227 | CompressionImpl::Zstd => { 228 | let mut decoder = ZstdDecoder::new(source)?; 229 | decoder.read_to_end(tarball_bytes)?; 230 | } 231 | } 232 | Ok(()) 233 | } 234 | 235 | #[cfg(feature = "compression-tar")] 236 | pub(crate) fn untar_all( 237 | tarball: &Utf8Path, 238 | dest_path: &Utf8Path, 239 | compression: &CompressionImpl, 240 | ) -> crate::error::Result<()> { 241 | let tarball_bytes = open_tarball(tarball, compression)?; 242 | let mut archive = tar::Archive::new(tarball_bytes.as_slice()); 243 | archive 244 | .unpack(dest_path) 245 | .map_err(wrap_decompression_err(tarball.as_str()))?; 246 | 247 | Ok(()) 248 | } 249 | 250 | #[cfg(feature = "compression-tar")] 251 | pub(crate) fn untar_file( 252 | tarball: &Utf8Path, 253 | filename: &str, 254 | compression: &CompressionImpl, 255 | ) -> crate::error::Result> { 256 | let tarball_bytes = 
open_tarball(tarball, compression)?; 257 | let archive = tar::Archive::new(tarball_bytes.as_slice()); 258 | let buf = find_tarball_file_bytes(archive, filename) 259 | .map_err(wrap_decompression_err(tarball.as_str()))?; 260 | match buf { 261 | Some(buf) => Ok(buf), 262 | None => Err(crate::AxoassetError::ExtractFilenameFailed { 263 | desired_filename: filename.to_owned(), 264 | }), 265 | } 266 | } 267 | 268 | #[cfg(feature = "compression-tar")] 269 | fn find_tarball_file_bytes( 270 | mut tarball: tar::Archive<&[u8]>, 271 | filename: &str, 272 | ) -> std::io::Result>> { 273 | use std::io::Read; 274 | for entry in tarball.entries()? { 275 | let mut entry = entry?; 276 | if let Some(name) = entry.path()?.file_name() { 277 | if name == filename { 278 | let mut buf = vec![]; 279 | entry.read_to_end(&mut buf)?; 280 | 281 | return Ok(Some(buf)); 282 | } 283 | } 284 | } 285 | Ok(None) 286 | } 287 | 288 | #[cfg(feature = "compression-zip")] 289 | pub(crate) fn zip_dir( 290 | src_path: &Utf8Path, 291 | dest_path: &Utf8Path, 292 | with_root: Option<&Utf8Path>, 293 | ) -> crate::error::Result<()> { 294 | zip_dir_impl(src_path, dest_path, with_root).map_err(|details| AxoassetError::Compression { 295 | reason: format!("failed to write zip: {}", dest_path), 296 | details: details.into(), 297 | }) 298 | } 299 | 300 | #[cfg(feature = "compression-zip")] 301 | pub(crate) fn zip_dir_impl( 302 | src_path: &Utf8Path, 303 | dest_path: &Utf8Path, 304 | with_root: Option<&Utf8Path>, 305 | ) -> zip::result::ZipResult<()> { 306 | use std::{ 307 | fs::File, 308 | io::{Read, Write}, 309 | }; 310 | use zip::{write::FileOptions, CompressionMethod}; 311 | 312 | let file = File::create(dest_path)?; 313 | 314 | // The `zip` crate lacks the conveniences of the `tar` crate so we need to manually 315 | // walk through all the subdirs of `src_path` and copy each entry. walkdir streamlines 316 | // that process for us. 317 | let walkdir = crate::dirs::walk_dir(src_path); 318 | let it = walkdir.into_iter(); 319 | 320 | let mut zip = zip::ZipWriter::new(file); 321 | let options = FileOptions::default().compression_method(CompressionMethod::STORE); 322 | 323 | // If there's a root prefix, add entries for all of its components 324 | if let Some(root) = with_root { 325 | for path in root.ancestors() { 326 | if !path.as_str().is_empty() { 327 | zip.add_directory(path.as_str(), options)?; 328 | } 329 | } 330 | } 331 | 332 | let mut buffer = Vec::new(); 333 | for entry in it.filter_map(|e| e.ok()) { 334 | let name = &entry.rel_path; 335 | let path = &entry.full_path; 336 | // Optionally apply the root prefix 337 | let name = if let Some(root) = with_root { 338 | root.join(name) 339 | } else { 340 | name.to_owned() 341 | }; 342 | 343 | // ZIP files always need Unix-style file separators; we need to 344 | // convert any Windows file names to use Unix separators before 345 | // passing them to any of the other functions. 346 | let unix_name = Utf8PathBuf::from(&name) 347 | .components() 348 | .map(|c| c.as_str()) 349 | .collect::>() 350 | .join("/"); 351 | 352 | // Write file or directory explicitly 353 | // Some unzip tools unzip files with directory paths correctly, some do not! 354 | if path.is_file() { 355 | zip.start_file(&unix_name, options)?; 356 | let mut f = File::open(path)?; 357 | 358 | f.read_to_end(&mut buffer)?; 359 | zip.write_all(&buffer)?; 360 | buffer.clear(); 361 | } else if !name.as_str().is_empty() { 362 | // Only if not root! 
Avoids path spec / warning 363 | // and mapname conversion failed error on unzip 364 | zip.add_directory(&unix_name, options)?; 365 | } 366 | } 367 | zip.finish()?; 368 | Ok(()) 369 | } 370 | 371 | #[cfg(feature = "compression-zip")] 372 | pub(crate) fn unzip_all(zipfile: &Utf8Path, dest_path: &Utf8Path) -> crate::error::Result<()> { 373 | use crate::LocalAsset; 374 | 375 | let source = LocalAsset::load_bytes(zipfile)?; 376 | unzip_all_impl(&source, dest_path).map_err(|details| AxoassetError::Decompression { 377 | origin_path: zipfile.to_string(), 378 | details: details.into(), 379 | }) 380 | } 381 | 382 | #[cfg(feature = "compression-zip")] 383 | fn unzip_all_impl(source: &[u8], dest_path: &Utf8Path) -> zip::result::ZipResult<()> { 384 | use std::io::Cursor; 385 | 386 | let seekable = Cursor::new(source); 387 | let mut archive = zip::ZipArchive::new(seekable)?; 388 | archive.extract(dest_path)?; 389 | Ok(()) 390 | } 391 | 392 | #[cfg(feature = "compression-zip")] 393 | pub(crate) fn unzip_file(zipfile: &Utf8Path, filename: &str) -> crate::error::Result> { 394 | use std::io::{Cursor, Read}; 395 | 396 | use crate::LocalAsset; 397 | 398 | let source = LocalAsset::load_bytes(zipfile)?; 399 | let seekable = Cursor::new(source); 400 | let mut archive = 401 | zip::ZipArchive::new(seekable).map_err(|details| AxoassetError::Decompression { 402 | origin_path: zipfile.to_string(), 403 | details: details.into(), 404 | })?; 405 | let mut file = 406 | archive 407 | .by_name(filename) 408 | .map_err(|_| crate::AxoassetError::ExtractFilenameFailed { 409 | desired_filename: filename.to_owned(), 410 | })?; 411 | 412 | let mut buf = vec![]; 413 | file.read_to_end(&mut buf) 414 | .map_err(wrap_decompression_err(zipfile.as_str()))?; 415 | 416 | Ok(buf) 417 | } 418 | 419 | fn wrap_decompression_err(origin_path: &str) -> impl FnOnce(std::io::Error) -> AxoassetError + '_ { 420 | |details| AxoassetError::Decompression { 421 | origin_path: origin_path.to_string(), 422 | details, 423 | } 424 | } 425 | -------------------------------------------------------------------------------- /src/dirs.rs: -------------------------------------------------------------------------------- 1 | //! Utilities for working with directories 2 | //! 3 | //! Right now just a wrapper around WalkDirs that does some utf8 conversions and strip_prefixing, 4 | //! since we always end up doing that. 5 | 6 | use crate::error::*; 7 | use camino::{Utf8Path, Utf8PathBuf}; 8 | 9 | /// Walk through this dir's descendants with `walkdirs` 10 | pub fn walk_dir(dir: impl AsRef) -> AxoassetWalkDir { 11 | let dir = dir.as_ref(); 12 | AxoassetWalkDir { 13 | root_dir: dir.to_owned(), 14 | inner: walkdir::WalkDir::new(dir), 15 | } 16 | } 17 | 18 | /// Wrapper around [`walkdir::WalkDir`][]. 19 | pub struct AxoassetWalkDir { 20 | root_dir: Utf8PathBuf, 21 | inner: walkdir::WalkDir, 22 | } 23 | 24 | /// Wrapper around [`walkdir::IntoIter`][]. 25 | pub struct AxoassetIntoIter { 26 | root_dir: Utf8PathBuf, 27 | inner: walkdir::IntoIter, 28 | } 29 | 30 | /// Wrapper around [`walkdir::DirEntry`][]. 31 | pub struct AxoassetDirEntry { 32 | /// full path to the entry 33 | pub full_path: Utf8PathBuf, 34 | /// path to the entry relative to the dir passed to [`walk_dir`][]. 
35 | pub rel_path: Utf8PathBuf, 36 | /// Inner contents 37 | pub entry: walkdir::DirEntry, 38 | } 39 | 40 | impl IntoIterator for AxoassetWalkDir { 41 | type IntoIter = AxoassetIntoIter; 42 | type Item = Result; 43 | fn into_iter(self) -> Self::IntoIter { 44 | AxoassetIntoIter { 45 | root_dir: self.root_dir, 46 | inner: self.inner.into_iter(), 47 | } 48 | } 49 | } 50 | 51 | impl Iterator for AxoassetIntoIter { 52 | type Item = Result; 53 | fn next(&mut self) -> Option { 54 | self.inner.next().map(|next| { 55 | let entry = next.map_err(|e| AxoassetError::WalkDirFailed { 56 | origin_path: self.root_dir.clone(), 57 | details: e, 58 | })?; 59 | 60 | let full_path = Utf8PathBuf::from_path_buf(entry.path().to_owned()) 61 | .map_err(|details| AxoassetError::Utf8Path { path: details })?; 62 | let rel_path = full_path 63 | .strip_prefix(&self.root_dir) 64 | .map_err(|_| AxoassetError::PathNesting { 65 | root_dir: self.root_dir.clone(), 66 | child_dir: full_path.clone(), 67 | })? 68 | .to_owned(); 69 | 70 | Ok(AxoassetDirEntry { 71 | full_path, 72 | rel_path, 73 | entry, 74 | }) 75 | }) 76 | } 77 | } 78 | 79 | impl std::ops::Deref for AxoassetDirEntry { 80 | type Target = walkdir::DirEntry; 81 | fn deref(&self) -> &Self::Target { 82 | &self.entry 83 | } 84 | } 85 | impl std::ops::DerefMut for AxoassetDirEntry { 86 | fn deref_mut(&mut self) -> &mut Self::Target { 87 | &mut self.entry 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | //! Axoasset Errors 2 | 3 | use miette::Diagnostic; 4 | use thiserror::Error; 5 | 6 | /// Axoasset Result 7 | pub type Result = std::result::Result; 8 | 9 | /// The set of errors that can occur when axoasset is used 10 | #[derive(Debug, Error, Diagnostic)] 11 | #[non_exhaustive] 12 | pub enum AxoassetError { 13 | /// This error indicates that axoasset failed to fetch a remote asset. 14 | #[error("failed to fetch asset at {origin_path}: Encountered an error when requesting a remote asset.")] 15 | #[diagnostic(help("Make sure the url you provided is accurate."))] 16 | #[cfg(feature = "remote")] 17 | RemoteAssetRequestFailed { 18 | /// The origin path of the asset, used as an identifier 19 | origin_path: String, 20 | /// Details of the error 21 | #[source] 22 | details: reqwest::Error, 23 | }, 24 | 25 | /// error indicates that the provided URL did not properly parse and may 26 | /// either be invalid or an unsupported format. 27 | #[cfg(feature = "remote")] 28 | #[error("failed to parse URL {origin_path}")] 29 | UrlParse { 30 | /// The origin path of the asset, used as an identifier 31 | origin_path: String, 32 | /// Details of the error 33 | #[source] 34 | details: url::ParseError, 35 | }, 36 | 37 | /// This error indicates that the received headers were not able to be 38 | /// parsed into a string, which means they may be corrupted in some way. 39 | #[error("failed to parse header at {origin_path}")] 40 | #[cfg(feature = "remote")] 41 | HeaderParse { 42 | /// The origin path of the asset, used as an identifier 43 | origin_path: String, 44 | /// Details of the error 45 | #[source] 46 | details: reqwest::header::ToStrError, 47 | }, 48 | 49 | /// This error indicates that the given mime type was not able to be 50 | /// parsed into a string, which means it may be corrupted in some way. 
51 | #[error( 52 | "when fetching asset at {origin_path}, the server's response mime type couldn't be parsed" 53 | )] 54 | #[cfg(feature = "remote")] 55 | MimeParse { 56 | /// The origin path of the asset, used as an identifier 57 | origin_path: String, 58 | /// Details of the error 59 | #[source] 60 | details: mime::FromStrError, 61 | }, 62 | 63 | /// This error indicates that the mime type of the requested remote asset 64 | /// was not an image. 65 | #[error("when fetching asset at {origin_path}, the server's response mime type did not indicate an image.")] 66 | #[diagnostic(help( 67 | "Please make sure the asset url is correct and that the server is properly configured." 68 | ))] 69 | #[cfg(feature = "remote")] 70 | RemoteAssetNonImageMimeType { 71 | /// The origin path of the asset, used as an identifier 72 | origin_path: String, 73 | }, 74 | 75 | /// This error indicates that the mime type of the requested remote asset 76 | /// was of a type that axoasset does not support. 77 | #[error("when fetching asset at {origin_path}, the server responded with a mime type that was not supported")] 78 | #[diagnostic(help( 79 | "Please make sure the asset url is correct and that the server is properly configured" 80 | ))] 81 | #[cfg(feature = "remote")] 82 | RemoteAssetMimeTypeNotSupported { 83 | /// The origin path of the asset, used as an identifier 84 | origin_path: String, 85 | /// The mimetype from the server response 86 | mimetype: String, 87 | }, 88 | 89 | /// This error indicates that the requested remote asset was an image, but 90 | /// axoasset could not determine what file extension to use for the 91 | /// received format. 92 | #[error("when fetching asset at {origin_path}, we could not determine an appropriate file extension based on the server response")] 93 | #[diagnostic(help( 94 | "Please make sure the asset url is correct and that the server is properly configured" 95 | ))] 96 | #[cfg(feature = "remote")] 97 | RemoteAssetIndeterminateImageFormatExtension { 98 | /// The origin path of the asset, used as an identifier 99 | origin_path: String, 100 | }, 101 | 102 | /// This error indicates that the server response for the remote asset request 103 | /// did not include a content-type header. Axoasset needs the content-type 104 | /// header to determine what type of file the asset contains. 105 | #[error("when fetching asset at {origin_path}, the server's response did not contain a content type header")] 106 | #[diagnostic(help( 107 | "Please make sure the asset url is correct and that the server is properly configured" 108 | ))] 109 | #[cfg(feature = "remote")] 110 | RemoteAssetMissingContentTypeHeader { 111 | /// The origin path of the asset, used as an identifier 112 | origin_path: String, 113 | }, 114 | 115 | /// This error indicates that axoasset failed to write a remote asset to the 116 | /// local filesystem. 117 | #[error("failed to write asset at {origin_url} to {dest_path}: Could not find asset at provided path.")] 118 | #[diagnostic(help("Make sure your path is correct and your server is configured correctly."))] 119 | #[cfg(feature = "remote")] 120 | RemoteAssetWriteFailed { 121 | /// The origin path of the asset, used as an identifier 122 | origin_url: crate::remote::UrlString, 123 | /// The path where the asset was being written to 124 | dest_path: camino::Utf8PathBuf, 125 | /// Details of the error 126 | #[source] 127 | details: std::io::Error, 128 | }, 129 | 130 | /// This error indicates that axoasset failed to fetch a local asset at the 131 | /// provided path.
132 | #[error("failed to fetch asset at {origin_path}: Could not find asset at provided path.")] 133 | LocalAssetNotFound { 134 | /// The origin path of the asset, used as an identifier 135 | origin_path: String, 136 | /// Details of the error 137 | #[source] 138 | details: std::io::Error, 139 | }, 140 | 141 | /// This error indicates that axoasset failed to copy a local asset. 142 | #[error("failed to copy asset from {origin_path} to {dest_path}")] 143 | LocalAssetCopyFailed { 144 | /// The origin path of the asset, used as an identifier 145 | origin_path: String, 146 | /// The path where the asset was being copied to 147 | dest_path: String, 148 | /// Details of the error 149 | #[source] 150 | details: std::io::Error, 151 | }, 152 | 153 | /// This error indicates that axoasset failed to read a local asset at the 154 | /// provided path. 155 | #[error("failed to read asset from {origin_path}")] 156 | LocalAssetReadFailed { 157 | /// The origin path of the asset, used as an identifier 158 | origin_path: String, 159 | /// Details of the error 160 | #[source] 161 | details: std::io::Error, 162 | }, 163 | 164 | /// This error indicates that axoasset failed to write a local asset. 165 | #[error("failed to write asset from {origin_path} to {dest_path}.")] 166 | LocalAssetWriteFailed { 167 | /// The origin path of the asset, used as an identifier 168 | origin_path: String, 169 | /// The path where the asset was being written to 170 | dest_path: String, 171 | /// Details of the error 172 | #[source] 173 | details: std::io::Error, 174 | }, 175 | 176 | /// This error indicates that axoasset failed to write a new asset 177 | #[error("failed to write a new asset to {dest_path}.")] 178 | #[diagnostic(help("Make sure you have the correct permissions to create a new file."))] 179 | LocalAssetWriteNewFailed { 180 | /// The path where the asset was being written to 181 | dest_path: String, 182 | /// Details of the error 183 | #[source] 184 | details: std::io::Error, 185 | }, 186 | 187 | /// This error indicates that axoasset failed to create a new directory 188 | #[error("failed to write a new directory to {dest_path}.")] 189 | #[diagnostic(help("Make sure you have the correct permissions to create a new directory."))] 190 | LocalAssetDirCreationFailed { 191 | /// The path where the directory was meant to be created 192 | dest_path: String, 193 | /// Details of the error 194 | #[source] 195 | details: std::io::Error, 196 | }, 197 | 198 | /// This error indicates that axoasset failed to delete an asset 199 | #[error("failed to delete asset at {dest_path}.")] 200 | LocalAssetRemoveFailed { 201 | /// The path that was going to be deleted 202 | dest_path: String, 203 | /// Details of the error 204 | #[source] 205 | details: std::io::Error, 206 | }, 207 | 208 | /// This error indicates that axoasset could not determine the filename for 209 | /// a local asset. 210 | #[error("could not determine file name for asset at {origin_path}")] 211 | LocalAssetMissingFilename { 212 | /// The origin path of the asset, used as an identifier 213 | origin_path: String, 214 | }, 215 | 216 | /// This error indicates we ran into an issue when creating an archive.
217 | #[error("failed to create archive: {reason}")] 218 | Compression { 219 | /// A specific step that failed 220 | reason: String, 221 | /// Details of the error 222 | #[source] 223 | details: std::io::Error, 224 | }, 225 | 226 | /// Some error decompressing a tarball/zip 227 | #[cfg(any(feature = "compression-zip", feature = "compression-tar"))] 228 | #[error("Failed to extract archive {origin_path}")] 229 | Decompression { 230 | /// The origin path of the asset, used as an identifier 231 | origin_path: String, 232 | /// Details of the error 233 | #[source] 234 | details: std::io::Error, 235 | }, 236 | 237 | /// This error indicates we ran `std::env::current_dir` and somehow got an error. 238 | #[error("Failed to get the current working directory")] 239 | CurrentDir { 240 | /// Details of the error 241 | #[source] 242 | details: std::io::Error, 243 | }, 244 | /// This error indicates we failed to convert a Path/PathBuf to a Utf8Path/Utf8PathBuf 245 | #[error("This path isn't utf8: {path:?}")] 246 | Utf8Path { 247 | /// The problematic path 248 | path: std::path::PathBuf, 249 | }, 250 | /// This error indicates we tried to strip_prefix a path that should have been 251 | /// a descendant of another, but it didn't work. 252 | #[error("Child wasn't nested under its parent: {root_dir} => {child_dir}")] 253 | #[diagnostic(help("Are symlinks involved?"))] 254 | PathNesting { 255 | /// The root/ancestor dir 256 | root_dir: camino::Utf8PathBuf, 257 | /// The child/descendant path 258 | child_dir: camino::Utf8PathBuf, 259 | }, 260 | 261 | #[error("Failed to find {desired_filename} in an ancestor of {start_dir}")] 262 | /// This error indicates we failed to find the desired file in an ancestor of the search dir. 263 | SearchFailed { 264 | /// The dir we started the search in 265 | start_dir: camino::Utf8PathBuf, 266 | /// The filename we were searching for 267 | desired_filename: String, 268 | }, 269 | 270 | #[error("Failed to find {desired_filename} within archive being decompressed")] 271 | /// This error indicates we failed to find the desired file within a tarball or zip 272 | ExtractFilenameFailed { 273 | /// The filename we were searching for 274 | desired_filename: String, 275 | }, 276 | 277 | #[error("Failed to walk to ancestor of {origin_path}")] 278 | /// Walkdir failed to yield an entry 279 | WalkDirFailed { 280 | /// The root path we were trying to walk 281 | origin_path: camino::Utf8PathBuf, 282 | /// Inner walkdir error 283 | #[source] 284 | details: walkdir::Error, 285 | }, 286 | 287 | /// This error indicates we tried to deserialize some JSON with serde_json 288 | /// but failed. 289 | #[cfg(feature = "json-serde")] 290 | #[error("failed to parse JSON")] 291 | Json { 292 | /// The SourceFile we were trying to parse 293 | #[source_code] 294 | source: crate::SourceFile, 295 | /// The range the error was found on 296 | #[label] 297 | span: Option<miette::SourceSpan>, 298 | /// Details of the error 299 | #[source] 300 | details: serde_json::Error, 301 | }, 302 | 303 | /// This error indicates we tried to deserialize some TOML with toml-rs (serde) 304 | /// but failed.
305 | #[cfg(feature = "toml-serde")] 306 | #[error("failed to parse TOML")] 307 | Toml { 308 | /// The SourceFile we were try to parse 309 | #[source_code] 310 | source: crate::SourceFile, 311 | /// The range the error was found on 312 | #[label] 313 | span: Option, 314 | /// Details of the error 315 | #[source] 316 | details: toml::de::Error, 317 | }, 318 | 319 | /// This error indicates we tried to deserialize some TOML with toml_edit 320 | /// but failed. 321 | #[cfg(feature = "toml-edit")] 322 | #[error("failed to edit TOML document")] 323 | TomlEdit { 324 | /// The SourceFile we were trying to parse 325 | #[source_code] 326 | source: crate::SourceFile, 327 | /// The range the error was found on 328 | #[label] 329 | span: Option, 330 | /// Details of the error 331 | #[source] 332 | details: toml_edit::TomlError, 333 | }, 334 | 335 | /// This error indicates we tried to deserialize some YAML with serde_yml 336 | /// but failed. 337 | #[cfg(feature = "yaml-serde")] 338 | #[error("failed to parse YAML")] 339 | Yaml { 340 | /// The SourceFile we were try to parse 341 | #[source_code] 342 | source: crate::SourceFile, 343 | /// The range the error was found on 344 | #[label] 345 | span: Option, 346 | /// Details of the error 347 | #[source] 348 | details: serde_yml::Error, 349 | }, 350 | } 351 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![deny(missing_docs)] 2 | #![allow(clippy::result_large_err)] 3 | 4 | //! # axoasset 5 | //! > 📮 load, write, and copy remote and local assets 6 | //! 7 | //! this library is a utility focused on managing both local (filesystem) assets 8 | //! and remote (via http/https) assets. the bulk of the logic is not terribly 9 | //! interesting or uniquely engineered; the purpose this library is primarily 10 | //! to unify and co-locate the logic to make debugging simpler and error handling 11 | //! more consistent and comprehensive. 12 | 13 | #[cfg(any(feature = "compression-zip", feature = "compression-tar"))] 14 | pub(crate) mod compression; 15 | pub(crate) mod dirs; 16 | pub mod error; 17 | pub mod local; 18 | #[cfg(feature = "remote")] 19 | pub mod remote; 20 | pub mod source; 21 | pub mod spanned; 22 | 23 | pub use error::AxoassetError; 24 | pub use local::LocalAsset; 25 | #[cfg(feature = "remote")] 26 | pub use remote::AxoClient; 27 | // Simplifies raw access to reqwest without depending on a separate copy 28 | #[cfg(feature = "remote")] 29 | pub use reqwest; 30 | #[cfg(feature = "json-serde")] 31 | pub use serde_json; 32 | #[cfg(feature = "yaml-serde")] 33 | pub use serde_yml; 34 | pub use source::SourceFile; 35 | pub use spanned::Spanned; 36 | #[cfg(feature = "toml-serde")] 37 | pub use toml; 38 | #[cfg(feature = "toml-edit")] 39 | pub use toml_edit; 40 | -------------------------------------------------------------------------------- /src/local.rs: -------------------------------------------------------------------------------- 1 | //! Local file operations 2 | 3 | use std::fs; 4 | 5 | use camino::{Utf8Path, Utf8PathBuf}; 6 | 7 | use crate::{dirs, error::*}; 8 | 9 | /// A local asset contains a path on the local filesystem and its contents 10 | #[derive(Debug)] 11 | pub struct LocalAsset { 12 | /// The computed filename from origin_path 13 | filename: String, 14 | /// A string representing a path on the local filesystem, where the asset 15 | /// originated. 
For a new asset, this will be the path you want the asset 16 | /// to be written to. This path is how the filename is determined for all 17 | /// asset operations. 18 | origin_path: Utf8PathBuf, 19 | /// The contents of the asset as a vector of bytes. 20 | contents: Vec, 21 | } 22 | 23 | impl LocalAsset { 24 | /// Gets the filename of the LocalAsset 25 | pub fn filename(&self) -> &str { 26 | &self.filename 27 | } 28 | 29 | /// Gets the origin_path of the LocalAsset 30 | pub fn origin_path(&self) -> &Utf8Path { 31 | &self.origin_path 32 | } 33 | 34 | /// Gets the bytes of the LocalAsset 35 | pub fn as_bytes(&self) -> &[u8] { 36 | &self.contents 37 | } 38 | 39 | /// Gets the bytes of the LocalAsset by-value 40 | pub fn into_bytes(self) -> Vec { 41 | self.contents 42 | } 43 | 44 | /// A new asset is created with claimed path on the local filesystem and a 45 | /// vector of bytes representing its contents. 46 | /// 47 | /// Note that this DOES NOT do any IO, it just pretends the given bytes 48 | /// were loaded from that location. 49 | pub fn new(origin_path: impl AsRef, contents: Vec) -> Result { 50 | let origin_path = origin_path.as_ref(); 51 | Ok(LocalAsset { 52 | filename: filename(origin_path)?, 53 | origin_path: origin_path.to_owned(), 54 | contents, 55 | }) 56 | } 57 | 58 | /// Loads an asset from a path on the local filesystem, returning a 59 | /// LocalAsset struct 60 | pub fn load_asset(origin_path: impl AsRef) -> Result { 61 | let origin_path = origin_path.as_ref(); 62 | match origin_path.try_exists() { 63 | Ok(_) => match fs::read(origin_path) { 64 | Ok(contents) => Ok(LocalAsset { 65 | filename: filename(origin_path)?, 66 | origin_path: origin_path.to_owned(), 67 | contents, 68 | }), 69 | Err(details) => Err(AxoassetError::LocalAssetReadFailed { 70 | origin_path: origin_path.to_string(), 71 | details, 72 | }), 73 | }, 74 | Err(details) => Err(AxoassetError::LocalAssetNotFound { 75 | origin_path: origin_path.to_string(), 76 | details, 77 | }), 78 | } 79 | } 80 | 81 | /// Loads an asset from a path on the local filesystem, returning a 82 | /// string of its contents 83 | pub fn load_string(origin_path: impl AsRef) -> Result { 84 | let origin_path = origin_path.as_ref(); 85 | match origin_path.try_exists() { 86 | Ok(_) => match fs::read_to_string(origin_path) { 87 | Ok(contents) => Ok(contents), 88 | Err(details) => Err(AxoassetError::LocalAssetReadFailed { 89 | origin_path: origin_path.to_string(), 90 | details, 91 | }), 92 | }, 93 | Err(details) => Err(AxoassetError::LocalAssetNotFound { 94 | origin_path: origin_path.to_string(), 95 | details, 96 | }), 97 | } 98 | } 99 | 100 | /// Loads an asset from a path on the local filesystem, returning a 101 | /// vector of bytes of its contents 102 | pub fn load_bytes(origin_path: impl AsRef) -> Result> { 103 | let origin_path = origin_path.as_ref(); 104 | match origin_path.try_exists() { 105 | Ok(_) => match fs::read(origin_path) { 106 | Ok(contents) => Ok(contents), 107 | Err(details) => Err(AxoassetError::LocalAssetReadFailed { 108 | origin_path: origin_path.to_string(), 109 | details, 110 | }), 111 | }, 112 | Err(details) => Err(AxoassetError::LocalAssetNotFound { 113 | origin_path: origin_path.to_string(), 114 | details, 115 | }), 116 | } 117 | } 118 | 119 | /// Writes an asset to a path on the local filesystem, determines the 120 | /// filename from the origin path 121 | pub fn write_to_dir(&self, dest_dir: impl AsRef) -> Result { 122 | let dest_dir = dest_dir.as_ref(); 123 | let dest_path = dest_dir.join(&self.filename); 124 | match 
fs::write(&dest_path, &self.contents) { 125 | Ok(_) => Ok(dest_path), 126 | Err(details) => Err(AxoassetError::LocalAssetWriteFailed { 127 | origin_path: self.origin_path.to_string(), 128 | dest_path: dest_path.to_string(), 129 | details, 130 | }), 131 | } 132 | } 133 | 134 | /// Writes an asset to a path on the local filesystem 135 | pub fn write_new(contents: &str, dest_path: impl AsRef) -> Result { 136 | let dest_path = dest_path.as_ref(); 137 | if dest_path.file_name().is_none() { 138 | return Err(AxoassetError::LocalAssetMissingFilename { 139 | origin_path: dest_path.to_string(), 140 | }); 141 | } 142 | match fs::write(dest_path, contents) { 143 | Ok(_) => Ok(dest_path.into()), 144 | Err(details) => Err(AxoassetError::LocalAssetWriteNewFailed { 145 | dest_path: dest_path.to_string(), 146 | details, 147 | }), 148 | } 149 | } 150 | 151 | /// Writes an asset and all of its parent directories on the local filesystem. 152 | pub fn write_new_all(contents: &str, dest_path: impl AsRef) -> Result { 153 | let dest_path = dest_path.as_ref(); 154 | if dest_path.file_name().is_none() { 155 | return Err(AxoassetError::LocalAssetMissingFilename { 156 | origin_path: dest_path.to_string(), 157 | }); 158 | } 159 | let dest_dir = dest_path.parent().unwrap(); 160 | match fs::create_dir_all(dest_dir) { 161 | Ok(_) => (), 162 | Err(details) => { 163 | return Err(AxoassetError::LocalAssetWriteNewFailed { 164 | dest_path: dest_path.to_string(), 165 | details, 166 | }) 167 | } 168 | } 169 | LocalAsset::write_new(contents, dest_path) 170 | } 171 | 172 | /// Creates a new directory 173 | pub fn create_dir(dest: impl AsRef) -> Result { 174 | let dest_path = dest.as_ref(); 175 | match fs::create_dir(dest_path) { 176 | Ok(_) => Ok(dest_path.into()), 177 | Err(details) => Err(AxoassetError::LocalAssetDirCreationFailed { 178 | dest_path: dest_path.to_string(), 179 | details, 180 | }), 181 | } 182 | } 183 | 184 | /// Creates a new directory, including all parent directories 185 | pub fn create_dir_all(dest: impl AsRef) -> Result { 186 | let dest_path = dest.as_ref(); 187 | match fs::create_dir_all(dest_path) { 188 | Ok(_) => Ok(dest_path.into()), 189 | Err(details) => Err(AxoassetError::LocalAssetDirCreationFailed { 190 | dest_path: dest_path.to_string(), 191 | details, 192 | }), 193 | } 194 | } 195 | 196 | /// Removes a file 197 | pub fn remove_file(dest: impl AsRef) -> Result<()> { 198 | let dest_path = dest.as_ref(); 199 | if let Err(details) = fs::remove_file(dest_path) { 200 | return Err(AxoassetError::LocalAssetRemoveFailed { 201 | dest_path: dest_path.to_string(), 202 | details, 203 | }); 204 | } 205 | 206 | Ok(()) 207 | } 208 | 209 | /// Removes a directory 210 | pub fn remove_dir(dest: impl AsRef) -> Result<()> { 211 | let dest_path = dest.as_ref(); 212 | if dest_path.is_dir() { 213 | if let Err(details) = fs::remove_dir(dest_path) { 214 | return Err(AxoassetError::LocalAssetRemoveFailed { 215 | dest_path: dest_path.to_string(), 216 | details, 217 | }); 218 | } 219 | } 220 | 221 | Ok(()) 222 | } 223 | 224 | /// Removes a directory and all of its contents 225 | pub fn remove_dir_all(dest: impl AsRef) -> Result<()> { 226 | let dest_path = dest.as_ref(); 227 | if dest_path.is_dir() { 228 | if let Err(details) = fs::remove_dir_all(dest_path) { 229 | return Err(AxoassetError::LocalAssetRemoveFailed { 230 | dest_path: dest_path.to_string(), 231 | details, 232 | }); 233 | } 234 | } 235 | 236 | Ok(()) 237 | } 238 | 239 | /// Copies an asset from one location on the local filesystem to the given directory 240 | /// 
241 | /// The destination will use the same file name as the origin has. 242 | /// If you want to specify the destination file's name, use [`LocalAsset::copy_file_to_file`][]. 243 | /// 244 | /// The returned path is the resulting file. 245 | pub fn copy_file_to_dir( 246 | origin_path: impl AsRef, 247 | dest_dir: impl AsRef, 248 | ) -> Result { 249 | let origin_path = origin_path.as_ref(); 250 | let dest_dir = dest_dir.as_ref(); 251 | 252 | let filename = filename(origin_path)?; 253 | let dest_path = dest_dir.join(filename); 254 | Self::copy_file_to_file(origin_path, &dest_path)?; 255 | 256 | Ok(dest_path) 257 | } 258 | 259 | /// Copies an asset from one location on the local filesystem to another 260 | /// 261 | /// Both paths are assumed to be file names. 262 | pub fn copy_file_to_file( 263 | origin_path: impl AsRef, 264 | dest_path: impl AsRef, 265 | ) -> Result<()> { 266 | let origin_path = origin_path.as_ref(); 267 | let dest_path = dest_path.as_ref(); 268 | 269 | fs::copy(origin_path, dest_path).map_err(|e| AxoassetError::LocalAssetCopyFailed { 270 | origin_path: origin_path.to_string(), 271 | dest_path: dest_path.to_string(), 272 | details: e, 273 | })?; 274 | 275 | Ok(()) 276 | } 277 | 278 | /// Recursively copies a directory from one location to the given directory 279 | /// 280 | /// The destination will use the same dir name as the origin has, so 281 | /// dest_dir is the *parent* of the copied directory. If you want to specify the destination's 282 | /// dir name, use [`LocalAsset::copy_dir_to_dir`][]. 283 | /// 284 | /// The returned path is the resulting dir. 285 | pub fn copy_dir_to_parent_dir( 286 | origin_path: impl AsRef, 287 | dest_dir: impl AsRef, 288 | ) -> Result { 289 | let origin_path = origin_path.as_ref(); 290 | let dest_dir = dest_dir.as_ref(); 291 | 292 | let filename = filename(origin_path)?; 293 | let dest_path = dest_dir.join(filename); 294 | Self::copy_dir_to_dir(origin_path, &dest_path)?; 295 | 296 | Ok(dest_path) 297 | } 298 | 299 | /// Recursively copies a directory from one location to another 300 | /// 301 | /// Both paths are assumed to be the names of the directory being copied 302 | /// (i.e. dest_path is not the parent dir). 303 | pub fn copy_dir_to_dir( 304 | origin_path: impl AsRef, 305 | dest_path: impl AsRef, 306 | ) -> Result<()> { 307 | let origin_path = origin_path.as_ref(); 308 | let dest_path = dest_path.as_ref(); 309 | 310 | for entry in dirs::walk_dir(origin_path) { 311 | let entry = entry?; 312 | let from = &entry.full_path; 313 | let to = dest_path.join(&entry.rel_path); 314 | 315 | if entry.file_type().is_dir() { 316 | // create directories (even empty ones!) 317 | LocalAsset::create_dir(to)?; 318 | } else if entry.file_type().is_file() { 319 | // copy files 320 | LocalAsset::copy_file_to_file(from, to)?; 321 | } else { 322 | // other kinds of file presumed to be symlinks which we don't handle 323 | debug_assert!( 324 | entry.file_type().is_symlink(), 325 | "unknown type of file at {from}, axoasset needs to be updated to support this!" 326 | ); 327 | } 328 | } 329 | Ok(()) 330 | } 331 | 332 | /// Get the current working directory 333 | pub fn current_dir() -> Result { 334 | let cur_dir = 335 | std::env::current_dir().map_err(|details| AxoassetError::CurrentDir { details })?; 336 | let cur_dir = Utf8PathBuf::from_path_buf(cur_dir) 337 | .map_err(|details| AxoassetError::Utf8Path { path: details })?; 338 | Ok(cur_dir) 339 | } 340 | 341 | /// Find a desired file in the provided dir or an ancestor of it. 
342 | /// 343 | /// On success returns the path to the found file. 344 | pub fn search_ancestors( 345 | start_dir: impl AsRef, 346 | desired_filename: &str, 347 | ) -> Result { 348 | let start_dir = start_dir.as_ref(); 349 | // We want a proper absolute path so we can compare paths to workspace roots easily. 350 | // 351 | // Also if someone starts the path with ./ we should trim that to avoid weirdness. 352 | // Maybe we should be using proper `canonicalize` but then we'd need to canonicalize 353 | // every path we get from random APIs to be consistent and that's a whole mess of its own! 354 | let start_dir = if let Ok(clean_dir) = start_dir.strip_prefix("./") { 355 | clean_dir.to_owned() 356 | } else { 357 | start_dir.to_owned() 358 | }; 359 | let start_dir = if start_dir.is_relative() { 360 | let current_dir = LocalAsset::current_dir()?; 361 | current_dir.join(start_dir) 362 | } else { 363 | start_dir 364 | }; 365 | for dir_path in start_dir.ancestors() { 366 | let file_path = dir_path.join(desired_filename); 367 | if file_path.is_file() { 368 | return Ok(file_path); 369 | } 370 | } 371 | Err(AxoassetError::SearchFailed { 372 | start_dir, 373 | desired_filename: desired_filename.to_owned(), 374 | }) 375 | } 376 | 377 | /// Creates a new .tar.gz file from a provided directory 378 | /// 379 | /// The with_root argument specifies that all contents of dest_dir should be placed 380 | /// under the given path within the archive. If None then the contents of the dir will 381 | /// be placed directly in the root. root_dir can be a proper path with subdirs 382 | /// (e.g. `root_dir = "some/dir/prefix"` is valid). 383 | #[cfg(any(feature = "compression", feature = "compression-tar"))] 384 | pub fn tar_gz_dir( 385 | origin_dir: impl AsRef, 386 | dest_dir: impl AsRef, 387 | with_root: Option>, 388 | ) -> Result<()> { 389 | crate::compression::tar_dir( 390 | Utf8Path::new(origin_dir.as_ref()), 391 | Utf8Path::new(dest_dir.as_ref()), 392 | with_root.as_ref().map(|p| p.as_ref()), 393 | &crate::compression::CompressionImpl::Gzip, 394 | ) 395 | } 396 | 397 | /// Extracts the entire tarball at `tarball` to a provided directory 398 | #[cfg(any(feature = "compression", feature = "compression-tar"))] 399 | pub fn untar_gz_all(tarball: &Utf8Path, dest_path: &Utf8Path) -> Result<()> { 400 | crate::compression::untar_all( 401 | tarball, 402 | dest_path, 403 | &crate::compression::CompressionImpl::Gzip, 404 | ) 405 | } 406 | 407 | /// Extracts the file named `filename` within the tarball at `tarball` and returns its contents as bytes 408 | #[cfg(any(feature = "compression", feature = "compression-tar"))] 409 | pub fn untar_gz_file(tarball: &Utf8Path, filename: &str) -> Result> { 410 | crate::compression::untar_file( 411 | tarball, 412 | filename, 413 | &crate::compression::CompressionImpl::Gzip, 414 | ) 415 | } 416 | 417 | /// Creates a new .tar.xz file from a provided directory 418 | /// 419 | /// The with_root argument specifies that all contents of dest_dir should be placed 420 | /// under the given path within the archive. If None then the contents of the dir will 421 | /// be placed directly in the root. root_dir can be a proper path with subdirs 422 | /// (e.g. `root_dir = "some/dir/prefix"` is valid). 
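/// 
/// A minimal usage sketch (the paths and the `"my-app"` root prefix are hypothetical):
/// 
/// ```ignore
/// use axoasset::LocalAsset;
/// 
/// // Pack ./staging into a .tar.xz archive at the destination given by the second
/// // argument, nesting its contents under "my-app/" inside the archive.
/// LocalAsset::tar_xz_dir("./staging", "my-app.tar.xz", Some("my-app"))?;
/// ```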
423 | #[cfg(any(feature = "compression", feature = "compression-tar"))] 424 | pub fn tar_xz_dir( 425 | origin_dir: impl AsRef, 426 | dest_dir: impl AsRef, 427 | with_root: Option>, 428 | ) -> Result<()> { 429 | crate::compression::tar_dir( 430 | Utf8Path::new(origin_dir.as_ref()), 431 | Utf8Path::new(dest_dir.as_ref()), 432 | with_root.as_ref().map(|p| p.as_ref()), 433 | &crate::compression::CompressionImpl::Xzip, 434 | ) 435 | } 436 | 437 | /// Extracts the entire tarball at `tarball` to a provided directory 438 | #[cfg(any(feature = "compression", feature = "compression-tar"))] 439 | pub fn untar_xz_all( 440 | tarball: impl AsRef, 441 | dest_path: impl AsRef, 442 | ) -> Result<()> { 443 | crate::compression::untar_all( 444 | Utf8Path::new(tarball.as_ref()), 445 | Utf8Path::new(dest_path.as_ref()), 446 | &crate::compression::CompressionImpl::Xzip, 447 | ) 448 | } 449 | 450 | /// Extracts the file named `filename` within the tarball at `tarball` and returns its contents as bytes 451 | #[cfg(any(feature = "compression", feature = "compression-tar"))] 452 | pub fn untar_xz_file(tarball: impl AsRef, filename: &str) -> Result> { 453 | crate::compression::untar_file( 454 | Utf8Path::new(tarball.as_ref()), 455 | filename, 456 | &crate::compression::CompressionImpl::Xzip, 457 | ) 458 | } 459 | 460 | /// Creates a new .tar.zstd file from a provided directory 461 | /// 462 | /// The with_root argument specifies that all contents of dest_dir should be placed 463 | /// under the given path within the archive. If None then the contents of the dir will 464 | /// be placed directly in the root. root_dir can be a proper path with subdirs 465 | /// (e.g. `root_dir = "some/dir/prefix"` is valid). 466 | #[cfg(any(feature = "compression", feature = "compression-tar"))] 467 | pub fn tar_zstd_dir( 468 | origin_dir: impl AsRef, 469 | dest_dir: impl AsRef, 470 | with_root: Option>, 471 | ) -> Result<()> { 472 | crate::compression::tar_dir( 473 | Utf8Path::new(origin_dir.as_ref()), 474 | Utf8Path::new(dest_dir.as_ref()), 475 | with_root.as_ref().map(|p| p.as_ref()), 476 | &crate::compression::CompressionImpl::Zstd, 477 | ) 478 | } 479 | 480 | /// Extracts the entire tarball at `tarball` to a provided directory 481 | #[cfg(any(feature = "compression", feature = "compression-tar"))] 482 | pub fn untar_zstd_all( 483 | tarball: impl AsRef, 484 | dest_path: impl AsRef, 485 | ) -> Result<()> { 486 | crate::compression::untar_all( 487 | Utf8Path::new(tarball.as_ref()), 488 | Utf8Path::new(dest_path.as_ref()), 489 | &crate::compression::CompressionImpl::Zstd, 490 | ) 491 | } 492 | 493 | /// Extracts the file named `filename` within the tarball at `tarball` and returns its contents as bytes 494 | #[cfg(any(feature = "compression", feature = "compression-tar"))] 495 | pub fn untar_zstd_file(tarball: impl AsRef, filename: &str) -> Result> { 496 | crate::compression::untar_file( 497 | Utf8Path::new(tarball.as_ref()), 498 | filename, 499 | &crate::compression::CompressionImpl::Zstd, 500 | ) 501 | } 502 | 503 | /// Creates a new .zip file from a provided directory 504 | /// 505 | /// The with_root argument specifies that all contents of dest_dir should be placed 506 | /// under the given path within the archive. If None then the contents of the dir will 507 | /// be placed directly in the root. root_dir can be a proper path with subdirs 508 | /// (e.g. `root_dir = "some/dir/prefix"` is valid). 
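/// 
/// A hypothetical call mirroring the tar variants above (paths are illustrative;
/// `None::<&str>` is spelled out only to give the unused root prefix a concrete type):
/// 
/// ```ignore
/// use axoasset::LocalAsset;
/// 
/// // Zip ./staging into my-app.zip with no extra root directory inside the archive.
/// LocalAsset::zip_dir("./staging", "my-app.zip", None::<&str>)?;
/// ```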
509 | #[cfg(any(feature = "compression", feature = "compression-zip"))] 510 | pub fn zip_dir( 511 | origin_dir: impl AsRef, 512 | dest_dir: impl AsRef, 513 | with_root: Option>, 514 | ) -> Result<()> { 515 | crate::compression::zip_dir( 516 | Utf8Path::new(origin_dir.as_ref()), 517 | Utf8Path::new(dest_dir.as_ref()), 518 | with_root.as_ref().map(|p| p.as_ref()), 519 | ) 520 | } 521 | 522 | /// Extracts a .zip file to the a provided directory 523 | #[cfg(any(feature = "compression", feature = "compression-zip"))] 524 | pub fn unzip_all(zipfile: impl AsRef, dest_dir: impl AsRef) -> Result<()> { 525 | crate::compression::unzip_all( 526 | Utf8Path::new(zipfile.as_ref()), 527 | Utf8Path::new(dest_dir.as_ref()), 528 | ) 529 | } 530 | 531 | /// Extracts the file named `filename` within the ZIP file at `zipfile` and returns its contents as bytes 532 | #[cfg(any(feature = "compression", feature = "compression-zip"))] 533 | pub fn unzip_file(zipfile: impl AsRef, filename: &str) -> Result> { 534 | crate::compression::unzip_file(Utf8Path::new(zipfile.as_ref()), filename) 535 | } 536 | } 537 | 538 | /// Get the filename of a path, or a pretty error 539 | pub fn filename(origin_path: &Utf8Path) -> Result { 540 | if let Some(filename) = origin_path.file_name() { 541 | Ok(filename.to_string()) 542 | } else { 543 | Err(AxoassetError::LocalAssetMissingFilename { 544 | origin_path: origin_path.to_string(), 545 | }) 546 | } 547 | } 548 | -------------------------------------------------------------------------------- /src/remote.rs: -------------------------------------------------------------------------------- 1 | //! Remote HTTP operations 2 | 3 | use camino::{Utf8Path, Utf8PathBuf}; 4 | use std::fs; 5 | 6 | use crate::{error::*, SourceFile}; 7 | 8 | /// An unparsed Url (borrowed) 9 | pub type UrlStr = str; 10 | /// An unparsed Url (owned) 11 | pub type UrlString = String; 12 | 13 | /// A client for http file requests 14 | /// 15 | /// Note that you can and should freely Clone this, as the Client (and its 16 | /// underlying request pool) will be shared between the Clones. 17 | #[derive(Debug, Clone)] 18 | pub struct AxoClient { 19 | client: reqwest::Client, 20 | } 21 | 22 | impl AxoClient { 23 | /// Create an AxoClient with the given reqwest::Client 24 | pub fn with_reqwest(client: reqwest::Client) -> Self { 25 | Self { client } 26 | } 27 | 28 | /// Loads an asset from a URL and returns a [`RemoteAsset`][] containing its body 29 | pub async fn load_asset(&self, url: &UrlStr) -> Result { 30 | let response = self.get(url).await?; 31 | let filename = filename(url, response.headers())?; 32 | let bytes = response 33 | .bytes() 34 | .await 35 | .map_err(wrap_reqwest_err(url))? 
36 | .to_vec(); 37 | Ok(RemoteAsset { 38 | url: url.to_string(), 39 | contents: bytes, 40 | filename, 41 | }) 42 | } 43 | 44 | /// GETs the URL and returns a [`crate::SourceFile`][] containing its body 45 | pub async fn load_source(&self, url: &UrlStr) -> Result { 46 | let text = self.load_string(url).await?; 47 | Ok(SourceFile::new(url, text)) 48 | } 49 | 50 | /// GETs the URL and returns its body as a `String` 51 | pub async fn load_string(&self, url: &UrlStr) -> Result { 52 | let response = self.get(url).await?; 53 | let text = response.text().await.map_err(wrap_reqwest_err(url))?; 54 | Ok(text) 55 | } 56 | 57 | /// GETs the URL and returns its body as a `Vec` 58 | pub async fn load_bytes(&self, url: &UrlStr) -> Result> { 59 | let response = self.get(url).await?; 60 | let bytes = response 61 | .bytes() 62 | .await 63 | .map_err(wrap_reqwest_err(url))? 64 | .to_vec(); 65 | Ok(bytes) 66 | } 67 | 68 | /// GETs the URL and write its bytes to the given local file 69 | pub async fn load_and_write_to_file( 70 | &self, 71 | url: &UrlStr, 72 | dest_file: impl AsRef, 73 | ) -> Result<()> { 74 | let asset = self.load_asset(url).await?; 75 | asset.write_to_file(dest_file).await 76 | } 77 | 78 | /// GETs the URL and write its bytes to the given local dir 79 | /// 80 | /// The filename used will be computed from the url/mime, and the resulting 81 | /// filepath will be returned. 82 | pub async fn load_and_write_to_dir( 83 | &self, 84 | url: &UrlStr, 85 | dest_dir: impl AsRef, 86 | ) -> Result { 87 | let asset = self.load_asset(url).await?; 88 | asset.write_to_dir(dest_dir).await 89 | } 90 | 91 | /// GETs the URL and returns the raw [`reqwest::Response`][] 92 | pub async fn get(&self, url: &UrlStr) -> Result { 93 | self.client 94 | .get(url) 95 | .send() 96 | .await 97 | .map_err(wrap_reqwest_err(url)) 98 | } 99 | 100 | /// HEADs the URL and returns the raw [`reqwest::Response`][] 101 | pub async fn head(&self, url: &UrlStr) -> Result { 102 | self.client 103 | .head(url) 104 | .send() 105 | .await 106 | .map_err(wrap_reqwest_err(url)) 107 | } 108 | } 109 | 110 | fn wrap_reqwest_err(url: &UrlStr) -> impl FnOnce(reqwest::Error) -> AxoassetError + '_ { 111 | |details| AxoassetError::RemoteAssetRequestFailed { 112 | origin_path: url.to_string(), 113 | details, 114 | } 115 | } 116 | 117 | /// A remote asset is an asset that is fetched over the network. 118 | #[derive(Debug)] 119 | pub struct RemoteAsset { 120 | /// A string containing a valid filename and extension. The filename is 121 | /// determined by the origin path and the content-type headers from the 122 | /// server response. 123 | filename: String, 124 | /// A string containing a http or https URL pointing to the asset. This does 125 | /// not need to be `https://origin.com/myfile.ext` as filename is determined by 126 | /// content-type headers in the server response. 127 | url: UrlString, 128 | /// The contents of the asset as a vector of bytes 129 | contents: Vec, 130 | } 131 | 132 | impl RemoteAsset { 133 | /// Gets the filename of the RemoteAsset 134 | /// 135 | /// Filename may be computed based on things like mimetypes, and does not necessarily 136 | /// reflect the raw URL's paths. 
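/// 
/// An illustrative async sketch of where this name comes from (the URL is hypothetical):
/// 
/// ```ignore
/// use axoasset::AxoClient;
/// 
/// let client = AxoClient::with_reqwest(reqwest::Client::new());
/// let asset = client.load_asset("https://example.com/logo.png").await?;
/// // The name is derived from the URL path (plus mime type if needed), not chosen by the caller.
/// println!("fetched {}", asset.filename());
/// asset.write_to_dir("./downloads").await?;
/// ```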
137 | pub fn filename(&self) -> &str { 138 | &self.filename 139 | } 140 | 141 | /// Gets the origin_path of the RemoteAsset (this is an alias for `url`) 142 | pub fn origin_path(&self) -> &str { 143 | &self.url 144 | } 145 | 146 | /// Gets the url of the RemoteAsset 147 | pub fn url(&self) -> &str { 148 | &self.url 149 | } 150 | 151 | /// Gets the bytes of the RemoteAsset 152 | pub fn as_bytes(&self) -> &[u8] { 153 | &self.contents 154 | } 155 | 156 | /// Gets the bytes of the RemoteAsset by-value 157 | pub fn into_bytes(self) -> Vec { 158 | self.contents 159 | } 160 | 161 | /// Writes an RemoteAsset's bytes to the given local directory 162 | /// 163 | /// The filename used will be `RemoteAsset::filename`, and the resulting file 164 | /// path will be returned. 165 | pub async fn write_to_dir(&self, dest_dir: impl AsRef) -> Result { 166 | let dest_path = dest_dir.as_ref().join(&self.filename); 167 | self.write_to_file(&dest_path).await?; 168 | Ok(dest_path) 169 | } 170 | 171 | /// Writes the RemoteAsset's bytes to the given local filepath 172 | /// 173 | /// Note that unlike [`RemoteAsset::write_to_dir`][] this will ignore 174 | /// the computed `RemoteAsset::filename`, preferring the one given here. 175 | pub async fn write_to_file(&self, dest_file: impl AsRef) -> Result<()> { 176 | let dest_path = dest_file.as_ref(); 177 | fs::write(dest_path, &self.contents).map_err(|details| { 178 | AxoassetError::RemoteAssetWriteFailed { 179 | origin_url: self.url.clone(), 180 | dest_path: dest_path.to_owned(), 181 | details, 182 | } 183 | }) 184 | } 185 | } 186 | 187 | fn mimetype(headers: &reqwest::header::HeaderMap, origin_url: &UrlStr) -> Result { 188 | match headers.get(reqwest::header::CONTENT_TYPE) { 189 | Some(content_type) => { 190 | let mtype: mime::Mime = content_type 191 | .to_str() 192 | .map_err(|details| AxoassetError::HeaderParse { 193 | origin_path: origin_url.to_string(), 194 | details, 195 | })? 
196 | .parse() 197 | .map_err(|details| AxoassetError::MimeParse { 198 | origin_path: origin_url.to_string(), 199 | details, 200 | })?; 201 | match mtype.type_() { 202 | mime::IMAGE => Ok(mtype), 203 | mime::TEXT => Ok(mtype), 204 | _ => Err(AxoassetError::RemoteAssetNonImageMimeType { 205 | origin_path: origin_url.to_string(), 206 | }), 207 | } 208 | } 209 | None => Err(AxoassetError::RemoteAssetMissingContentTypeHeader { 210 | origin_path: origin_url.to_string(), 211 | }), 212 | } 213 | } 214 | 215 | fn extension(mimetype: mime::Mime, origin_path: &UrlStr) -> Option { 216 | match mimetype.type_() { 217 | mime::IMAGE => image_extension(mimetype, origin_path).ok(), 218 | mime::TEXT => text_extension(mimetype, origin_path).ok(), 219 | _ => None, 220 | } 221 | } 222 | 223 | fn text_extension(mimetype: mime::Mime, origin_path: &UrlStr) -> Result { 224 | if let Some(extension) = mimetype.suffix() { 225 | Ok(extension.to_string()) 226 | } else { 227 | match mimetype.subtype() { 228 | mime::PLAIN => Ok("txt".to_string()), 229 | mime::CSS => Ok("css".to_string()), 230 | _ => Err(AxoassetError::RemoteAssetMimeTypeNotSupported { 231 | origin_path: origin_path.to_string(), 232 | mimetype: mimetype.to_string(), 233 | }), 234 | } 235 | } 236 | } 237 | 238 | fn image_extension(mimetype: mime::Mime, origin_path: &UrlStr) -> Result { 239 | if let Some(img_format) = image::ImageFormat::from_mime_type(&mimetype) { 240 | let extensions = img_format.extensions_str(); 241 | if !extensions.is_empty() { 242 | Ok(extensions[0].to_string()) 243 | } else { 244 | Err( 245 | AxoassetError::RemoteAssetIndeterminateImageFormatExtension { 246 | origin_path: origin_path.to_string(), 247 | }, 248 | ) 249 | } 250 | } else { 251 | Err(AxoassetError::RemoteAssetMimeTypeNotSupported { 252 | origin_path: origin_path.to_string(), 253 | mimetype: mimetype.to_string(), 254 | }) 255 | } 256 | } 257 | 258 | // FIXME: https://github.com/axodotdev/axoasset/issues/6 259 | // FIXME: https://github.com/axodotdev/axoasset/issues/9 260 | /// Currently, this function will take an asset's origin path, and attempt 261 | /// to identify if the final segment of the URL is a filename. 262 | /// 263 | /// If it does not find a filename it will drop the host from the origin 264 | /// url, slugify the set of the path, and then add an extension based on the 265 | /// Mime type in the associated response headers. 266 | /// 267 | /// A large portion of the origin path is preserved in the filename to help 268 | /// avoid name conflicts, but this is a half measure at best and leaves a 269 | /// lot of room for improvement. 270 | pub fn filename(origin_url: &UrlStr, headers: &reqwest::header::HeaderMap) -> Result { 271 | let mut filestem = url::Url::parse(origin_url) 272 | .map_err(|details| AxoassetError::UrlParse { 273 | origin_path: origin_url.to_owned(), 274 | details, 275 | })? 276 | .path() 277 | .to_string() 278 | .replace('/', "_"); 279 | filestem.remove(0); 280 | if filestem.contains('.') { 281 | Ok(filestem) 282 | } else if let Ok(mimetype) = mimetype(headers, origin_url) { 283 | if let Some(extension) = extension(mimetype, origin_url) { 284 | Ok(format!("{filestem}.{extension}")) 285 | } else { 286 | Ok(filestem) 287 | } 288 | } else { 289 | Ok(filestem) 290 | } 291 | } 292 | -------------------------------------------------------------------------------- /src/source.rs: -------------------------------------------------------------------------------- 1 | //! 
Support for parsing text with richer spanned errors 2 | 3 | use std::fmt::Debug; 4 | use std::sync::Arc; 5 | 6 | use camino::Utf8Path; 7 | use miette::{MietteSpanContents, SourceCode, SourceSpan}; 8 | 9 | use crate::{error::*, LocalAsset}; 10 | 11 | #[cfg(feature = "toml-edit")] 12 | use crate::toml_edit::DocumentMut; 13 | 14 | #[cfg(feature = "json-serde")] 15 | use crate::serde_json; 16 | 17 | #[cfg(feature = "yaml-serde")] 18 | use crate::serde_yml; 19 | 20 | /// The inner contents of a [`SourceFile`][]. 21 | #[derive(Eq, PartialEq)] 22 | struct SourceFileInner { 23 | /// "Name" of the file 24 | filename: String, 25 | /// Origin path of the file 26 | origin_path: String, 27 | /// Contents of the file 28 | contents: String, 29 | } 30 | 31 | /// A file's contents along with its display name 32 | /// 33 | /// This is used for reporting rustc-style diagnostics where we show 34 | /// where in the file we found a problem. It contains an Arc so that 35 | /// it's ~free for everything to pass/copy these around and produce 36 | /// better diagnostics. 37 | #[derive(Clone, Eq, PartialEq)] 38 | pub struct SourceFile { 39 | /// The actual impl 40 | inner: Arc, 41 | } 42 | 43 | impl SourceFile { 44 | /// Create an empty SourceFile with the given name. 45 | /// 46 | /// See [`SourceFile::new`][] for details. 47 | pub fn new_empty(origin_path: &str) -> Self { 48 | Self::new(origin_path, String::new()) 49 | } 50 | 51 | /// Create a new source file with the given name and contents. 52 | /// 53 | /// This is intended for situations where you have the contents already 54 | /// and just want a SourceFile to manage it. This is appropriate for 55 | /// strings that were constructed dynamically or for tests. 56 | /// 57 | /// The origin_path will be used as the filename as well. 58 | pub fn new(origin_path: &str, contents: String) -> Self { 59 | SourceFile { 60 | inner: Arc::new(SourceFileInner { 61 | filename: origin_path.to_owned(), 62 | origin_path: origin_path.to_owned(), 63 | contents, 64 | }), 65 | } 66 | } 67 | 68 | /// SourceFile equivalent of [`LocalAsset::load_asset`][] 69 | pub fn load_local(origin_path: impl AsRef) -> Result { 70 | let origin_path = origin_path.as_ref(); 71 | let contents = LocalAsset::load_string(origin_path)?; 72 | Ok(SourceFile { 73 | inner: Arc::new(SourceFileInner { 74 | filename: crate::local::filename(origin_path)?, 75 | origin_path: origin_path.to_string(), 76 | contents, 77 | }), 78 | }) 79 | } 80 | 81 | /// Try to deserialize the contents of the SourceFile as json 82 | #[cfg(feature = "json-serde")] 83 | pub fn deserialize_json<'a, T: serde::Deserialize<'a>>(&'a self) -> Result { 84 | // Although many JSON parsers support JSON that begins with a BOM, 85 | // json-serde doesn't: 86 | // https://github.com/serde-rs/json/issues/1115 87 | // In UTF-8, \uFEFF (0xEF 0xBB 0xBF) is always the BOM; it's not 88 | // variable like in UTF-16. Since the string is already UTF-8 here, 89 | // stripping the BOM is pretty simple. 
90 | let mut contents = self.contents(); 91 | if let Some(stripped) = contents.strip_prefix('\u{FEFF}') { 92 | contents = stripped; 93 | } 94 | 95 | let json = serde_json::from_str(contents).map_err(|details| { 96 | let span = self.span_for_line_col(details.line(), details.column()); 97 | AxoassetError::Json { 98 | source: self.clone(), 99 | span, 100 | details, 101 | } 102 | })?; 103 | Ok(json) 104 | } 105 | 106 | /// Try to deserialize the contents of the SourceFile as toml 107 | #[cfg(feature = "toml-serde")] 108 | pub fn deserialize_toml<'a, T: for<'de> serde::Deserialize<'de>>(&'a self) -> Result { 109 | let toml = toml::from_str(self.contents()).map_err(|details| { 110 | let span = details.span().map(SourceSpan::from); 111 | AxoassetError::Toml { 112 | source: self.clone(), 113 | span, 114 | details, 115 | } 116 | })?; 117 | Ok(toml) 118 | } 119 | 120 | /// Try to deserialize the contents of the SourceFile as a toml_edit Document 121 | #[cfg(feature = "toml-edit")] 122 | pub fn deserialize_toml_edit(&self) -> Result { 123 | let toml = self.contents().parse::().map_err(|details| { 124 | let span = details.span().map(SourceSpan::from); 125 | AxoassetError::TomlEdit { 126 | source: self.clone(), 127 | span, 128 | details, 129 | } 130 | })?; 131 | Ok(toml) 132 | } 133 | 134 | /// Try to deserialize the contents of the SourceFile as yaml 135 | #[cfg(feature = "yaml-serde")] 136 | pub fn deserialize_yaml<'a, T: for<'de> serde::Deserialize<'de>>(&self) -> Result { 137 | let yaml = serde_yml::from_str(self.contents()).map_err(|details| { 138 | let span = details 139 | .location() 140 | .and_then(|location| self.span_for_line_col(location.line(), location.column())); 141 | AxoassetError::Yaml { 142 | source: self.clone(), 143 | span, 144 | details, 145 | } 146 | })?; 147 | Ok(yaml) 148 | } 149 | 150 | /// Get the filename of a SourceFile 151 | pub fn filename(&self) -> &str { 152 | &self.inner.filename 153 | } 154 | 155 | /// Get the origin_path of a SourceFile 156 | pub fn origin_path(&self) -> &str { 157 | &self.inner.origin_path 158 | } 159 | 160 | /// Get the contents of a SourceFile 161 | pub fn as_str(&self) -> &str { 162 | &self.inner.contents 163 | } 164 | 165 | /// Get the contents of a SourceFile (alias for as_str) 166 | pub fn contents(&self) -> &str { 167 | &self.inner.contents 168 | } 169 | 170 | /// Gets a proper [`SourceSpan`] from a line-and-column representation 171 | /// 172 | /// Both values are 1's based, so `(1, 1)` is the start of the file. 173 | /// If anything underflows/overflows or goes out of bounds then we'll 174 | /// just return `None`. `unwrap_or_default()` will give you the empty span from that. 175 | /// 176 | /// This is a pretty heavy-weight process, we have to basically linearly scan the source 177 | /// for this position! 
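/// 
/// A small illustrative sketch (the file name and contents are made up):
/// 
/// ```ignore
/// use axoasset::SourceFile;
/// 
/// let src = SourceFile::new("example.txt", "hello\nworld".to_string());
/// // Line 2, column 1 is the 'w' in "world", i.e. byte offset 6.
/// let span = src.span_for_line_col(2, 1);
/// assert_eq!(span.map(|s| s.offset()), Some(6));
/// ```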
178 | pub fn span_for_line_col(&self, line: usize, col: usize) -> Option { 179 | let src = self.contents(); 180 | let src_line = src.lines().nth(line.checked_sub(1)?)?; 181 | if col > src_line.len() { 182 | return None; 183 | } 184 | let src_addr = src.as_ptr() as usize; 185 | let line_addr = src_line.as_ptr() as usize; 186 | let line_offset = line_addr.checked_sub(src_addr)?; 187 | let start = line_offset.checked_add(col)?.checked_sub(1)?; 188 | let end = start.checked_add(1)?; 189 | if start > end || end > src.len() { 190 | return None; 191 | } 192 | Some(SourceSpan::from(start..end)) 193 | } 194 | 195 | /// Creates a span for an item using a substring of `contents` 196 | /// 197 | /// Note that substr must be a literal substring, as in it must be 198 | /// a pointer into the same string! If it's not we'll return None. 199 | pub fn span_for_substr(&self, substr: &str) -> Option { 200 | // Get the bounds of the full string 201 | let base_addr = self.inner.contents.as_ptr() as usize; 202 | let base_len = self.inner.contents.len(); 203 | 204 | // Get the bounds of the substring 205 | let substr_addr = substr.as_ptr() as usize; 206 | let substr_len = substr.len(); 207 | 208 | // The index of the substring is just the number of bytes it is from the start 209 | // (This will bail out if the """substring""" has an address *before* the full string) 210 | let start = substr_addr.checked_sub(base_addr)?; 211 | // The end index (exclusive) is just the start index + sublen 212 | // (This will bail out if this overflows) 213 | let end = start.checked_add(substr_len)?; 214 | // Finally, make sure the substr endpoint isn't past the end of the full string 215 | if end > base_len { 216 | return None; 217 | } 218 | 219 | // At this point it's definitely a substring, nice! 220 | Some(SourceSpan::from(start..end)) 221 | } 222 | } 223 | 224 | impl SourceCode for SourceFile { 225 | fn read_span<'a>( 226 | &'a self, 227 | span: &SourceSpan, 228 | context_lines_before: usize, 229 | context_lines_after: usize, 230 | ) -> std::result::Result + 'a>, miette::MietteError> { 231 | let contents = 232 | self.contents() 233 | .read_span(span, context_lines_before, context_lines_after)?; 234 | Ok(Box::new(MietteSpanContents::new_named( 235 | self.origin_path().to_owned(), 236 | contents.data(), 237 | *contents.span(), 238 | contents.line(), 239 | contents.column(), 240 | contents.line_count(), 241 | ))) 242 | } 243 | } 244 | 245 | impl Debug for SourceFile { 246 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 247 | f.debug_struct("SourceFile") 248 | .field("origin_path", &self.origin_path()) 249 | .field("contents", &self.contents()) 250 | .finish() 251 | } 252 | } 253 | -------------------------------------------------------------------------------- /src/spanned.rs: -------------------------------------------------------------------------------- 1 | //! Values with text Spans, for use with serde and miette 2 | 3 | use std::{ 4 | borrow::Borrow, 5 | cmp::Ordering, 6 | fmt::{self, Display}, 7 | hash::{Hash, Hasher}, 8 | ops::{Deref, DerefMut}, 9 | }; 10 | 11 | use miette::SourceSpan; 12 | #[cfg(feature = "toml-serde")] 13 | use serde::{de, ser}; 14 | 15 | /// A spanned value, indicating the range at which it is defined in the source. 16 | #[derive(Clone, Default)] 17 | pub struct Spanned { 18 | start: usize, 19 | end: usize, 20 | value: T, 21 | } 22 | 23 | impl Spanned { 24 | /// Create a Spanned with a specific SourceSpan. 
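/// 
/// A minimal sketch of constructing one by hand (the value and offsets are arbitrary):
/// 
/// ```ignore
/// use axoasset::Spanned;
/// use miette::SourceSpan;
/// 
/// // Wrap the value 42 and record that it spans bytes 10..14 of some source text.
/// let answer = Spanned::with_source_span(42u32, SourceSpan::from(10..14));
/// assert_eq!(Spanned::start(&answer), 10);
/// assert_eq!(Spanned::end(&answer), 14);
/// ```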
25 | pub fn with_source_span(value: T, source: SourceSpan) -> Self { 26 | Spanned { 27 | start: source.offset(), 28 | end: source.offset() + source.len(), 29 | value, 30 | } 31 | } 32 | 33 | /// Access the start of the span of the contained value. 34 | pub fn start(this: &Self) -> usize { 35 | this.start 36 | } 37 | 38 | /// Access the end of the span of the contained value. 39 | pub fn end(this: &Self) -> usize { 40 | this.end 41 | } 42 | 43 | /// Update the span 44 | pub fn update_span(this: &mut Self, start: usize, end: usize) { 45 | this.start = start; 46 | this.end = end; 47 | } 48 | 49 | /// Alter a span to a length anchored from the end. 50 | pub fn from_end(mut this: Self, length: usize) -> Self { 51 | this.start = this.end - length; 52 | this 53 | } 54 | 55 | /// Get the span of the contained value. 56 | pub fn span(this: &Self) -> SourceSpan { 57 | (Self::start(this)..Self::end(this)).into() 58 | } 59 | 60 | /// Consumes the spanned value and returns the contained value. 61 | pub fn into_inner(this: Self) -> T { 62 | this.value 63 | } 64 | } 65 | 66 | impl IntoIterator for Spanned 67 | where 68 | T: IntoIterator, 69 | { 70 | type IntoIter = T::IntoIter; 71 | type Item = T::Item; 72 | fn into_iter(self) -> Self::IntoIter { 73 | self.value.into_iter() 74 | } 75 | } 76 | 77 | impl<'a, T> IntoIterator for &'a Spanned 78 | where 79 | &'a T: IntoIterator, 80 | { 81 | type IntoIter = <&'a T as IntoIterator>::IntoIter; 82 | type Item = <&'a T as IntoIterator>::Item; 83 | fn into_iter(self) -> Self::IntoIter { 84 | self.value.into_iter() 85 | } 86 | } 87 | 88 | impl<'a, T> IntoIterator for &'a mut Spanned 89 | where 90 | &'a mut T: IntoIterator, 91 | { 92 | type IntoIter = <&'a mut T as IntoIterator>::IntoIter; 93 | type Item = <&'a mut T as IntoIterator>::Item; 94 | fn into_iter(self) -> Self::IntoIter { 95 | self.value.into_iter() 96 | } 97 | } 98 | 99 | impl fmt::Debug for Spanned 100 | where 101 | T: fmt::Debug, 102 | { 103 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 104 | self.value.fmt(f) 105 | } 106 | } 107 | 108 | impl Display for Spanned 109 | where 110 | T: Display, 111 | { 112 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 113 | self.value.fmt(f) 114 | } 115 | } 116 | 117 | impl Deref for Spanned { 118 | type Target = T; 119 | fn deref(&self) -> &Self::Target { 120 | &self.value 121 | } 122 | } 123 | 124 | impl DerefMut for Spanned { 125 | fn deref_mut(&mut self) -> &mut Self::Target { 126 | &mut self.value 127 | } 128 | } 129 | 130 | impl Borrow for Spanned { 131 | fn borrow(&self) -> &str { 132 | self 133 | } 134 | } 135 | 136 | impl Borrow for Spanned { 137 | fn borrow(&self) -> &T { 138 | self 139 | } 140 | } 141 | 142 | impl AsRef for Spanned 143 | where 144 | T: AsRef, 145 | { 146 | fn as_ref(&self) -> &U { 147 | self.value.as_ref() 148 | } 149 | } 150 | 151 | impl PartialEq for Spanned { 152 | fn eq(&self, other: &Self) -> bool { 153 | self.value.eq(&other.value) 154 | } 155 | } 156 | 157 | impl> PartialEq for Spanned { 158 | fn eq(&self, other: &T) -> bool { 159 | self.value.eq(other) 160 | } 161 | } 162 | 163 | impl Eq for Spanned {} 164 | 165 | impl Hash for Spanned { 166 | fn hash(&self, state: &mut H) { 167 | self.value.hash(state); 168 | } 169 | } 170 | 171 | impl PartialOrd for Spanned { 172 | fn partial_cmp(&self, other: &Self) -> Option { 173 | self.value.partial_cmp(&other.value) 174 | } 175 | } 176 | 177 | impl> PartialOrd for Spanned { 178 | fn partial_cmp(&self, other: &T) -> Option { 179 | self.value.partial_cmp(other) 180 | } 
181 | } 182 | 183 | impl<T: Ord> Ord for Spanned<T> { 184 | fn cmp(&self, other: &Self) -> Ordering { 185 | self.value.cmp(&other.value) 186 | } 187 | } 188 | 189 | impl<T> From<T> for Spanned<T> { 190 | fn from(value: T) -> Self { 191 | Self { 192 | start: 0, 193 | end: 0, 194 | value, 195 | } 196 | } 197 | } 198 | 199 | #[cfg(feature = "toml-serde")] 200 | impl<T> From<toml::Spanned<T>> for Spanned<T> { 201 | fn from(value: toml::Spanned<T>) -> Self { 202 | let span = value.span(); 203 | Self { 204 | start: span.start, 205 | end: span.end, 206 | value: value.into_inner(), 207 | } 208 | } 209 | } 210 | 211 | #[cfg(feature = "toml-serde")] 212 | impl<'de, T: de::Deserialize<'de>> de::Deserialize<'de> for Spanned<T> { 213 | fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> 214 | where 215 | D: de::Deserializer<'de>, 216 | { 217 | Ok(toml::Spanned::<T>::deserialize(deserializer)?.into()) 218 | } 219 | } 220 | 221 | #[cfg(feature = "toml-serde")] 222 | impl<T: ser::Serialize> ser::Serialize for Spanned<T> { 223 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 224 | where 225 | S: ser::Serializer, 226 | { 227 | self.value.serialize(serializer) 228 | } 229 | } 230 | -------------------------------------------------------------------------------- /tests/assets/README.md: -------------------------------------------------------------------------------- 1 | # axoasset 2 | 3 | [![Github Actions Rust](https://github.com/axodotdev/axoasset/actions/workflows/rust.yml/badge.svg)](https://github.com/axodotdev/axoasset/actions) 4 | [![crates.io](https://img.shields.io/crates/v/axoasset.svg)](https://crates.io/crates/axoasset) 5 | [![License: MPL 2.0](https://img.shields.io/badge/License-MPL_2.0-brightgreen.svg)](https://opensource.org/licenses/MPL-2.0) 6 | 7 | This library offers `read`, `write`, and `copy` functions, for local and remote 8 | assets given a string that contains a relative or absolute local path or a 9 | remote address using http or https. 10 | 11 | 12 | ## Example 13 | 14 | ```rust 15 | use axoasset; 16 | 17 | let assets = vec!("https://my.co/logo.png", "./profile.jpg", "README.md"); 18 | let dest = "public"; 19 | 20 | for asset in assets { 21 | axoasset::copy(asset, "site assets", dest)?; 22 | } 23 | ``` 24 | 25 | ## License 26 | 27 | This software is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. 28 | 29 | Copyright 2022 Axo Developer Co.
30 | -------------------------------------------------------------------------------- /tests/assets/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/axodotdev/axoasset/3fbe4a7cf454775dd218a0dc799aa996e1f3972f/tests/assets/logo.png -------------------------------------------------------------------------------- /tests/assets/styles.css: -------------------------------------------------------------------------------- 1 | @import "@axodotdev/fringe/src/assets/main.css"; 2 | 3 | #__nuxt { 4 | @apply h-full; 5 | } 6 | -------------------------------------------------------------------------------- /tests/common.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "remote")] 2 | pub fn client() -> axoasset::AxoClient { 3 | axoasset::AxoClient::with_reqwest(reqwest::ClientBuilder::new().build().unwrap()) 4 | } 5 | -------------------------------------------------------------------------------- /tests/local_copy.rs: -------------------------------------------------------------------------------- 1 | #![allow(irrefutable_let_patterns)] 2 | 3 | use std::collections::HashMap; 4 | 5 | use assert_fs::prelude::*; 6 | use axoasset::LocalAsset; 7 | use camino::Utf8Path; 8 | 9 | #[tokio::test] 10 | async fn it_copies_local_assets() { 11 | let origin = assert_fs::TempDir::new().unwrap(); 12 | let dest = assert_fs::TempDir::new().unwrap(); 13 | let dest_dir = Utf8Path::from_path(dest.path()).unwrap(); 14 | 15 | let mut files = HashMap::new(); 16 | files.insert("README.md", "# axoasset"); 17 | files.insert("styles.css", "@import"); 18 | 19 | for (file, contents) in files { 20 | let asset = origin.child(file); 21 | asset.write_str(contents).unwrap(); 22 | 23 | LocalAsset::copy_file_to_dir(asset.to_str().unwrap(), dest.to_str().unwrap()).unwrap(); 24 | 25 | let copied_file = dest_dir.join(file); 26 | assert!(copied_file.exists()); 27 | let loaded_asset = LocalAsset::load_asset(copied_file.as_str()).unwrap(); 28 | assert!(std::str::from_utf8(loaded_asset.as_bytes()) 29 | .unwrap() 30 | .contains(contents)); 31 | } 32 | } 33 | 34 | #[tokio::test] 35 | async fn it_copies_named_local_assets() { 36 | let origin = assert_fs::TempDir::new().unwrap(); 37 | let dest = assert_fs::TempDir::new().unwrap(); 38 | let dest_dir = Utf8Path::from_path(dest.path()).unwrap(); 39 | 40 | let mut files = HashMap::new(); 41 | files.insert("README.md", "# axoasset"); 42 | files.insert("styles.css", "@import"); 43 | 44 | for (file, contents) in files { 45 | let asset = origin.child(file); 46 | asset.write_str(contents).unwrap(); 47 | 48 | let origin_path = asset.to_str().unwrap(); 49 | axoasset::LocalAsset::copy_file_to_file(origin_path, dest_dir.join(file)).unwrap(); 50 | 51 | let copied_file = dest_dir.join(file); 52 | assert!(copied_file.exists()); 53 | let loaded_asset = axoasset::LocalAsset::load_asset(copied_file).unwrap(); 54 | assert!(std::str::from_utf8(loaded_asset.as_bytes()) 55 | .unwrap() 56 | .contains(contents)); 57 | } 58 | } 59 | 60 | #[tokio::test] 61 | async fn it_copies_dirs() { 62 | let origin = assert_fs::TempDir::new().unwrap().child("result"); 63 | let dest = assert_fs::TempDir::new().unwrap(); 64 | let origin_dir = Utf8Path::from_path(origin.path()).unwrap(); 65 | let dest_dir = Utf8Path::from_path(dest.path()).unwrap(); 66 | origin.create_dir_all().unwrap(); 67 | 68 | // None means it's just a dir, used to make sure empty dirs get copied 69 | let mut files = HashMap::new(); 70 | 
files.insert("blah/blargh/README3.md", Some("# axoasset3")); 71 | files.insert("blah/README2.md", Some("# axoasset2")); 72 | files.insert("blah/README.md", Some("# axoasset")); 73 | files.insert("styles.css", Some("@import")); 74 | files.insert("blah/blargh/empty_dir", None); 75 | files.insert("empty/dirs", None); 76 | files.insert("root_empty", None); 77 | 78 | for (file, contents) in &files { 79 | let asset = origin.child(file); 80 | if let Some(contents) = contents { 81 | std::fs::create_dir_all(asset.parent().unwrap()).unwrap(); 82 | asset.write_str(contents).unwrap(); 83 | } else { 84 | asset.create_dir_all().unwrap(); 85 | } 86 | } 87 | 88 | axoasset::LocalAsset::copy_dir_to_parent_dir(origin_dir, dest_dir).unwrap(); 89 | 90 | for (file, contents) in &files { 91 | let copied_file = dest_dir.join("result").join(file); 92 | 93 | assert!(copied_file.exists()); 94 | if let Some(contents) = contents { 95 | let loaded_asset = axoasset::LocalAsset::load_asset(copied_file).unwrap(); 96 | assert!(std::str::from_utf8(loaded_asset.as_bytes()) 97 | .unwrap() 98 | .contains(contents)); 99 | } 100 | } 101 | } 102 | 103 | #[tokio::test] 104 | async fn it_copies_named_dirs() { 105 | let origin = assert_fs::TempDir::new().unwrap(); 106 | let dest = assert_fs::TempDir::new().unwrap(); 107 | let origin_dir = Utf8Path::from_path(origin.path()).unwrap(); 108 | let dest_dir = Utf8Path::from_path(dest.path()).unwrap().join("result"); 109 | 110 | // None means it's just a dir, used to make sure empty dirs get copied 111 | let mut files = HashMap::new(); 112 | files.insert("blah/blargh/README3.md", Some("# axoasset3")); 113 | files.insert("blah/README2.md", Some("# axoasset2")); 114 | files.insert("blah/README.md", Some("# axoasset")); 115 | files.insert("styles.css", Some("@import")); 116 | files.insert("blah/blargh/empty_dir", None); 117 | files.insert("empty/dirs", None); 118 | files.insert("root_empty", None); 119 | 120 | for (file, contents) in &files { 121 | let asset = origin.child(file); 122 | if let Some(contents) = contents { 123 | std::fs::create_dir_all(asset.parent().unwrap()).unwrap(); 124 | asset.write_str(contents).unwrap(); 125 | } else { 126 | asset.create_dir_all().unwrap(); 127 | } 128 | } 129 | 130 | axoasset::LocalAsset::copy_dir_to_dir(origin_dir, &dest_dir).unwrap(); 131 | 132 | for (file, contents) in &files { 133 | let copied_file = dest_dir.join(file); 134 | 135 | assert!(copied_file.exists()); 136 | if let Some(contents) = contents { 137 | let loaded_asset = axoasset::LocalAsset::load_asset(copied_file).unwrap(); 138 | assert!(std::str::from_utf8(loaded_asset.as_bytes()) 139 | .unwrap() 140 | .contains(contents)); 141 | } 142 | } 143 | } 144 | -------------------------------------------------------------------------------- /tests/local_load.rs: -------------------------------------------------------------------------------- 1 | #![allow(irrefutable_let_patterns)] 2 | 3 | use std::collections::HashMap; 4 | use std::path::Path; 5 | 6 | use assert_fs::prelude::*; 7 | 8 | #[tokio::test] 9 | async fn it_loads_local_assets() { 10 | let origin = assert_fs::TempDir::new().unwrap(); 11 | 12 | let mut files = HashMap::new(); 13 | files.insert("README.md", "# axoasset"); 14 | files.insert("styles.css", "@import"); 15 | 16 | for (file, contents) in files { 17 | let asset = origin.child(file); 18 | let content = Path::new("./tests/assets").join(file); 19 | asset.write_file(&content).unwrap(); 20 | 21 | let origin_path = asset.to_str().unwrap(); 22 | let loaded_asset = 
axoasset::LocalAsset::load_asset(origin_path).unwrap(); 23 | assert!(std::str::from_utf8(loaded_asset.as_bytes()) 24 | .unwrap() 25 | .contains(contents)); 26 | } 27 | } 28 | 29 | #[tokio::test] 30 | async fn it_loads_local_assets_as_bytes() { 31 | let origin = assert_fs::TempDir::new().unwrap(); 32 | 33 | let mut files = HashMap::new(); 34 | files.insert("README.md", "# axoasset"); 35 | files.insert("styles.css", "@import"); 36 | 37 | for (file, contents) in files { 38 | let asset = origin.child(file); 39 | let content = Path::new("./tests/assets").join(file); 40 | asset.write_file(&content).unwrap(); 41 | 42 | let origin_path = asset.to_str().unwrap(); 43 | let loaded_bytes = axoasset::LocalAsset::load_bytes(origin_path).unwrap(); 44 | 45 | assert!(std::str::from_utf8(&loaded_bytes) 46 | .unwrap() 47 | .contains(contents)); 48 | } 49 | } 50 | 51 | #[tokio::test] 52 | async fn it_loads_local_assets_as_strings() { 53 | let origin = assert_fs::TempDir::new().unwrap(); 54 | 55 | let mut files = HashMap::new(); 56 | files.insert("README.md", "# axoasset"); 57 | files.insert("styles.css", "@import"); 58 | 59 | for (file, contents) in files { 60 | let asset = origin.child(file); 61 | let content = Path::new("./tests/assets").join(file); 62 | asset.write_file(&content).unwrap(); 63 | 64 | let origin_path = asset.to_str().unwrap(); 65 | let loaded_string = axoasset::LocalAsset::load_string(origin_path).unwrap(); 66 | 67 | assert!(loaded_string.contains(contents)) 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /tests/local_new.rs: -------------------------------------------------------------------------------- 1 | #![allow(irrefutable_let_patterns)] 2 | 3 | use std::collections::HashMap; 4 | use std::path::Path; 5 | 6 | #[tokio::test] 7 | async fn it_creates_new_assets() { 8 | let dest = assert_fs::TempDir::new().unwrap(); 9 | 10 | let mut files = HashMap::new(); 11 | files.insert("README.md", "# axoasset"); 12 | files.insert("styles.css", "@import"); 13 | 14 | for (file, contents) in files { 15 | let origin_path = Path::new("./tests/assets").join(file).display().to_string(); 16 | let dest_dir = Path::new(&dest.as_os_str()) 17 | .join(file) 18 | .display() 19 | .to_string(); 20 | axoasset::LocalAsset::new(&origin_path, contents.into()) 21 | .unwrap() 22 | .write_to_dir(dest.to_str().unwrap()) 23 | .unwrap(); 24 | 25 | let loaded_asset = axoasset::LocalAsset::load_asset(&dest_dir).unwrap(); 26 | 27 | assert!(std::str::from_utf8(loaded_asset.as_bytes()) 28 | .unwrap() 29 | .contains(contents)); 30 | } 31 | } 32 | 33 | #[test] 34 | fn it_creates_parent_directories() { 35 | let dest = assert_fs::TempDir::new().unwrap(); 36 | 37 | let dest_path = Path::new(&dest.as_os_str()) 38 | .join("subdir") 39 | .join("test.md") 40 | .display() 41 | .to_string(); 42 | axoasset::LocalAsset::write_new_all("file content", dest_path).unwrap(); 43 | 44 | assert!(Path::new(&dest.as_os_str()).join("subdir").exists()); 45 | } 46 | 47 | #[test] 48 | fn it_creates_a_new_directory() { 49 | let dest = assert_fs::TempDir::new().unwrap(); 50 | 51 | let dest_dir = Path::new(&dest.as_os_str()) 52 | .join("subdir") 53 | .display() 54 | .to_string(); 55 | axoasset::LocalAsset::create_dir(dest_dir).unwrap(); 56 | 57 | assert!(Path::new(&dest.as_os_str()).join("subdir").exists()); 58 | } 59 | -------------------------------------------------------------------------------- /tests/local_remove.rs: -------------------------------------------------------------------------------- 1 | 
use std::fs; 2 | use std::path::Path; 3 | 4 | #[test] 5 | fn it_removes_both_file_and_directory() { 6 | let dest = assert_fs::TempDir::new().unwrap(); 7 | let file_path = Path::new(&dest.as_os_str()).join("subdir").join("test.md"); 8 | let dir_path = Path::new(&dest.as_os_str()).join("subdir"); 9 | 10 | fs::create_dir_all(file_path.parent().unwrap()).unwrap(); 11 | fs::write(&file_path, "hello").unwrap(); 12 | 13 | axoasset::LocalAsset::remove_file(file_path.display().to_string()).unwrap(); 14 | assert!(!file_path.exists()); 15 | 16 | axoasset::LocalAsset::remove_dir(dir_path.display().to_string()).unwrap(); 17 | assert!(!dir_path.exists()); 18 | } 19 | -------------------------------------------------------------------------------- /tests/local_write.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "image")] 2 | #![allow(irrefutable_let_patterns)] 3 | 4 | use std::collections::HashMap; 5 | use std::fs; 6 | use std::path::Path; 7 | 8 | use assert_fs::prelude::*; 9 | use image::ImageFormat; 10 | 11 | #[test] 12 | fn it_writes_a_new_file_from_string() { 13 | let dest = assert_fs::TempDir::new().unwrap(); 14 | let dest_file = Path::new(dest.to_str().unwrap()).join("contents.txt"); 15 | 16 | let contents = "CONTENTS"; 17 | axoasset::LocalAsset::write_new(contents, dest_file.to_str().unwrap()).unwrap(); 18 | assert!(dest_file.exists()); 19 | 20 | let loaded_contents = 21 | axoasset::LocalAsset::load_string(dest_file.display().to_string()).unwrap(); 22 | assert!(loaded_contents.contains(contents)); 23 | } 24 | 25 | #[tokio::test] 26 | async fn it_writes_local_assets() { 27 | let origin = assert_fs::TempDir::new().unwrap(); 28 | let dest = assert_fs::TempDir::new().unwrap(); 29 | let dest_dir = Path::new(dest.to_str().unwrap()); 30 | 31 | let mut files = HashMap::new(); 32 | files.insert("README.md", "# axoasset"); 33 | files.insert("styles.css", "@import"); 34 | files.insert("logo.png", ""); 35 | 36 | for (file, contents) in files { 37 | let asset = origin.child(file); 38 | let content = Path::new("./tests/assets").join(file); 39 | asset.write_file(&content).unwrap(); 40 | 41 | let origin_path = asset.to_str().unwrap(); 42 | let asset = axoasset::LocalAsset::load_asset(origin_path).unwrap(); 43 | 44 | asset.write_to_dir(dest.to_str().unwrap()).unwrap(); 45 | let written_file = dest_dir.join(file); 46 | assert!(written_file.exists()); 47 | if asset.origin_path().as_str().ends_with("png") { 48 | let format = ImageFormat::from_path(written_file).unwrap(); 49 | assert_eq!(format, ImageFormat::Png); 50 | } else { 51 | fs::read_to_string(written_file).unwrap().contains(contents); 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /tests/remote_copy.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "remote")] 2 | 3 | mod common; 4 | 5 | use std::fs; 6 | use std::path::Path; 7 | 8 | use wiremock::matchers::{method, path}; 9 | use wiremock::{Mock, MockServer, ResponseTemplate}; 10 | 11 | #[tokio::test] 12 | async fn it_copies_remote_assets() { 13 | let mock_server = MockServer::start().await; 14 | 15 | let dest = assert_fs::TempDir::new().unwrap(); 16 | let dest_dir = Path::new(dest.to_str().unwrap()); 17 | 18 | let routes = vec!["README.md", "styles.css"]; 19 | let readme_string = fs::read_to_string("./tests/assets/README.md").unwrap(); 20 | let styles_string = fs::read_to_string("./tests/assets/styles.css").unwrap(); 21 | 22 | for route 
in routes { 23 | let resp_string = if route.to_uppercase().contains("README") { 24 | &readme_string 25 | } else { 26 | &styles_string 27 | }; 28 | 29 | Mock::given(method("GET")) 30 | .and(path(route)) 31 | .respond_with(ResponseTemplate::new(200).set_body_string(resp_string)) 32 | .mount(&mock_server) 33 | .await; 34 | 35 | let origin_path = format!("http://{}/{}", mock_server.address(), route); 36 | let copied_filename = common::client() 37 | .load_and_write_to_dir(&origin_path, dest.to_str().unwrap()) 38 | .await 39 | .unwrap(); 40 | let copied_file = dest_dir.join(copied_filename); 41 | assert!(copied_file.exists()); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /tests/remote_load.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "remote")] 2 | 3 | use std::collections::HashMap; 4 | use std::fs; 5 | 6 | use wiremock::matchers::{method, path}; 7 | use wiremock::{Mock, MockServer, ResponseTemplate}; 8 | 9 | mod common; 10 | 11 | #[tokio::test] 12 | async fn it_loads_remote_assets() { 13 | let mock_server = MockServer::start().await; 14 | 15 | let mut assets = HashMap::new(); 16 | assets.insert("/README.md", "# axoasset"); 17 | assets.insert("/README", "# axoasset"); 18 | assets.insert("/styles.css", "@import"); 19 | assets.insert("/styles", "@import"); 20 | 21 | for (route, contents) in assets { 22 | let response = if route.contains("README") { 23 | let readme_bytes = fs::read("./tests/assets/README.md").unwrap(); 24 | ResponseTemplate::new(200) 25 | .set_body_bytes(readme_bytes) 26 | .insert_header("Content-Type", "text/plain+md") 27 | } else { 28 | let styles_bytes = fs::read("./tests/assets/styles.css").unwrap(); 29 | ResponseTemplate::new(200) 30 | .set_body_bytes(styles_bytes) 31 | .insert_header("Content-Type", "text/css") 32 | }; 33 | 34 | Mock::given(method("GET")) 35 | .and(path(route)) 36 | .respond_with(response) 37 | .mount(&mock_server) 38 | .await; 39 | 40 | let mut origin_path = format!("http://{}", mock_server.address()); 41 | origin_path.push_str(route); 42 | let asset = common::client().load_asset(&origin_path).await.unwrap(); 43 | 44 | assert!(std::str::from_utf8(asset.as_bytes()) 45 | .unwrap() 46 | .contains(contents)); 47 | } 48 | } 49 | 50 | #[tokio::test] 51 | async fn it_loads_remote_assets_as_bytes() { 52 | let mock_server = MockServer::start().await; 53 | 54 | let mut assets = HashMap::new(); 55 | assets.insert("/README.md", "# axoasset"); 56 | assets.insert("/README", "# axoasset"); 57 | assets.insert("/styles.css", "@import"); 58 | assets.insert("/styles", "@import"); 59 | 60 | for (route, contents) in assets { 61 | let response = if route.contains("README") { 62 | let readme_bytes = fs::read("./tests/assets/README.md").unwrap(); 63 | ResponseTemplate::new(200) 64 | .set_body_bytes(readme_bytes) 65 | .insert_header("Content-Type", "text/plain+md") 66 | } else { 67 | let styles_bytes = fs::read("./tests/assets/styles.css").unwrap(); 68 | ResponseTemplate::new(200) 69 | .set_body_bytes(styles_bytes) 70 | .insert_header("Content-Type", "text/css") 71 | }; 72 | 73 | Mock::given(method("GET")) 74 | .and(path(route)) 75 | .respond_with(response) 76 | .mount(&mock_server) 77 | .await; 78 | 79 | let mut origin_path = format!("http://{}", mock_server.address()); 80 | origin_path.push_str(route); 81 | let loaded_bytes = common::client().load_bytes(&origin_path).await.unwrap(); 82 | 83 | assert!(std::str::from_utf8(&loaded_bytes) 84 | .unwrap() 85 | 
.contains(contents)); 86 | } 87 | } 88 | 89 | #[tokio::test] 90 | async fn it_loads_remote_assets_as_string() { 91 | let mock_server = MockServer::start().await; 92 | 93 | let mut assets = HashMap::new(); 94 | assets.insert("/README.md", "# axoasset"); 95 | assets.insert("/README", "# axoasset"); 96 | assets.insert("/styles.css", "@import"); 97 | assets.insert("/styles", "@import"); 98 | 99 | for (route, contents) in assets { 100 | let response = if route.contains("README") { 101 | let readme_bytes = fs::read("./tests/assets/README.md").unwrap(); 102 | ResponseTemplate::new(200) 103 | .set_body_bytes(readme_bytes) 104 | .insert_header("Content-Type", "text/plain+md") 105 | } else { 106 | let styles_bytes = fs::read("./tests/assets/styles.css").unwrap(); 107 | ResponseTemplate::new(200) 108 | .set_body_bytes(styles_bytes) 109 | .insert_header("Content-Type", "text/css") 110 | }; 111 | 112 | Mock::given(method("GET")) 113 | .and(path(route)) 114 | .respond_with(response) 115 | .mount(&mock_server) 116 | .await; 117 | 118 | let mut origin_path = format!("http://{}", mock_server.address()); 119 | origin_path.push_str(route); 120 | let loaded_string = common::client().load_string(&origin_path).await.unwrap(); 121 | 122 | assert!(loaded_string.contains(contents)); 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /tests/remote_write.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "remote")] 2 | 3 | use std::collections::HashMap; 4 | use std::fs; 5 | 6 | use wiremock::matchers::{method, path}; 7 | use wiremock::{Mock, MockServer, ResponseTemplate}; 8 | 9 | mod common; 10 | 11 | #[tokio::test] 12 | async fn it_writes_remote_assets() { 13 | let mock_server = MockServer::start().await; 14 | 15 | let dest = assert_fs::TempDir::new().unwrap(); 16 | 17 | let mut assets = HashMap::new(); 18 | assets.insert("/README.md", "# axoasset"); 19 | assets.insert("/README", "# axoasset"); 20 | assets.insert("/styles.css", "@import"); 21 | assets.insert("/styles", "@import"); 22 | 23 | for (route, contents) in assets { 24 | let response = if route.contains("README") { 25 | let readme_bytes = fs::read("./tests/assets/README.md").unwrap(); 26 | ResponseTemplate::new(200) 27 | .set_body_bytes(readme_bytes) 28 | .insert_header("Content-Type", "text/plain+md") 29 | } else { 30 | let styles_bytes = fs::read("./tests/assets/styles.css").unwrap(); 31 | ResponseTemplate::new(200) 32 | .set_body_bytes(styles_bytes) 33 | .insert_header("Content-Type", "text/css") 34 | }; 35 | 36 | Mock::given(method("GET")) 37 | .and(path(route)) 38 | .respond_with(response) 39 | .mount(&mock_server) 40 | .await; 41 | 42 | let mut origin_path = format!("http://{}", mock_server.address()); 43 | origin_path.push_str(route); 44 | let asset = common::client().load_asset(&origin_path).await.unwrap(); 45 | 46 | let dest = asset.write_to_dir(dest.to_str().unwrap()).await.unwrap(); 47 | assert!(dest.exists()); 48 | fs::read_to_string(dest).unwrap().contains(contents); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /tests/source.rs: -------------------------------------------------------------------------------- 1 | use miette::SourceCode; 2 | 3 | #[test] 4 | fn substr_span() { 5 | // Make the file 6 | let contents = String::from("hello !there!"); 7 | let source = axoasset::SourceFile::new("file.md", contents); 8 | 9 | // Do some random parsing operation 10 | let mut parse = 
source.contents().split('!');
11 |     let _ = parse.next();
12 |     let there = parse.next().unwrap();
13 | 
14 |     // Get the span
15 |     let there_span = source.span_for_substr(there).unwrap();
16 | 
17 |     // Assert the span is correct
18 |     let span_bytes = source.read_span(&there_span, 0, 0).unwrap().data();
19 |     assert_eq!(std::str::from_utf8(span_bytes).unwrap(), "there");
20 | }
21 | 
22 | #[test]
23 | fn substr_span_invalid() {
24 |     // Make the file
25 |     let contents = String::from("hello !there!");
26 |     let source = axoasset::SourceFile::new("file.md", contents);
27 | 
28 |     // Get the span for a non-substring (string literal isn't pointing into the String)
29 |     let there_span = source.span_for_substr("there");
30 |     assert_eq!(there_span, None);
31 | }
32 | 
33 | #[cfg(feature = "json-serde")]
34 | #[test]
35 | fn json_valid() {
36 |     #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
37 |     struct MyType {
38 |         hello: String,
39 |         goodbye: bool,
40 |     }
41 | 
42 |     // Make the file
43 |     let contents = String::from(r##"{ "hello": "there", "goodbye": true }"##);
44 |     let source = axoasset::SourceFile::new("file.js", contents);
45 | 
46 |     // Deserialize the JSON and check the parsed value
47 |     let val = source.deserialize_json::<MyType>().unwrap();
48 |     assert_eq!(
49 |         val,
50 |         MyType {
51 |             hello: "there".to_string(),
52 |             goodbye: true
53 |         }
54 |     );
55 | }
56 | 
57 | #[cfg(feature = "json-serde")]
58 | #[test]
59 | fn json_with_bom() {
60 |     #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
61 |     struct MyType {
62 |         hello: String,
63 |         goodbye: bool,
64 |     }
65 | 
66 |     // Make the file
67 |     let contents =
68 |         String::from("\u{FEFF}") + &String::from(r##"{ "hello": "there", "goodbye": true }"##);
69 |     let source = axoasset::SourceFile::new("file.js", contents);
70 | 
71 |     // Deserialize the JSON (the BOM should be stripped) and check the parsed value
72 |     let val = source.deserialize_json::<MyType>().unwrap();
73 |     assert_eq!(
74 |         val,
75 |         MyType {
76 |             hello: "there".to_string(),
77 |             goodbye: true
78 |         }
79 |     );
80 | }
81 | 
82 | #[cfg(feature = "json-serde")]
83 | #[test]
84 | fn json_invalid() {
85 |     use axoasset::AxoassetError;
86 | 
87 |     #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
88 |     struct MyType {
89 |         hello: String,
90 |         goodbye: bool,
91 |     }
92 | 
93 |     // Make the file
94 |     let contents = String::from(r##"{ "hello": "there", "goodbye": true, }"##);
95 |     let source = axoasset::SourceFile::new("file.js", contents);
96 | 
97 |     // Deserialization should fail with an error that carries a span
98 |     let res = source.deserialize_json::<MyType>();
99 |     assert!(res.is_err());
100 |     let Err(AxoassetError::Json { span: Some(_), .. }) = res else {
101 |         panic!("span was invalid");
102 |     };
103 | }
104 | 
105 | #[cfg(feature = "toml-serde")]
106 | #[test]
107 | fn toml_valid() {
108 |     #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
109 |     struct MyType {
110 |         hello: String,
111 |         goodbye: bool,
112 |     }
113 | 
114 |     // Make the file
115 |     let contents = String::from(
116 |         r##"
117 | hello = "there"
118 | goodbye = true
119 | "##,
120 |     );
121 |     let source = axoasset::SourceFile::new("file.toml", contents);
122 | 
123 |     // Deserialize the TOML and check the parsed value
124 |     let val = source.deserialize_toml::<MyType>().unwrap();
125 |     assert_eq!(
126 |         val,
127 |         MyType {
128 |             hello: "there".to_string(),
129 |             goodbye: true
130 |         }
131 |     );
132 | }
133 | 
134 | #[cfg(feature = "toml-serde")]
135 | #[test]
136 | fn toml_invalid() {
137 |     use axoasset::AxoassetError;
138 | 
139 |     #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
140 |     struct MyType {
141 |         hello: String,
142 |         goodbye: bool,
143 |     }
144 | 
145 |     // Make the file
146 |     let contents = String::from(
147 |         r##"
148 | hello = "there"
149 | goodbye =
150 | "##,
151 |     );
152 |     let source = axoasset::SourceFile::new("file.toml", contents);
153 | 
154 |     // Deserialization should fail with an error that carries a span
155 |     let res = source.deserialize_toml::<MyType>();
156 |     assert!(res.is_err());
157 |     let Err(AxoassetError::Toml { span: Some(_), .. }) = res else {
158 |         panic!("span was invalid");
159 |     };
160 | }
161 | 
162 | #[cfg(feature = "toml-edit")]
163 | #[test]
164 | fn toml_edit_valid() {
165 |     // Make the file
166 |     let contents = String::from(
167 |         r##"
168 | hello = "there"
169 | goodbye = true
170 | "##,
171 |     );
172 |     let source = axoasset::SourceFile::new("file.toml", contents);
173 | 
174 |     // Parse the TOML document and check the parsed values
175 |     let val = source.deserialize_toml_edit().unwrap();
176 |     assert_eq!(val["hello"].as_str().unwrap(), "there");
177 |     assert_eq!(val["goodbye"].as_bool().unwrap(), true);
178 | }
179 | 
180 | #[cfg(feature = "toml-edit")]
181 | #[test]
182 | fn toml_edit_invalid() {
183 |     use axoasset::AxoassetError;
184 | 
185 |     // Make the file
186 |     let contents = String::from(
187 |         r##"
188 | hello = "there"
189 | goodbye =
190 | "##,
191 |     );
192 |     let source = axoasset::SourceFile::new("file.toml", contents);
193 | 
194 |     // Parsing should fail with an error that carries a span
195 |     let res = source.deserialize_toml_edit();
196 |     assert!(res.is_err());
197 |     let Err(AxoassetError::TomlEdit { span: Some(_), .. }) = res else {
198 |         panic!("span was invalid");
199 |     };
200 | }
201 | 
202 | #[test]
203 | #[cfg(feature = "yaml-serde")]
204 | fn yaml_valid() {
205 |     #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
206 |     struct MyType {
207 |         hello: String,
208 |         goodbye: bool,
209 |     }
210 | 
211 |     // Make the file
212 |     let contents = String::from(
213 |         r##"
214 | hello: "there"
215 | goodbye: true
216 | "##,
217 |     );
218 |     let source = axoasset::SourceFile::new("file.yaml", contents);
219 | 
220 |     let res = source.deserialize_yaml::<MyType>().unwrap();
221 |     assert_eq!(res.hello, "there");
222 |     assert_eq!(res.goodbye, true);
223 | }
224 | 
225 | #[test]
226 | #[cfg(feature = "yaml-serde")]
227 | fn yaml_invalid() {
228 |     use axoasset::AxoassetError;
229 | 
230 |     #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
231 |     struct MyType {
232 |         hello: String,
233 |         goodbye: bool,
234 |     }
235 | 
236 |     // Make the file
237 |     let contents = String::from(
238 |         r##"
239 | hello: "there"
240 | goodbye: "this shouldn't be a string"
241 | "##,
242 |     );
243 |     let source = axoasset::SourceFile::new("file.yml", contents);
244 | 
245 |     let res = source.deserialize_yaml::<MyType>();
246 |     assert!(res.is_err());
247 |     let Err(AxoassetError::Yaml { span: Some(_), .. }) = res else {
248 |         panic!("span was invalid");
249 |     };
250 | }
251 | 
--------------------------------------------------------------------------------
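
For orientation, here is a minimal sketch (not part of the repository) of how the pieces exercised above fit together: `SourceFile` drives deserialization, `Spanned<T>` records the byte offsets of a field, and `Spanned::span` converts them into a `miette::SourceSpan` for diagnostics. It assumes the `toml-serde` feature is enabled, that `Spanned` is exported from the crate root alongside `SourceFile`, and the `Config` type is purely illustrative.

```rust
// Sketch only: assumes the `toml-serde` feature and serde's derive support are available.
use axoasset::{SourceFile, Spanned};

#[derive(serde::Deserialize)]
struct Config {
    // Wrapping a field in Spanned<T> records where it was found in the source.
    hello: Spanned<String>,
    goodbye: bool,
}

fn main() -> Result<(), axoasset::AxoassetError> {
    let contents = String::from("hello = \"there\"\ngoodbye = true\n");
    let source = SourceFile::new("file.toml", contents);

    // deserialize_toml threads span information through toml::Spanned (see spanned.rs above).
    let config = source.deserialize_toml::<Config>()?;

    // Deref makes the wrapper transparent for ordinary use...
    assert_eq!(config.hello.as_str(), "there");
    assert!(config.goodbye);

    // ...while the associated functions expose the recorded span for diagnostics.
    let span = Spanned::span(&config.hello);
    println!("`hello` spans bytes {}..{}", span.offset(), span.offset() + span.len());
    Ok(())
}
```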